diff --git a/example/src/main/scala/com/vesoft/nebula/examples/connector/NebulaSparkReaderExample.scala b/example/src/main/scala/com/vesoft/nebula/examples/connector/NebulaSparkReaderExample.scala
index aa4078f3..8a4a94b4 100644
--- a/example/src/main/scala/com/vesoft/nebula/examples/connector/NebulaSparkReaderExample.scala
+++ b/example/src/main/scala/com/vesoft/nebula/examples/connector/NebulaSparkReaderExample.scala
@@ -33,6 +33,7 @@ object NebulaSparkReaderExample {
     readEdges(spark)
     readVertexGraph(spark)
     readEdgeGraph(spark)
+    readEdgeWithNgql(spark)

     spark.close()
     sys.exit()
@@ -178,11 +179,15 @@ object NebulaSparkReaderExample {
       .builder()
       .withSpace("test")
       .withLabel("friend")
+      // make sure you have configured NoColumn to true or returnCols with at least one column.
+      //.withNoColumn(true)
+      .withReturnCols(List("degree"))
+      // make sure your ngql statement returns edges; the connector does not check the statement.
      .withNgql("match (v)-[e:friend]-(v2) return e")
      .build()
    val edge = spark.read.nebula(config, nebulaReadConfig).loadEdgesToDfByNgql()
    edge.printSchema()
    edge.show(20)
-    println("veedgertex count: " + edge.count())
+    println("edge count: " + edge.count())
   }
 }
diff --git a/nebula-spark-connector/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala b/nebula-spark-connector/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala
index 3cd6c7d6..8856aa29 100644
--- a/nebula-spark-connector/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala
+++ b/nebula-spark-connector/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala
@@ -27,7 +27,7 @@ class WriteInsertSuite extends AnyFunSuite with BeforeAndAfterAll {
   test("write vertex into test_write_string space with insert mode") {
     SparkMock.writeVertex()
     val addresses: List[Address] = List(new Address("127.0.0.1", 9669))
-    val graphProvider = new GraphProvider(addresses, 3000)
+    val graphProvider = new GraphProvider(addresses, 3000)
     graphProvider.switchSpace("root", "nebula", "test_write_string")
     val createIndexResult: ResultSet = graphProvider.submit(
@@ -50,7 +50,7 @@ class WriteInsertSuite extends AnyFunSuite with BeforeAndAfterAll {
     SparkMock.writeEdge()

     val addresses: List[Address] = List(new Address("127.0.0.1", 9669))
-    val graphProvider = new GraphProvider(addresses, 3000)
+    val graphProvider = new GraphProvider(addresses, 3000)
     graphProvider.switchSpace("root", "nebula", "test_write_string")
     val createIndexResult: ResultSet = graphProvider.submit(
@@ -63,7 +63,7 @@ class WriteInsertSuite extends AnyFunSuite with BeforeAndAfterAll {
     graphProvider.submit("use test_write_string;")
     val resultSet: ResultSet =
-      graphProvider.submit("match (v:person_connector)-[e:friend_connector] -> () return e;")
+      graphProvider.submit("match (v:person_connector)-[e:friend_connector]-> () return e;")
     assert(resultSet.isSucceeded)
     assert(resultSet.getColumnNames.size() == 1)
     assert(resultSet.getRows.size() == 13)
diff --git a/nebula-spark-connector_2.2/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala b/nebula-spark-connector_2.2/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala
index 5b9bf051..11bee4df 100644
--- a/nebula-spark-connector_2.2/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala
+++ b/nebula-spark-connector_2.2/src/test/scala/com/vesoft/nebula/connector/writer/WriteInsertSuite.scala
@@ -65,7 +65,7 @@ class WriteInsertSuite extends AnyFunSuite with BeforeAndAfterAll {
     graphProvider.submit("use test_write_string;")
     val resultSet: ResultSet =
-      graphProvider.submit("match (v:person_connector)-[e:friend_connector] -> () return e;")
+      graphProvider.submit("match (v:person_connector)-[e:friend_connector]-> () return e;")
     assert(resultSet.getColumnNames.size() == 1)
     assert(resultSet.getRows.size() == 13)
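
For readers following the example change above, here is a minimal, self-contained sketch of what the new readEdgeWithNgql method looks like end to end. It is assembled from the hunk in NebulaSparkReaderExample plus the connector API used elsewhere in that example; the NebulaConnectionConfig values (meta/graph addresses, retry count), the surrounding object, and the import paths are assumptions for illustration, not part of this change.

// Sketch only: illustrates the readEdgeWithNgql example added by this change.
// Connection addresses and retry settings below are placeholders.
import org.apache.spark.sql.SparkSession
import com.vesoft.nebula.connector.connector.NebulaDataFrameReader
import com.vesoft.nebula.connector.{NebulaConnectionConfig, ReadNebulaConfig}

object ReadEdgeWithNgqlSketch {

  def readEdgeWithNgql(spark: SparkSession): Unit = {
    // reading by nGQL goes through graphd, so a graph address is configured as well
    val config = NebulaConnectionConfig
      .builder()
      .withMetaAddress("127.0.0.1:9559")
      .withGraphAddress("127.0.0.1:9669")
      .withConnectionRetry(2)
      .build()

    val nebulaReadConfig = ReadNebulaConfig
      .builder()
      .withSpace("test")
      .withLabel("friend")
      // either withNoColumn(true) or withReturnCols with at least one column is required
      .withReturnCols(List("degree"))
      // the nGQL statement must return edges; the connector does not validate this
      .withNgql("match (v)-[e:friend]-(v2) return e")
      .build()

    val edge = spark.read.nebula(config, nebulaReadConfig).loadEdgesToDfByNgql()
    edge.printSchema()
    edge.show(20)
    println("edge count: " + edge.count())
  }
}

In this sketch, withReturnCols(List("degree")) is used instead of withNoColumn(true) so that the resulting DataFrame carries the degree property alongside the source, destination, and rank columns.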