diff --git a/comparison-benchmark/rust/raphtory-rust-benchmark/src/main.rs b/comparison-benchmark/rust/raphtory-rust-benchmark/src/main.rs index 70a5d919e0..7b8616eed8 100644 --- a/comparison-benchmark/rust/raphtory-rust-benchmark/src/main.rs +++ b/comparison-benchmark/rust/raphtory-rust-benchmark/src/main.rs @@ -176,8 +176,8 @@ fn main() { if debug { println!( "Graph has {} vertices and {} edges", - g.num_vertices(), - g.num_edges() + g.count_vertices(), + g.count_edges() ) } diff --git a/examples/custom-algorithm/src/main.rs b/examples/custom-algorithm/src/main.rs index 081cd72387..97c250f6f8 100644 --- a/examples/custom-algorithm/src/main.rs +++ b/examples/custom-algorithm/src/main.rs @@ -29,7 +29,7 @@ impl Algorithm for DummyAlgorithm { ) -> FieldResult>> { let mandatory_arg = ctx.args.try_get("mandatoryArg")?.u64()?; let optional_arg = ctx.args.get("optionalArg").map(|v| v.u64()).transpose()?; - let num_vertices = graph.num_vertices(); + let num_vertices = graph.count_vertices(); let output = Self { number_of_nodes: num_vertices, message: format!("mandatory arg: '{mandatory_arg}', optional arg: '{optional_arg:?}'"), diff --git a/examples/custom_python_extension/src/lib.rs b/examples/custom_python_extension/src/lib.rs index e081ec3058..443ccd5787 100644 --- a/examples/custom_python_extension/src/lib.rs +++ b/examples/custom_python_extension/src/lib.rs @@ -2,7 +2,7 @@ use pyo3::prelude::*; use raphtory::{db::api::view::internal::DynamicGraph, prelude::GraphViewOps}; fn custom_algorithm(graph: &G) -> usize { - graph.num_vertices() + graph.count_vertices() } #[pyfunction(name = "custom_algorithm")] diff --git a/examples/py/math_overflow/mo_investigate.ipynb b/examples/py/math_overflow/mo_investigate.ipynb index 940b227509..141cd7ee9c 100644 --- a/examples/py/math_overflow/mo_investigate.ipynb +++ b/examples/py/math_overflow/mo_investigate.ipynb @@ -307,7 +307,7 @@ "views = g.expanding(step = 86400)\n", "timestamps = [dt.datetime.fromtimestamp(v.latest_time()) for v in views]\n", "\n", - "aggr_vertices = [v.num_vertices() for v in views]\n", + "aggr_vertices = [v.count_vertices() for v in views]\n", "\n", "agg_window = np.zeros(len(timestamps),dtype=int)\n", "df = pd.DataFrame({\"time\": timestamps, \"window\":agg_window, \"vertices\": aggr_vertices})\n", @@ -318,7 +318,7 @@ " diff_size = int(window_sizes[i]/86400)\n", "\n", " timestamps = [dt.datetime.fromtimestamp(v.latest_time()) for v in views]\n", - " vertices = [v.num_vertices() for v in views]\n", + " vertices = [v.count_vertices() for v in views]\n", "\n", " to_join = pd.DataFrame({\"time\": timestamps, \"window\":[window_sizes[i] for k in range(len(timestamps))], \"vertices\": vertices})\n", " df = pd.concat([df,to_join],copy=False)\n", @@ -402,7 +402,7 @@ " sx_shuffled = shuffle_column(sx_df,col_number=2)\n", " g_shuff = load_pandas(sx_shuffled)\n", " views = g_shuff.rolling(window=window_sizes[i],step=86400)\n", - " results_vertices[ex,:] = np.array([v.num_vertices() for v in views])\n", + " results_vertices[ex,:] = np.array([v.count_vertices() for v in views])\n", " df.loc[df['window'] == w, 'vert_shuffled_mean'] = results_vertices.mean(axis=0)\n", " df.loc[df['window'] == w, 'vert_shuffled_sd'] = results_vertices.std(axis=0)" ] @@ -455,7 +455,7 @@ "outputs": [], "source": [ "views = g.rolling(window=3600, step=3600)\n", - "hour_vertices = [v.num_vertices() for v in views]" + "hour_vertices = [v.count_vertices() for v in views]" ] }, { diff --git a/examples/py/reddit/demo.ipynb b/examples/py/reddit/demo.ipynb index 
02462b1ec1..8413604f12 100644 --- a/examples/py/reddit/demo.ipynb +++ b/examples/py/reddit/demo.ipynb @@ -9,7 +9,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 31, "metadata": {}, "outputs": [], "source": [ @@ -25,24 +25,119 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 32, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Total vertices in the graph: 3\n", + " DateTime Name Recipient Type Layer\n", + "0 2023-09-06 10:00:00 Alice David Email 1\n", + "1 2023-09-06 11:30:00 Bob Eve Message 2\n", + "2 2023-09-06 13:45:00 Charlie Frank Call 3\n", + "3 2023-09-06 13:50:00 Alice David Message 4\n" + ] + } + ], + "source": [ + "import pandas as pd\n", + "\n", + "# Sample data for demonstration\n", + "data = {\n", + " 'DateTime': ['2023-09-06 10:00:00', '2023-09-06 11:30:00', '2023-09-06 13:45:00', '2023-09-06 13:50:00'],\n", + " 'Name': ['Alice', 'Bob', 'Charlie', 'Alice'],\n", + " 'Recipient': ['David', 'Eve', 'Frank', 'David'],\n", + " 'Type': ['Email', 'Message', 'Call', 'Message'],\n", + " \"Layer\": [1, 2, 3, 4],\n", + "}\n", + "\n", + "# Create the DataFrame\n", + "df = pd.DataFrame(data)\n", + "\n", + "# Print the DataFrame\n", + "print(df)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "df[\"DateTime\"] = pd.to_datetime(df[\"DateTime\"]).astype(\"datetime64[ms]\")" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading edges: 100%|██████████| 4.00/4.00 [00:00<00:00, 18.7Kit/s]0:00, 3.19Kit/s]" + ] + } + ], + "source": [ + "g=Graph()\n", + "g.load_edges_from_pandas(edge_df=df, src_col=\"Name\", dst_col=\"Recipient\", time_col=\"DateTime\", props=[\"Type\"], layer_in_df=\"Layer\")\n", + "df.dropna(axis=0, inplace=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Edge(source=Alice, target=David, earliest_time=1693994400000, latest_time=1694008200000, properties={Type: Message})\n" + ] + } + ], + "source": [ + "e= g.edge(\"Alice\",\"David\")\n", + "print(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vertex(name=Ben, earliest_time=\"1560419400000\", latest_time=\"1560419400000\", properties={type: person, _id: Ben})\n", + "Vertex(name=Hamza, earliest_time=\"1560419400000\", latest_time=\"1560419400000\", properties={type: person, _id: Hamza})\n", + "Edge(source=Ben, target=Hamza, earliest_time=1560419400000, latest_time=1560419400000, properties={type: friend})\n", + "Total vertices in the graph: 2\n", "Total vertices at 2021-01-01 12:40:00: 2\n" ] } ], "source": [ "g = Graph()\n", - "g.add_edge(\"2021-01-01 12:32:00\", \"Ben\", \"Hamza\", {\"type\": \"friend\"})\n", - "g.add_edge(\"2021-01-02 14:15:36\", \"Hamza\", \"Haaroon\", {\"type\": \"friend\"})\n", - "print(\"Total vertices in the graph:\", g.num_vertices())\n", - "print(\"Total vertices at 2021-01-01 12:40:00:\", g.at(\"2021-01-01 12:40:00\").num_vertices())\n" + "# e = g.add_edge(\"2021-01-01 12:32:00\", \"Ben\", \"Hamza\", {\"type\": \"friend\"})\n", + "\n", + "# g.add_edge(\"2021-01-02 14:15:36\", \"Hamza\", \"Haaroon\", {\"type\": \"friend\"})\n", + "vertex = g.add_vertex(\"2019-06-13 09:50:00\", \"Ben\", {\"type\": \"person\"})\n", 
+ "vertex2 = g.add_vertex(\"2019-06-13 09:50:00\", \"Hamza\", {\"type\": \"person\"})\n", + "edge = g.add_edge(\"2019-06-13 09:50:00\", \"Ben\", \"Hamza\", {\"type\": \"friend\"})\n", + "\n", + "# edge = g.add_edge(1, \"Ben\", \"Hamza\", {\"type\": \"friend\"})\n", + "# edge2 = g.add_edge(2, \"Ben\", \"Hamza\", {\"type\": \"friend\"})\n", + "print(vertex)\n", + "print(vertex2)\n", + "print(edge)\n", + "print(\"Total vertices in the graph:\", g.count_vertices())\n", + "print(\"Total vertices at 2021-01-01 12:40:00:\", g.at(\"2021-01-01 12:40:00\").count_vertices())\n" ] }, { @@ -54,7 +149,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 37, "metadata": {}, "outputs": [ { @@ -70,14 +165,14 @@ "g = Graph()\n", "\n", "\n", - "g.add_vertex(timestamp=1, id=\"ben\", properties={\"property 1\": 1, \"property 3\": \"hi\", \"property 4\": True})\n", + "v=g.add_vertex(timestamp=1, id=\"ben\", properties={\"property 1\": 1, \"property 3\": \"hi\", \"property 4\": True})\n", "g.add_vertex(timestamp=2, id=\"ben\", properties={\"property 1\": 2, \"property 2\": 0.6, \"property 4\": False})\n", "g.add_vertex(timestamp=3, id=\"ben\", properties={\"property 2\": 0.9, \"property 3\": \"hello\", \"property 4\": True})\n", "\n", - "g.add_vertex_properties(id=\"ben\", properties={\"unchanging property\": 123})\n", + "v.add_constant_properties(properties={\"unchanging property\": 123})\n", "\n", - "print(g.vertex(\"ben\").properties.temporal.get(\"property 1\").values())\n", - "print(g.vertex(\"ben\").properties.constant.get(\"unchanging property\"))\n" + "print(v.properties.temporal.get(\"property 1\").values())\n", + "print(v.properties.constant.get(\"unchanging property\"))\n" ] }, { @@ -89,16 +184,16 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 38, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Edge(source=ben, target=hamza, earliest_time=1, latest_time=3, properties={Properties({property 1: 2, property 4: true, property 3: hello, property 2: 0.9, static property: 123})})" + "Edge(source=ben, target=hamza, earliest_time=1, latest_time=3, properties={property 1: 2, property 4: true, property 3: hello, property 2: 0.9, static property: 123})" ] }, - "execution_count": 4, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -106,11 +201,11 @@ "source": [ "\n", "g.add_vertex(timestamp=1,id=\"hamza\")\n", - "g.add_edge(timestamp=1, src=\"ben\", dst=\"hamza\", properties={\"property 1\": 1, \"property 3\": \"hi\", \"property 4\": True})\n", + "e=g.add_edge(timestamp=1, src=\"ben\", dst=\"hamza\", properties={\"property 1\": 1, \"property 3\": \"hi\", \"property 4\": True})\n", "g.add_edge(timestamp=2, src=\"ben\", dst=\"hamza\", properties={\"property 1\": 2, \"property 2\": 0.6, \"property 4\": False})\n", "g.add_edge(timestamp=3, src=\"ben\", dst=\"hamza\", properties={\"property 2\": 0.9, \"property 3\": \"hello\", \"property 4\": True})\n", "\n", - "g.add_edge_properties(src=\"ben\", dst=\"hamza\", properties={\"static property\": 123})\n", + "e.add_constant_properties(properties={\"static property\": 123})\n", "\n", "g.edge(\"ben\",\"hamza\")" ] @@ -131,7 +226,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 39, "metadata": {}, "outputs": [], "source": [ @@ -212,14 +307,14 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 40, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Loading edges: 100%|██████████| 572K/572K [00:08<00:00, 
70.9Kit/s]s]00, 1.90Kit/s]" + "Loading edges: 100%|██████████| 572K/572K [00:25<00:00, 22.7Kit/s]s]08, 726it/s]es: 24%|██▄ | 137K/572K [00:05<00:18, 24.0Kit/s]" ] } ], @@ -229,7 +324,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 41, "metadata": {}, "outputs": [ { @@ -247,7 +342,7 @@ "PyPropHistValueList([[0.2732], [0.6249, 0.7957, 0], [0], [0.1779], [0.2003], [0.34], [0.4215], [-0.3182], [-0.4767], [0.296], ...])" ] }, - "execution_count": 7, + "execution_count": 41, "metadata": {}, "output_type": "execute_result" } @@ -261,7 +356,7 @@ "edge_perspective = reddit_graph.at(date).edge(\"conspiracy\",\"documentaries\")\n", "print(\"Most recent sentiment on\",date,\"-\",edge_perspective[\"compound_sentiment\"])\n", "\n", - "reddit_graph.vertex(\"conspiracy\").out_edges().dst().out_edges().properties.temporal.get(\"compound_sentiment\").values()" + "reddit_graph.vertex(\"conspiracy\").out_edges.dst.out_edges.properties.temporal.get(\"compound_sentiment\").values()" ] }, { @@ -273,14 +368,14 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 42, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "1217it [01:27, 13.98it/s]\n" + "1217it [01:06, 18.30it/s]\n" ] }, { @@ -289,7 +384,7 @@ "" ] }, - "execution_count": 8, + "execution_count": 42, "metadata": {}, "output_type": "execute_result" }, @@ -312,8 +407,8 @@ "edge_count = []\n", "\n", "for view in tqdm(views):\n", - " timestamps.append(view.latest_date_time())\n", - " edge_count.append(view.num_edges()) \n", + " timestamps.append(view.latest_date_time)\n", + " edge_count.append(view.count_edges()) \n", "\n", "sns.set_context()\n", "ax = plt.gca()\n", @@ -332,16 +427,16 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 50, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 9, + "execution_count": 50, "metadata": {}, "output_type": "execute_result" }, @@ -364,19 +459,12 @@ "negative_sentiment_in = []\n", "negative_sentiment_out = []\n", "\n", - "def weighted_average_degree(vertex,prop_name,incoming):\n", - " edges = vertex.in_edges() if incoming else vertex.out_edges()\n", - " total_weight=0\n", - " for edge_weights in edges.properties.temporal.get(prop_name).values():\n", - " total_weight+= sum(edge_weights)\n", - " return total_weight / max(1,len(list(edges)))\n", - "\n", "for vertex in views:\n", - " timestamps.append(vertex.latest_date_time())\n", - " positive_sentiment_in.append(weighted_average_degree(vertex,\"positive_sentiment\",incoming=True))\n", - " positive_sentiment_out.append(weighted_average_degree(vertex,\"positive_sentiment\",incoming=False))\n", - " negative_sentiment_in.append(weighted_average_degree(vertex,\"negative_sentiment\",incoming=True))\n", - " negative_sentiment_out.append(weighted_average_degree(vertex,\"negative_sentiment\",incoming=False)) \n", + " timestamps.append(vertex.latest_date_time)\n", + " positive_sentiment_in.append(vertex.in_edges.properties.temporal.get(\"positive_sentiment\").values().sum().mean())\n", + " positive_sentiment_out.append(vertex.out_edges.properties.temporal.get(\"positive_sentiment\").values().sum().mean())\n", + " negative_sentiment_in.append(vertex.in_edges.properties.temporal.get(\"negative_sentiment\").values().sum().mean())\n", + " negative_sentiment_out.append(vertex.out_edges.properties.temporal.get(\"negative_sentiment\").values().sum().mean())\n", "\n", "sns.set()\n", "sns.set_palette(\"pastel\")\n", @@ -408,7 +496,7 @@ }, { 
"cell_type": "code", - "execution_count": 10, + "execution_count": 51, "metadata": {}, "outputs": [ { @@ -417,11 +505,11 @@ "text": [ "The conspiracy subreddit has a pagerank score of 0.0028709357281156145\n", " Key Value\n", - "3928 askreddit 0.019556\n", - "12933 iama 0.015616\n", - "29558 pics 0.009884\n", - "2457 funny 0.009283\n", - "2812 videos 0.006105\n", + "1746 askreddit 0.019556\n", + "295 iama 0.015616\n", + "3236 pics 0.009884\n", + "22085 funny 0.009283\n", + "40067 videos 0.006105\n", "The top five ranked subreddits are [('askreddit', 0.019555592169738754), ('iama', 0.01561587791951029), ('pics', 0.009884204062062652), ('funny', 0.009282589235120708), ('videos', 0.006105153065518092)]\n" ] } @@ -451,7 +539,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -472,8 +560,15 @@ "component_with_biggest_size = max(component_sizes,key=component_sizes.get)\n", "#Get the value of this component\n", "lcc=component_sizes[component_with_biggest_size]\n", - "print(\"The largest component has\",lcc,\"nodes out of a total of\",reddit_graph.num_vertices())" + "print(\"The largest component has\",lcc,\"nodes out of a total of\",reddit_graph.count_vertices())" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/examples/rust/src/bin/bench/main.rs b/examples/rust/src/bin/bench/main.rs index a11702970b..166c08b8fd 100644 --- a/examples/rust/src/bin/bench/main.rs +++ b/examples/rust/src/bin/bench/main.rs @@ -43,8 +43,8 @@ fn main() { println!( "Loaded graph from encoded data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -70,8 +70,8 @@ fn main() { println!( "Loaded graph from CSV data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); diff --git a/examples/rust/src/bin/btc/main.rs b/examples/rust/src/bin/btc/main.rs index 9e35a6554a..339a9fd889 100644 --- a/examples/rust/src/bin/btc/main.rs +++ b/examples/rust/src/bin/btc/main.rs @@ -68,8 +68,8 @@ fn main() { println!( "Loaded graph from path {} with {} vertices, {} edges, took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -104,8 +104,8 @@ fn main() { println!( "Loaded graph from CSV data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -115,8 +115,8 @@ fn main() { g }; - assert_eq!(graph.num_vertices(), 9132396); - assert_eq!(graph.num_edges(), 5087223); + assert_eq!(graph.count_vertices(), 9132396); + assert_eq!(graph.count_edges(), 5087223); let windowed_graph = graph.window(0, i64::MAX); diff --git a/examples/rust/src/bin/crypto/main.rs b/examples/rust/src/bin/crypto/main.rs index 9546e0c721..fb635a2d10 100644 --- a/examples/rust/src/bin/crypto/main.rs +++ b/examples/rust/src/bin/crypto/main.rs @@ -19,11 +19,11 @@ fn main() { let g = stable_coin_graph(data_dir, true); - assert_eq!(g.num_vertices(), 1523333); - assert_eq!(g.num_edges(), 2814155); + assert_eq!(g.count_vertices(), 1523333); + assert_eq!(g.count_edges(), 
2814155); assert_eq!( - g.get_unique_layers().into_iter().sorted().collect_vec(), + g.unique_layers().into_iter().sorted().collect_vec(), vec!["Dai", "LUNC", "USD", "USDP", "USDT", "USTC"] ); diff --git a/examples/rust/src/bin/hulongbay/main.rs b/examples/rust/src/bin/hulongbay/main.rs index c1a4e6520e..0214733abf 100644 --- a/examples/rust/src/bin/hulongbay/main.rs +++ b/examples/rust/src/bin/hulongbay/main.rs @@ -64,8 +64,8 @@ pub fn loader(data_dir: &Path) -> Result> { println!( "Loaded graph from path {} with {} vertices, {} edges, took {} seconds", encoded_data_dir.display(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -95,8 +95,8 @@ pub fn loader(data_dir: &Path) -> Result> { println!( "Loaded graph from CSV data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.display(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -162,7 +162,7 @@ fn try_main() -> Result<(), Box> { ); let now = Instant::now(); - let num_windowed_edges2 = window.num_edges(); + let num_windowed_edges2 = window.count_edges(); println!( "Window num_edges returned {} in {} seconds", num_windowed_edges2, @@ -190,7 +190,7 @@ fn try_main_bm() -> Result<(), Box> { println!("graph time range: {}-{}", earliest_time, latest_time); let now = Instant::now(); - let num_edges2 = graph.num_edges(); + let num_edges2 = graph.count_edges(); println!( "num_edges returned {} in {} milliseconds", num_edges2, diff --git a/examples/rust/src/bin/lotr/main.rs b/examples/rust/src/bin/lotr/main.rs index 07fe2681cc..530cfe5970 100644 --- a/examples/rust/src/bin/lotr/main.rs +++ b/examples/rust/src/bin/lotr/main.rs @@ -44,8 +44,8 @@ fn main() { println!( "Loaded graph from encoded data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -87,8 +87,8 @@ fn main() { println!( "Loaded graph from CSV data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -98,8 +98,8 @@ fn main() { g }; - assert_eq!(graph.num_vertices(), 139); - assert_eq!(graph.num_edges(), 701); + assert_eq!(graph.count_vertices(), 139); + assert_eq!(graph.count_edges(), 701); let gandalf = hashing::calculate_hash(&"Gandalf"); diff --git a/examples/rust/src/bin/pokec/main.rs b/examples/rust/src/bin/pokec/main.rs index 9cbacfa3bc..1d3cd55023 100644 --- a/examples/rust/src/bin/pokec/main.rs +++ b/examples/rust/src/bin/pokec/main.rs @@ -45,8 +45,8 @@ fn main() { println!( "Loaded graph from encoded data files {} with {} vertices, {} edges which took {} seconds", data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); diff --git a/python/python/raphtory/export.py b/python/python/raphtory/export.py index 4fb898c62c..c643d65fd5 100644 --- a/python/python/raphtory/export.py +++ b/python/python/raphtory/export.py @@ -67,7 +67,7 @@ def to_pyvis( ) } - for v in graph.vertices(): + for v in graph.vertices: image = ( v.properties.get(node_image) if node_image != None @@ -76,14 +76,14 @@ def to_pyvis( shape = shape if shape is not None else "dot" if colour_nodes_by_type: visGraph.add_node( - v.id(), - label=v.name(), + v.id, + label=v.name, shape=shape, image=image, 
group=groups[v.properties.get(type_property)], ) else: - visGraph.add_node(v.id(), label=v.name(), shape=shape, image=image) + visGraph.add_node(v.id, label=v.name, shape=shape, image=image) edges = graph.edges().explode() if explode_edges else graph.edges().explode_layers() for e in edges: @@ -94,8 +94,8 @@ def to_pyvis( if label is None: label = "" visGraph.add_edge( - e.src().id(), - e.dst().id(), + e.src.id, + e.dst.id, value=weight, color=edge_color, title=label, @@ -133,7 +133,7 @@ def to_networkx( networkXGraph = nx.MultiDiGraph() vertex_tuples = [] - for v in graph.vertices(): + for v in graph.vertices: properties = {} if include_vertex_properties: if include_property_histories: @@ -143,27 +143,27 @@ def to_networkx( properties = v.properties.as_dict() if include_update_history: properties.update({"update_history": v.history()}) - vertex_tuples.append((v.name(), properties)) + vertex_tuples.append((v.name, properties)) networkXGraph.add_nodes_from(vertex_tuples) edge_tuples = [] edges = graph.edges().explode() if explode_edges else graph.edges().explode_layers() for e in edges: properties = {} - src = e.src().name() - dst = e.dst().name() + src = e.src.name + dst = e.dst.name if include_edge_properties: if include_property_histories: properties.update(e.properties.constant.as_dict()) properties.update(e.properties.temporal.histories()) else: properties = e.properties.as_dict() - layer = e.layer_name() + layer = e.layer_name if layer is not None: properties.update({"layer": layer}) if include_update_history: if explode_edges: - properties.update({"update_history": e.time()}) + properties.update({"update_history": e.time}) else: properties.update({"update_history": e.history()}) edge_tuples.append((src, dst, properties)) @@ -205,7 +205,7 @@ def to_edge_df( edges = graph.edges().explode() if explode_edges else graph.edges().explode_layers() for e in edges: - tuple = [e.src().name(), e.dst().name(), e.layer_name()] + tuple = [e.src.name, e.dst.name, e.layer_name] if include_edge_properties: properties = {} if include_property_histories: @@ -217,7 +217,7 @@ def to_edge_df( if include_update_history: if explode_edges: - tuple.append(e.time()) + tuple.append(e.time) else: tuple.append(e.history()) @@ -255,8 +255,8 @@ def to_vertex_df( if include_update_history: columns.append("update_history") - for v in graph.vertices(): - tuple = [v.name()] + for v in graph.vertices: + tuple = [v.name] if include_vertex_properties: properties = {} if include_property_histories: diff --git a/python/tests/notebook.ipynb b/python/tests/notebook.ipynb index fc05b71fac..fe85c71777 100644 --- a/python/tests/notebook.ipynb +++ b/python/tests/notebook.ipynb @@ -72,7 +72,7 @@ "# checking edge 1,2 exists and 2,1 doesn't as Raphtory is directed\n", "print(g.has_edge(1,2),g.has_edge(2,1))\n", "# Check the total number of edges and vertices\n", - "print(g.num_edges(),g.num_vertices())\n", + "print(g.count_edges(),g.count_vertices())\n", "\n", "# Adding vertices and edges with String IDs\n", "g.add_vertex(timestamp=5,id=\"Ben\")\n", @@ -81,7 +81,7 @@ "# Performing the same checks as before, but with strings\n", "print(g.has_vertex(id=\"Ben\"), g.has_vertex(id=\"Hamza\"), g.has_vertex(id=\"Dave\"))\n", "print(g.has_edge(src=\"Hamza\",dst=\"Ben\"),g.has_edge(src=\"Ben\",dst=\"Hamza\"))\n", - "print(g.num_edges(),g.num_vertices())\n", + "print(g.count_edges(),g.count_vertices())\n", "\n", "#Add an edge with Temporal Properties which can change over time\n", "e = 
g.add_edge(timestamp=7,src=\"Haaroon\",dst=\"Hamza\",properties={\"property1\": 1, \"property2\": 9.8, \"property3\": \"test\"})\n", @@ -135,7 +135,7 @@ "\n", "ids = []\n", "degrees = []\n", - "for v in view.vertices():\n", + "for v in view.vertices:\n", " ids.append(v.id)\n", " degrees.append(v.degree())\n", "\n", @@ -176,10 +176,10 @@ "degree = []\n", "\n", "for view in views:\n", - " timestamps.append(view.latest_time())\n", + " timestamps.append(view.latest_time)\n", " #vertex_count.append(view.num_vertices()) \n", " #edge_count.append(view.num_edges())\n", - " degree.append(view.num_edges()/max(1,view.num_vertices())) \n", + " degree.append(view.count_edges()/max(1,view.count_vertices())) \n", " \n", "sns.set_context()\n", "ax = plt.gca()\n", @@ -201,7 +201,7 @@ "degree = []\n", "\n", "for view in views:\n", - " timestamps.append(view.latest_time())\n", + " timestamps.append(view.latest_time)\n", " gandalf = view.vertex(\"Gandalf\")\n", " if(gandalf is not None):\n", " degree.append(gandalf.degree())\n", @@ -244,4 +244,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} diff --git a/python/tests/test_graph_conversions.py b/python/tests/test_graph_conversions.py index 891be8b5fb..a0beed1628 100644 --- a/python/tests/test_graph_conversions.py +++ b/python/tests/test_graph_conversions.py @@ -954,7 +954,8 @@ def test_to_df(): g = build_graph() compare_df( - export.to_edge_df(g), pd.read_json(base_dir / "expected/dataframe_output/edge_df_all.json") + export.to_edge_df(g), + pd.read_json(base_dir / "expected/dataframe_output/edge_df_all.json"), ) compare_df( @@ -978,17 +979,23 @@ def test_to_df(): ) compare_df( export.to_edge_df(g, explode_edges=True, include_edge_properties=False), - pd.read_json(base_dir / "expected/dataframe_output/edge_df_exploded_no_props.json"), + pd.read_json( + base_dir / "expected/dataframe_output/edge_df_exploded_no_props.json" + ), ) compare_df( export.to_edge_df(g, explode_edges=True, include_update_history=False), - pd.read_json(base_dir / "expected/dataframe_output/edge_df_exploded_no_hist.json"), + pd.read_json( + base_dir / "expected/dataframe_output/edge_df_exploded_no_hist.json" + ), ) compare_df( export.to_edge_df(g, explode_edges=True, include_property_histories=False), - pd.read_json(base_dir / "expected/dataframe_output/edge_df_exploded_no_prop_hist.json"), + pd.read_json( + base_dir / "expected/dataframe_output/edge_df_exploded_no_prop_hist.json" + ), ) compare_df( @@ -1005,5 +1012,7 @@ def test_to_df(): ) compare_df( export.to_vertex_df(g, include_property_histories=False), - pd.read_json(base_dir / "expected/dataframe_output/vertex_df_no_prop_hist.json"), + pd.read_json( + base_dir / "expected/dataframe_output/vertex_df_no_prop_hist.json" + ), ) diff --git a/python/tests/test_graphdb.py b/python/tests/test_graphdb.py index 86388e1a2a..0651897405 100644 --- a/python/tests/test_graphdb.py +++ b/python/tests/test_graphdb.py @@ -44,19 +44,19 @@ def create_graph_with_deletions(): def test_graph_len_edge_len(): g = create_graph() - assert g.num_vertices() == 3 - assert g.num_edges() == 5 + assert g.count_vertices() == 3 + assert g.count_edges() == 5 def test_id_iterable(): g = create_graph() - assert g.vertices.id().max() == 3 - assert g.vertices.id().min() == 1 - assert set(g.vertices.id().collect()) == {1, 2, 3} - out_neighbours = g.vertices.out_neighbours().id().collect() + assert g.vertices.id.max() == 3 + assert g.vertices.id.min() == 1 + assert set(g.vertices.id.collect()) == {1, 2, 3} + out_neighbours = 
g.vertices.out_neighbours.id.collect() out_neighbours = (set(n) for n in out_neighbours) - out_neighbours = dict(zip(g.vertices.id(), out_neighbours)) + out_neighbours = dict(zip(g.vertices.id, out_neighbours)) assert out_neighbours == {1: {1, 2, 3}, 2: {1}, 3: {2}} @@ -79,8 +79,8 @@ def test_degree_iterable(): def test_vertices_time_iterable(): g = create_graph() - assert g.vertices.earliest_time().min() == -1 - assert g.vertices.latest_time().max() == 7 + assert g.vertices.earliest_time.min() == -1 + assert g.vertices.latest_time.max() == 7 def test_graph_has_edge(): @@ -108,7 +108,7 @@ def test_windowed_graph_get_vertex(): view = g.window(0, sys.maxsize) - assert view.vertex(1).id() == 1 + assert view.vertex(1).id == 1 assert view.vertex(10) is None assert view.vertex(1).degree() == 3 @@ -118,17 +118,17 @@ def test_windowed_graph_degree(): view = g.window(0, sys.maxsize) - degrees = [v.degree() for v in view.vertices()] + degrees = [v.degree() for v in view.vertices] degrees.sort() assert degrees == [2, 2, 3] - in_degrees = [v.in_degree() for v in view.vertices()] + in_degrees = [v.in_degree() for v in view.vertices] in_degrees.sort() assert in_degrees == [1, 1, 2] - out_degrees = [v.out_degree() for v in view.vertices()] + out_degrees = [v.out_degree() for v in view.vertices] out_degrees.sort() assert out_degrees == [0, 1, 3] @@ -142,14 +142,14 @@ def test_windowed_graph_get_edge(): view = g.window(min_size, max_size) - assert (view.edge(1, 3).src().id(), view.edge(1, 3).dst().id()) == (1, 3) + assert (view.edge(1, 3).src.id, view.edge(1, 3).dst.id) == (1, 3) assert view.edge(2, 3) is None assert view.edge(6, 5) is None - assert (view.vertex(1).id(), view.vertex(3).id()) == (1, 3) + assert (view.vertex(1).id, view.vertex(3).id) == (1, 3) view = g.window(2, 3) - assert (view.edge(1, 3).src().id(), view.edge(1, 3).dst().id()) == (1, 3) + assert (view.edge(1, 3).src.id, view.edge(1, 3).dst.id) == (1, 3) view = g.window(3, 7) assert view.edge(1, 3) is None @@ -160,27 +160,27 @@ def test_windowed_graph_edges(): view = g.window(0, sys.maxsize) - tedges = [v.edges() for v in view.vertices()] + tedges = [v.edges for v in view.vertices] edges = [] for e_iter in tedges: for e in e_iter: - edges.append([e.src().id(), e.dst().id()]) + edges.append([e.src.id, e.dst.id]) assert edges == [[1, 1], [1, 1], [1, 2], [1, 3], [1, 2], [3, 2], [1, 3], [3, 2]] - tedges = [v.in_edges() for v in view.vertices()] + tedges = [v.in_edges for v in view.vertices] in_edges = [] for e_iter in tedges: for e in e_iter: - in_edges.append([e.src().id(), e.dst().id()]) + in_edges.append([e.src.id, e.dst.id]) assert in_edges == [[1, 1], [1, 2], [3, 2], [1, 3]] - tedges = [v.out_edges() for v in view.vertices()] + tedges = [v.out_edges for v in view.vertices] out_edges = [] for e_iter in tedges: for e in e_iter: - out_edges.append([e.src().id(), e.dst().id()]) + out_edges.append([e.src.id, e.dst.id]) assert out_edges == [[1, 1], [1, 2], [1, 3], [3, 2]] @@ -188,11 +188,11 @@ def test_windowed_graph_edges(): def test_windowed_graph_vertex_ids(): g = create_graph() - vs = [v for v in g.window(-1, 2).vertices().id()] + vs = [v for v in g.window(-1, 2).vertices.id] vs.sort() assert vs == [1, 2] # this makes clear that the end of the range is exclusive - vs = [v for v in g.window(-5, 3).vertices().id()] + vs = [v for v in g.window(-5, 3).vertices.id] vs.sort() assert vs == [1, 2, 3] @@ -202,7 +202,7 @@ def test_windowed_graph_vertices(): view = g.window(-1, 0) - vertices = list(view.vertices().id()) + vertices = 
list(view.vertices.id) assert vertices == [1, 2] @@ -215,13 +215,13 @@ def test_windowed_graph_neighbours(): view = g.window(min_size, max_size) - neighbours = view.vertices.neighbours().id().collect() + neighbours = view.vertices.neighbours.id.collect() assert neighbours == [[1, 2, 3], [1, 3], [1, 2]] - in_neighbours = view.vertices.in_neighbours().id().collect() + in_neighbours = view.vertices.in_neighbours.id.collect() assert in_neighbours == [[1, 2], [1, 3], [1]] - out_neighbours = view.vertices.out_neighbours().id().collect() + out_neighbours = view.vertices.out_neighbours.id.collect() assert out_neighbours == [[1, 2, 3], [1], [2]] @@ -230,9 +230,9 @@ def test_name(): g.add_vertex(1, "Ben") g.add_vertex(1, 10) g.add_edge(1, "Ben", "Hamza") - assert g.vertex(10).name() == "10" - assert g.vertex("Ben").name() == "Ben" - assert g.vertex("Hamza").name() == "Hamza" + assert g.vertex(10).name == "10" + assert g.vertex("Ben").name == "Ben" + assert g.vertex("Hamza").name == "Hamza" def test_getitem(): @@ -382,11 +382,11 @@ def history_test(key, value): if value is None: assert g.vertex(1).properties.temporal.get(key) is None assert g.vertices.properties.temporal.get(key) is None - assert g.vertices.out_neighbours().properties.temporal.get(key) is None + assert g.vertices.out_neighbours.properties.temporal.get(key) is None else: assert g.vertex(1).properties.temporal.get(key).items() == value assert g.vertices.properties.temporal.get(key).items() == [value] - assert g.vertices.out_neighbours().properties.temporal.get(key).items() == [ + assert g.vertices.out_neighbours.properties.temporal.get(key).items() == [ [value] ] @@ -401,13 +401,12 @@ def time_history_test(time, key, value): assert g.at(time).vertex(1).properties.temporal.get(key) is None assert g.at(time).vertices.properties.temporal.get(key) is None assert ( - g.at(time).vertices.out_neighbours().properties.temporal.get(key) - is None + g.at(time).vertices.out_neighbours.properties.temporal.get(key) is None ) else: assert g.at(time).vertex(1).properties.temporal.get(key).items() == value assert g.at(time).vertices.properties.temporal.get(key).items() == [value] - assert g.at(time).vertices.out_neighbours().properties.temporal.get( + assert g.at(time).vertices.out_neighbours.properties.temporal.get( key ).items() == [[value]] @@ -419,23 +418,21 @@ def time_static_property_test(time, key, value): if value is None: assert gg.vertex(1).properties.constant.get(key) is None assert gg.vertices.properties.constant.get(key) is None - assert gg.vertices.out_neighbours().properties.constant.get(key) is None + assert gg.vertices.out_neighbours.properties.constant.get(key) is None else: assert gg.vertex(1).properties.constant.get(key) == value assert gg.vertices.properties.constant.get(key) == [value] - assert gg.vertices.out_neighbours().properties.constant.get(key) == [ - [value] - ] + assert gg.vertices.out_neighbours.properties.constant.get(key) == [[value]] def static_property_test(key, value): if value is None: assert g.vertex(1).properties.constant.get(key) is None assert g.vertices.properties.constant.get(key) is None - assert g.vertices.out_neighbours().properties.constant.get(key) is None + assert g.vertices.out_neighbours.properties.constant.get(key) is None else: assert g.vertex(1).properties.constant.get(key) == value assert g.vertices.properties.constant.get(key) == [value] - assert g.vertices.out_neighbours().properties.constant.get(key) == [[value]] + assert g.vertices.out_neighbours.properties.constant.get(key) == [[value]] 
time_static_property_test(1, "static prop", 123) time_static_property_test(100, "static prop", 123) @@ -448,31 +445,31 @@ def time_property_test(time, key, value): if value is None: assert gg.vertex(1).properties.get(key) is None assert gg.vertices.properties.get(key) is None - assert gg.vertices.out_neighbours().properties.get(key) is None + assert gg.vertices.out_neighbours.properties.get(key) is None else: assert gg.vertex(1).properties.get(key) == value assert gg.vertices.properties.get(key) == [value] - assert gg.vertices.out_neighbours().properties.get(key) == [[value]] + assert gg.vertices.out_neighbours.properties.get(key) == [[value]] def property_test(key, value): if value is None: assert g.vertex(1).properties.get(key) is None assert g.vertices.properties.get(key) is None - assert g.vertices.out_neighbours().properties.get(key) is None + assert g.vertices.out_neighbours.properties.get(key) is None else: assert g.vertex(1).properties.get(key) == value assert g.vertices.properties.get(key) == [value] - assert g.vertices.out_neighbours().properties.get(key) == [[value]] + assert g.vertices.out_neighbours.properties.get(key) == [[value]] def no_static_property_test(key, value): if value is None: assert g.vertex(1).properties.temporal.get(key) is None assert g.vertices.properties.temporal.get(key) is None - assert g.vertices.out_neighbours().properties.temporal.get(key) is None + assert g.vertices.out_neighbours.properties.temporal.get(key) is None else: assert g.vertex(1).properties.temporal.get(key).value() == value assert g.vertices.properties.temporal.get(key).value() == [value] - assert g.vertices.out_neighbours().properties.temporal.get(key).value() == [ + assert g.vertices.out_neighbours.properties.temporal.get(key).value() == [ [value] ] @@ -498,7 +495,7 @@ def no_static_property_test(key, value): "prop 4": [True], "static prop": [123], } - assert g.vertices.out_neighbours().properties == { + assert g.vertices.out_neighbours.properties == { "prop 2": [[0.9]], "prop 3": [["hello"]], "prop 1": [[2]], @@ -518,7 +515,7 @@ def no_static_property_test(key, value): "prop 1": [2], "prop 4": [True], } - assert g.vertices.out_neighbours().properties.temporal.latest() == { + assert g.vertices.out_neighbours.properties.temporal.latest() == { "prop 2": [[0.9]], "prop 3": [["hello"]], "prop 1": [[2]], @@ -539,7 +536,7 @@ def no_static_property_test(key, value): "static prop": [123], "prop 3": ["hi"], } - assert g.at(2).vertices.out_neighbours().properties == { + assert g.at(2).vertices.out_neighbours.properties == { "prop 1": [[2]], "prop 4": [[False]], "prop 2": [[0.6]], @@ -560,7 +557,7 @@ def no_static_property_test(key, value): "prop 4": [[(1, True), (2, False), (3, True)]], "prop 2": [[(2, 0.6), (3, 0.9)]], } - assert g.vertices.out_neighbours().properties.temporal == { + assert g.vertices.out_neighbours.properties.temporal == { "prop 3": [[[(1, "hi"), (3, "hello")]]], "prop 1": [[[(1, 1), (2, 2)]]], "prop 4": [[[(1, True), (2, False), (3, True)]]], @@ -579,7 +576,7 @@ def no_static_property_test(key, value): "prop 1": [[(1, 1), (2, 2)]], "prop 3": [[(1, "hi")]], } - assert g.at(2).vertices.out_neighbours().properties.temporal == { + assert g.at(2).vertices.out_neighbours.properties.temporal == { "prop 2": [[[(2, 0.6)]]], "prop 4": [[[(1, True), (2, False)]]], "prop 1": [[[(1, 1), (2, 2)]]], @@ -590,13 +587,13 @@ def no_static_property_test(key, value): expected_names = sorted(["prop 4", "prop 1", "prop 2", "prop 3", "static prop"]) assert sorted(g.vertex(1).properties.keys()) == 
expected_names assert sorted(g.vertices.properties.keys()) == expected_names - assert sorted(g.vertices.out_neighbours().properties.keys()) == expected_names + assert sorted(g.vertices.out_neighbours.properties.keys()) == expected_names expected_names_no_static = sorted(["prop 4", "prop 1", "prop 2", "prop 3"]) assert sorted(g.vertex(1).properties.temporal.keys()) == expected_names_no_static assert sorted(g.vertices.properties.temporal.keys()) == expected_names_no_static assert ( - sorted(g.vertices.out_neighbours().properties.temporal.keys()) + sorted(g.vertices.out_neighbours.properties.temporal.keys()) == expected_names_no_static ) @@ -610,50 +607,50 @@ def no_static_property_test(key, value): == expected_names_no_static_at_1 ) assert ( - sorted(g.at(1).vertices.out_neighbours().properties.temporal.keys()) + sorted(g.at(1).vertices.out_neighbours.properties.temporal.keys()) == expected_names_no_static_at_1 ) # testing has_property assert "prop 4" in g.vertex(1).properties assert "prop 4" in g.vertices.properties - assert "prop 4" in g.vertices.out_neighbours().properties + assert "prop 4" in g.vertices.out_neighbours.properties assert "prop 2" in g.vertex(1).properties assert "prop 2" in g.vertices.properties - assert "prop 2" in g.vertices.out_neighbours().properties + assert "prop 2" in g.vertices.out_neighbours.properties assert "prop 5" not in g.vertex(1).properties assert "prop 5" not in g.vertices.properties - assert "prop 5" not in g.vertices.out_neighbours().properties + assert "prop 5" not in g.vertices.out_neighbours.properties assert "prop 2" not in g.at(1).vertex(1).properties assert "prop 2" not in g.at(1).vertices.properties - assert "prop 2" not in g.at(1).vertices.out_neighbours().properties + assert "prop 2" not in g.at(1).vertices.out_neighbours.properties assert "static prop" in g.vertex(1).properties assert "static prop" in g.vertices.properties - assert "static prop" in g.vertices.out_neighbours().properties + assert "static prop" in g.vertices.out_neighbours.properties assert "static prop" in g.at(1).vertex(1).properties assert "static prop" in g.at(1).vertices.properties - assert "static prop" in g.at(1).vertices.out_neighbours().properties + assert "static prop" in g.at(1).vertices.out_neighbours.properties assert "static prop" not in g.at(1).vertex(1).properties.temporal assert "static prop" not in g.at(1).vertices.properties.temporal - assert "static prop" not in g.at(1).vertices.out_neighbours().properties.temporal + assert "static prop" not in g.at(1).vertices.out_neighbours.properties.temporal assert "static prop" in g.vertex(1).properties.constant assert "static prop" in g.vertices.properties.constant - assert "static prop" in g.vertices.out_neighbours().properties.constant + assert "static prop" in g.vertices.out_neighbours.properties.constant assert "prop 2" not in g.vertex(1).properties.constant assert "prop 2" not in g.vertices.properties.constant - assert "prop 2" not in g.vertices.out_neighbours().properties.constant + assert "prop 2" not in g.vertices.out_neighbours.properties.constant assert "static prop" in g.at(1).vertex(1).properties.constant assert "static prop" in g.at(1).vertices.properties.constant - assert "static prop" in g.at(1).vertices.out_neighbours().properties.constant + assert "static prop" in g.at(1).vertices.out_neighbours.properties.constant def test_edge_properties(): @@ -781,7 +778,7 @@ def test_exploded_edge_time(): his = e.history() exploded_his = [] for ee in e.explode(): - exploded_his.append(ee.time()) + 
exploded_his.append(ee.time) assert his == exploded_his @@ -822,8 +819,8 @@ def test_algorithms(): def test_graph_time_api(): g = create_graph() - earliest_time = g.earliest_time() - latest_time = g.latest_time() + earliest_time = g.earliest_time + latest_time = g.latest_time assert len(list(g.rolling(1))) == latest_time - earliest_time + 1 assert len(list(g.expanding(2))) == math.ceil((latest_time + 1 - earliest_time) / 2) @@ -912,9 +909,9 @@ def test_all_neighbours_window(): view = g.at(2) v = view.vertex(2) - assert list(v.window(0, 2).in_neighbours().id()) == [1] - assert list(v.window(0, 2).out_neighbours().id()) == [3] - assert list(v.window(0, 2).neighbours().id()) == [1, 3] + assert list(v.window(0, 2).in_neighbours.id) == [1] + assert list(v.window(0, 2).out_neighbours.id) == [3] + assert list(v.window(0, 2).neighbours.id) == [1, 3] def test_all_degrees_window(): @@ -952,27 +949,31 @@ def test_all_edge_window(): view = g.at(4) v = view.vertex(2) - assert sorted(v.window(0, 4).in_edges().src().id()) == [1, 3, 4] - assert sorted(v.window(t_end=4).in_edges().src().id()) == [1, 3, 4] - assert sorted(v.window(t_start=2).in_edges().src().id()) == [3, 4] - assert sorted(v.window(0, 4).out_edges().dst().id()) == [3] - assert sorted(v.window(t_end=3).out_edges().dst().id()) == [3] - assert sorted(v.window(t_start=2).out_edges().dst().id()) == [4] - assert sorted((e.src().id(), e.dst().id()) for e in v.window(0, 4).edges()) == [ + assert sorted(v.window(0, 4).in_edges.src.id) == [1, 3, 4] + assert sorted(v.window(t_end=4).in_edges.src.id) == [1, 3, 4] + assert sorted(v.window(t_start=2).in_edges.src.id) == [3, 4] + assert sorted(v.window(0, 4).out_edges.dst.id) == [3] + assert sorted(v.window(t_end=3).out_edges.dst.id) == [3] + assert sorted(v.window(t_start=2).out_edges.dst.id) == [4] + assert sorted((e.src.id, e.dst.id) for e in v.window(0, 4).edges) == [ + (1, 2), + (2, 3), + (3, 2), + (4, 2), + ] + assert sorted((e.src.id, e.dst.id) for e in v.window(t_end=4).edges) == [ (1, 2), (2, 3), (3, 2), (4, 2), ] - assert sorted((e.src().id(), e.dst().id()) for e in v.window(t_end=4).edges()) == [ + assert sorted((e.src.id, e.dst.id) for e in v.window(t_start=1).edges) == [ (1, 2), (2, 3), + (2, 4), (3, 2), (4, 2), ] - assert sorted( - (e.src().id(), e.dst().id()) for e in v.window(t_start=1).edges() - ) == [(1, 2), (2, 3), (2, 4), (3, 2), (4, 2)] def test_static_prop_change(): @@ -1031,28 +1032,28 @@ def test_edge_time_apis(): e = g.edge(1, 2) for e in e.expanding(1): - assert e.src().name() == "1" - assert e.dst().name() == "2" + assert e.src.name == "1" + assert e.dst.name == "2" ls = [] - for e in v.edges(): - ls.append(e.src().name()) - ls.append(e.dst().name()) + for e in v.edges: + ls.append(e.src.name) + ls.append(e.dst.name) assert ls == ["1", "2", "1", "5"] v = g.vertex(2) ls = [] - for e in v.in_edges(): - ls.append(e.src().name()) - ls.append(e.dst().name()) + for e in v.in_edges: + ls.append(e.src.name) + ls.append(e.dst.name) assert ls == ["1", "2"] ls = [] - for e in v.out_edges(): - ls.append(e.src().name()) - ls.append(e.dst().name()) + for e in v.out_edges: + ls.append(e.src.name) + ls.append(e.dst.name) assert ls == ["2", "4"] @@ -1066,13 +1067,13 @@ def test_edge_earliest_latest_time(): g.add_edge(1, 1, 3, {}) g.add_edge(2, 1, 3, {}) - assert g.edge(1, 2).earliest_time() == 0 - assert g.edge(1, 2).latest_time() == 2 + assert g.edge(1, 2).earliest_time == 0 + assert g.edge(1, 2).latest_time == 2 - assert list(g.vertex(1).edges().earliest_time()) == [0, 0] - assert 
list(g.vertex(1).edges().latest_time()) == [2, 2] - assert list(g.vertex(1).at(1).edges().earliest_time()) == [0, 0] - assert list(g.vertex(1).at(1).edges().latest_time()) == [1, 1] + assert list(g.vertex(1).edges.earliest_time) == [0, 0] + assert list(g.vertex(1).edges.latest_time) == [2, 2] + assert list(g.vertex(1).at(1).edges.earliest_time) == [0, 0] + assert list(g.vertex(1).at(1).edges.latest_time) == [1, 1] def test_vertex_earliest_time(): @@ -1082,11 +1083,11 @@ def test_vertex_earliest_time(): g.add_vertex(2, 1, {}) view = g.at(1) - assert view.vertex(1).earliest_time() == 0 - assert view.vertex(1).latest_time() == 1 + assert view.vertex(1).earliest_time == 0 + assert view.vertex(1).latest_time == 1 view = g.at(3) - assert view.vertex(1).earliest_time() == 0 - assert view.vertex(1).latest_time() == 2 + assert view.vertex(1).earliest_time == 0 + assert view.vertex(1).latest_time == 2 def test_vertex_history(): @@ -1348,9 +1349,9 @@ def test_layer(): g.add_edge(0, 1, 3, layer="layer1") g.add_edge(0, 1, 4, layer="layer2") - assert g.default_layer().num_edges() == 1 - assert g.layers(["layer1"]).num_edges() == 1 - assert g.layers(["layer2"]).num_edges() == 1 + assert g.default_layer().count_edges() == 1 + assert g.layers(["layer1"]).count_edges() == 1 + assert g.layers(["layer2"]).count_edges() == 1 def test_layer_vertex(): @@ -1359,21 +1360,15 @@ def test_layer_vertex(): g.add_edge(0, 1, 2, layer="layer1") g.add_edge(0, 2, 3, layer="layer2") g.add_edge(3, 2, 4, layer="layer1") - neighbours = g.layers(["layer1", "layer2"]).vertex(1).neighbours().collect() - assert sorted(neighbours[0].layers(["layer2"]).edges().id()) == [(2, 3)] - assert sorted(g.layers(["layer2"]).vertex(neighbours[0].name()).edges().id()) == [ - (2, 3) - ] - assert sorted(g.layers(["layer1"]).vertex(neighbours[0].name()).edges().id()) == [ + neighbours = g.layers(["layer1", "layer2"]).vertex(1).neighbours.collect() + assert sorted(neighbours[0].layers(["layer2"]).edges.id) == [(2, 3)] + assert sorted(g.layers(["layer2"]).vertex(neighbours[0].name).edges.id) == [(2, 3)] + assert sorted(g.layers(["layer1"]).vertex(neighbours[0].name).edges.id) == [ (1, 2), (2, 4), ] - assert sorted(g.layers(["layer1"]).edges().id()) == [(1, 2), (2, 4)] - assert sorted(g.layers(["layer1", "layer2"]).edges().id()) == [ - (1, 2), - (2, 3), - (2, 4), - ] + assert sorted(g.layers(["layer1"]).edges().id) == [(1, 2), (2, 4)] + assert sorted(g.layers(["layer1", "layer2"]).edges().id) == [(1, 2), (2, 3), (2, 4)] def test_rolling_as_iterable(): @@ -1386,8 +1381,8 @@ def test_rolling_as_iterable(): # a normal operation is reusing the object returned by rolling twice, to get both results and an index. 
# So the following should work fine: - n_vertices = [w.num_vertices() for w in rolling] - time_index = [w.start() for w in rolling] + n_vertices = [w.count_vertices() for w in rolling] + time_index = [w.start for w in rolling] assert n_vertices == [1, 0, 0, 1] assert time_index == [1, 2, 3, 4] @@ -1399,8 +1394,8 @@ def test_layer_name(): g.add_edge(0, 0, 1) g.add_edge(0, 0, 2, layer="awesome layer") - assert g.edge(0, 1).layer_names() == ["_default"] - assert g.edge(0, 2).layer_names() == ["awesome layer"] + assert g.edge(0, 1).layer_names == ["_default"] + assert g.edge(0, 2).layer_names == ["awesome layer"] def test_window_size(): @@ -1452,19 +1447,19 @@ def test_date_time(): g.add_edge("2014-02-04", 1, 4) g.add_edge("2014-02-05", 1, 2) - assert g.earliest_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert g.latest_date_time() == datetime.datetime(2014, 2, 5, 0, 0) + assert g.earliest_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert g.latest_date_time == datetime.datetime(2014, 2, 5, 0, 0) e = g.edge(1, 3) exploded_edges = [] for edge in e.explode(): - exploded_edges.append(edge.date_time()) + exploded_edges.append(edge.date_time) assert exploded_edges == [datetime.datetime(2014, 2, 3)] - assert g.edge(1, 2).earliest_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert g.edge(1, 2).latest_date_time() == datetime.datetime(2014, 2, 5, 0, 0) + assert g.edge(1, 2).earliest_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert g.edge(1, 2).latest_date_time == datetime.datetime(2014, 2, 5, 0, 0) - assert g.vertex(1).earliest_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert g.vertex(1).latest_date_time() == datetime.datetime(2014, 2, 5, 0, 0) + assert g.vertex(1).earliest_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert g.vertex(1).latest_date_time == datetime.datetime(2014, 2, 5, 0, 0) def test_date_time_window(): @@ -1479,22 +1474,22 @@ def test_date_time_window(): view = g.window("2014-02-02", "2014-02-04") view2 = g.window("2014-02-02", "2014-02-05") - assert view.start_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert view.end_date_time() == datetime.datetime(2014, 2, 4, 0, 0) + assert view.start_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert view.end_date_time == datetime.datetime(2014, 2, 4, 0, 0) - assert view.earliest_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert view.latest_date_time() == datetime.datetime(2014, 2, 3, 0, 0) + assert view.earliest_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert view.latest_date_time == datetime.datetime(2014, 2, 3, 0, 0) - assert view2.edge(1, 2).start_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert view2.edge(1, 2).end_date_time() == datetime.datetime(2014, 2, 5, 0, 0) + assert view2.edge(1, 2).start_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert view2.edge(1, 2).end_date_time == datetime.datetime(2014, 2, 5, 0, 0) - assert view.vertex(1).earliest_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert view.vertex(1).latest_date_time() == datetime.datetime(2014, 2, 3, 0, 0) + assert view.vertex(1).earliest_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert view.vertex(1).latest_date_time == datetime.datetime(2014, 2, 3, 0, 0) e = view.edge(1, 2) exploded_edges = [] for edge in e.explode(): - exploded_edges.append(edge.date_time()) + exploded_edges.append(edge.date_time) assert exploded_edges == [datetime.datetime(2014, 2, 2)] @@ -1509,17 +1504,17 @@ def test_datetime_add_vertex(): view = g.window("2014-02-02", "2014-02-04") 
view2 = g.window("2014-02-02", "2014-02-05") - assert view.start_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert view.end_date_time() == datetime.datetime(2014, 2, 4, 0, 0) + assert view.start_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert view.end_date_time == datetime.datetime(2014, 2, 4, 0, 0) - assert view2.earliest_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert view2.latest_date_time() == datetime.datetime(2014, 2, 4, 0, 0) + assert view2.earliest_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert view2.latest_date_time == datetime.datetime(2014, 2, 4, 0, 0) - assert view2.vertex(1).start_date_time() == datetime.datetime(2014, 2, 2, 0, 0) - assert view2.vertex(1).end_date_time() == datetime.datetime(2014, 2, 5, 0, 0) + assert view2.vertex(1).start_date_time == datetime.datetime(2014, 2, 2, 0, 0) + assert view2.vertex(1).end_date_time == datetime.datetime(2014, 2, 5, 0, 0) - assert view.vertex(2).earliest_date_time() == datetime.datetime(2014, 2, 3, 0, 0) - assert view.vertex(2).latest_date_time() == datetime.datetime(2014, 2, 3, 0, 0) + assert view.vertex(2).earliest_date_time == datetime.datetime(2014, 2, 3, 0, 0) + assert view.vertex(2).latest_date_time == datetime.datetime(2014, 2, 3, 0, 0) def test_equivalent_vertices_edges_and_sets(): @@ -1532,9 +1527,9 @@ def test_equivalent_vertices_edges_and_sets(): g.add_edge(1, 2, 3) assert g.vertex(1) == g.vertex(1) - assert list(g.vertex(1).neighbours())[0] == list(g.vertex(3).neighbours())[0] - assert set(g.vertex(1).neighbours()) == set(g.vertex(3).neighbours()) - assert set(g.vertex(1).out_edges()) == set(g.vertex(2).in_edges()) + assert list(g.vertex(1).neighbours)[0] == list(g.vertex(3).neighbours)[0] + assert set(g.vertex(1).neighbours) == set(g.vertex(3).neighbours) + assert set(g.vertex(1).out_edges) == set(g.vertex(2).in_edges) assert g.edge(1, 1) == g.edge(1, 1) @@ -1556,7 +1551,7 @@ def test_subgraph(): mg = subgraph.materialize() assert mg.vertices.collect()[0].properties["type"] == "wallet" - assert mg.vertices.collect()[0].name() == "1" + assert mg.vertices.collect()[0].name == "1" props = {"prop 4": 11, "prop 5": "world", "prop 6": False} mg.add_property(1, props) @@ -1594,14 +1589,14 @@ def test_materialize_graph(): assert mg.vertex(4).properties.constant.get("abc") == "xyz" assert mg.vertex(1).history() == [-1, 0, 1, 2] assert mg.vertex(4).history() == [6, 8] - assert mg.vertices().id().collect() == [1, 2, 3, 4] - assert set(mg.edges().id()) == {(1, 1), (1, 2), (1, 3), (2, 1), (3, 2), (2, 4)} - assert g.vertices.id().collect() == mg.vertices.id().collect() - assert set(g.edges().id()) == set(mg.edges().id()) + assert mg.vertices.id.collect() == [1, 2, 3, 4] + assert set(mg.edges().id) == {(1, 1), (1, 2), (1, 3), (2, 1), (3, 2), (2, 4)} + assert g.vertices.id.collect() == mg.vertices.id.collect() + assert set(g.edges().id) == set(mg.edges().id) assert mg.vertex(1).properties.constant == {} assert mg.vertex(4).properties.constant == {"abc": "xyz"} - assert g.edge(1, 2).id() == (1, 2) - assert mg.edge(1, 2).id() == (1, 2) + assert g.edge(1, 2).id == (1, 2) + assert mg.edge(1, 2).id == (1, 2) assert mg.has_edge(1, 2) assert g.has_edge(1, 2) assert mg.has_edge(2, 1) @@ -1622,7 +1617,7 @@ def test_deletions(): for e in edges[1:]: assert g.window(start=11).has_edge(e[1], e[2]) - assert list(g.edge(edges[0][1], edges[0][2]).explode().latest_time()) == [10] + assert list(g.edge(edges[0][1], edges[0][2]).explode().latest_time) == [10] def test_load_from_pandas(): @@ -1640,12 +1635,12 @@ def 
test_load_from_pandas(): g = Graph.load_from_pandas(df, "src", "dst", "time", ["weight", "marbles"]) - assert g.vertices().id().collect() == [1, 2, 3, 4, 5, 6] + assert g.vertices.id.collect() == [1, 2, 3, 4, 5, 6] edges = [] for e in g.edges(): weight = e["weight"] marbles = e["marbles"] - edges.append((e.src().id(), e.dst().id(), weight, marbles)) + edges.append((e.src.id, e.dst.id, weight, marbles)) assert edges == [ (1, 2, 1.0, "red"), @@ -1681,12 +1676,12 @@ def test_load_from_pandas_into_existing_graph(): g.load_edges_from_pandas(edges_df, "src", "dst", "time", ["weight", "marbles"]) - assert g.vertices().id().collect() == [1, 2, 3, 4, 5, 6] + assert g.vertices.id.collect() == [1, 2, 3, 4, 5, 6] edges = [] for e in g.edges(): weight = e["weight"] marbles = e["marbles"] - edges.append((e.src().id(), e.dst().id(), weight, marbles)) + edges.append((e.src.id, e.dst.id, weight, marbles)) assert edges == [ (1, 2, 1.0, "red"), @@ -1697,9 +1692,9 @@ def test_load_from_pandas_into_existing_graph(): ] vertices = [] - for v in g.vertices(): + for v in g.vertices: name = v["name"] - vertices.append((v.id(), name)) + vertices.append((v.id, name)) assert vertices == [ (1, "Alice"), @@ -1742,12 +1737,12 @@ def test_load_from_pandas_vertices(): vertex_props=["name"], ) - assert g.vertices().id().collect() == [1, 2, 3, 4, 5, 6] + assert g.vertices.id.collect() == [1, 2, 3, 4, 5, 6] edges = [] for e in g.edges(): weight = e["weight"] marbles = e["marbles"] - edges.append((e.src().id(), e.dst().id(), weight, marbles)) + edges.append((e.src.id, e.dst.id, weight, marbles)) assert edges == [ (1, 2, 1.0, "red"), @@ -1758,9 +1753,9 @@ def test_load_from_pandas_vertices(): ] vertices = [] - for v in g.vertices(): + for v in g.vertices: name = v["name"] - vertices.append((v.id(), name)) + vertices.append((v.id, name)) assert vertices == [ (1, "Alice"), @@ -1807,7 +1802,7 @@ def test_load_from_pandas_with_types(): ["name"], shared_const_props={"type": "Person", "tag": "test_tag"}, ) - assert g.vertices().properties.constant.get("type").collect() == [ + assert g.vertices.properties.constant.get("type").collect() == [ "Person", "Person", "Person", @@ -1815,7 +1810,7 @@ def test_load_from_pandas_with_types(): "Person", "Person", ] - assert g.vertices().properties.constant.get("tag").collect() == [ + assert g.vertices.properties.constant.get("tag").collect() == [ "test_tag", "test_tag", "test_tag", @@ -1828,7 +1823,7 @@ def test_load_from_pandas_with_types(): g.load_vertices_from_pandas( vertices_df, "id", "time", ["name"], const_props=["type"] ) - assert g.vertices().properties.constant.get("type").collect() == [ + assert g.vertices.properties.constant.get("type").collect() == [ "Person 1", "Person 2", "Person 3", @@ -1849,7 +1844,7 @@ def test_load_from_pandas_with_types(): layer="test_layer", ) - assert g.layers(["test_layer"]).edges().src().id().collect() == [1, 2, 3, 4, 5] + assert g.layers(["test_layer"]).edges().src.id.collect() == [1, 2, 3, 4, 5] assert g.edges().properties.constant.get("type").collect() == [ {"test_layer": "Edge"}, {"test_layer": "Edge"}, @@ -1876,14 +1871,14 @@ def test_load_from_pandas_with_types(): g.load_edges_from_pandas( edges_df, "src", "dst", "time", ["weight", "marbles"], layer_in_df="layers" ) - assert g.layers(["layer 1"]).edges().src().id().collect() == [1] - assert g.layers(["layer 1", "layer 2"]).edges().src().id().collect() == [1, 2] - assert g.layers(["layer 1", "layer 2", "layer 3"]).edges().src().id().collect() == [ + assert g.layers(["layer 1"]).edges().src.id.collect() 
== [1] + assert g.layers(["layer 1", "layer 2"]).edges().src.id.collect() == [1, 2] + assert g.layers(["layer 1", "layer 2", "layer 3"]).edges().src.id.collect() == [ 1, 2, 3, ] - assert g.layers(["layer 1", "layer 4", "layer 5"]).edges().src().id().collect() == [ + assert g.layers(["layer 1", "layer 4", "layer 5"]).edges().src.id.collect() == [ 1, 4, 5, @@ -1901,7 +1896,7 @@ def test_load_from_pandas_with_types(): vertex_props=["name"], vertex_shared_const_props={"type": "Person"}, ) - assert g.vertices().properties.constant.get("type").collect() == [ + assert g.vertices.properties.constant.get("type").collect() == [ "Person", "Person", "Person", @@ -1909,7 +1904,7 @@ def test_load_from_pandas_with_types(): "Person", "Person", ] - assert g.layers(["test_layer"]).edges().src().id().collect() == [1, 2, 3, 4, 5] + assert g.layers(["test_layer"]).edges().src.id.collect() == [1, 2, 3, 4, 5] g = Graph.load_from_pandas( edges_df, @@ -1923,7 +1918,7 @@ def test_load_from_pandas_with_types(): vertex_props=["name"], vertex_const_props=["type"], ) - assert g.vertices().properties.constant.get("type").collect() == [ + assert g.vertices.properties.constant.get("type").collect() == [ "Person 1", "Person 2", "Person 3", @@ -1931,14 +1926,14 @@ def test_load_from_pandas_with_types(): "Person 5", "Person 6", ] - assert g.layers(["layer 1"]).edges().src().id().collect() == [1] - assert g.layers(["layer 1", "layer 2"]).edges().src().id().collect() == [1, 2] - assert g.layers(["layer 1", "layer 2", "layer 3"]).edges().src().id().collect() == [ + assert g.layers(["layer 1"]).edges().src.id.collect() == [1] + assert g.layers(["layer 1", "layer 2"]).edges().src.id.collect() == [1, 2] + assert g.layers(["layer 1", "layer 2", "layer 3"]).edges().src.id.collect() == [ 1, 2, 3, ] - assert g.layers(["layer 1", "layer 4", "layer 5"]).edges().src().id().collect() == [ + assert g.layers(["layer 1", "layer 4", "layer 5"]).edges().src.id.collect() == [ 1, 4, 5, @@ -1960,7 +1955,7 @@ def test_load_from_pandas_with_types(): g.load_vertex_props_from_pandas( vertices_df, "id", const_props=["type"], shared_const_props={"tag": "test_tag"} ) - assert g.vertices().properties.constant.get("type").collect() == [ + assert g.vertices.properties.constant.get("type").collect() == [ "Person 1", "Person 2", "Person 3", @@ -1968,7 +1963,7 @@ def test_load_from_pandas_with_types(): "Person 5", "Person 6", ] - assert g.vertices().properties.constant.get("tag").collect() == [ + assert g.vertices.properties.constant.get("tag").collect() == [ "test_tag", "test_tag", "test_tag", @@ -2016,8 +2011,7 @@ def test_layers_earliest_time(): e = g.add_edge(1, 1, 2, layer="test") e = g.edge(1, 2) print(e) - assert e.earliest_time() == 1 - + assert e.earliest_time == 1 def test_layers_earliest_time(): @@ -2025,8 +2019,7 @@ def test_layers_earliest_time(): e = g.add_edge(1, 1, 2, layer="test") e = g.edge(1, 2) print(e) - assert e.earliest_time() == 1 - + assert e.earliest_time == 1 def test_edge_explode_layers(): @@ -2037,13 +2030,13 @@ def test_edge_explode_layers(): g.add_edge(1, 2, 1, {"layer": 2}, layer="2") layered_edges = g.edge(1, 2).explode_layers() - e_layers = [ee.layer_names() for ee in layered_edges] + e_layers = [ee.layer_names for ee in layered_edges] e_layer_prop = [[str(ee.properties["layer"])] for ee in layered_edges] assert e_layers == e_layer_prop print(e_layers) - nested_layered_edges = g.vertices.out_edges().explode_layers() - e_layers = [[ee.layer_names() for ee in edges] for edges in nested_layered_edges] + nested_layered_edges = 
g.vertices.out_edges.explode_layers() + e_layers = [[ee.layer_names for ee in edges] for edges in nested_layered_edges] e_layer_prop = [ [[str(ee.properties["layer"])] for ee in layered_edges] for layered_edges in nested_layered_edges @@ -2051,11 +2044,11 @@ def test_edge_explode_layers(): assert e_layers == e_layer_prop print(e_layers) - print(g.vertices.out_neighbours().collect()) - nested_layered_edges = g.vertices.out_neighbours().out_edges().explode_layers() + print(g.vertices.out_neighbours.collect) + nested_layered_edges = g.vertices.out_neighbours.out_edges.explode_layers() print(nested_layered_edges) e_layers = [ - [ee.layer_names() for ee in layered_edges] + [ee.layer_names for ee in layered_edges] for layered_edges in nested_layered_edges ] e_layer_prop = [ diff --git a/python/tests/test_iterables.py b/python/tests/test_iterables.py index 6425b1dc29..7a0045b700 100644 --- a/python/tests/test_iterables.py +++ b/python/tests/test_iterables.py @@ -29,16 +29,16 @@ def test_pyprophistvaluelist(): g.add_edge(time, src, dst, {"value_dec": val}) v = g.vertex("1") - res = sorted(v.out_edges().properties.temporal.get("value_dec").values().sum()) + res = sorted(v.out_edges.properties.temporal.get("value_dec").values().sum()) assert res == [2, 20, 20] - res = sorted(v.out_edges().properties.temporal.get("value_dec").values().count()) + res = sorted(v.out_edges.properties.temporal.get("value_dec").values().count()) assert res == [1, 1, 2] - res = v.out_edges().properties.temporal.get("value_dec").values().sum().sum() + res = v.out_edges.properties.temporal.get("value_dec").values().sum().sum() assert res == 42 - res = v.out_edges().properties.temporal.get("value_dec").values().count().sum() + res = v.out_edges.properties.temporal.get("value_dec").values().count().sum() assert res == 4 g = Graph() @@ -59,20 +59,20 @@ def test_pyprophistvaluelist(): for src, dst, val, time in edges_str: g.add_edge(time, src, dst, {"value_dec": val}) v = g.vertex("1") - res = ( - v.out_edges().properties.temporal.get("value_dec").values() - ) # PyPropHistValueList([[10, 10, 10], [20], [2]]) + res = v.out_edges.properties.temporal.get( + "value_dec" + ).values() # PyPropHistValueList([[10, 10, 10], [20], [2]]) assert res.sum() == [120, 20, 8] assert res.min() == [10, 20, 1] assert res.max() == [100, 20, 5] - assert sorted(res.len()) == [1, 3, 3] assert sorted(res.count()) == [1, 3, 3] assert res.median() == [10, 20, 2] assert list(res.mean()) == [40, 20, 8 / 3] assert list(res.average()) == [40, 20, 8 / 3] + def test_empty_lists(): - #This checks that empty lists are handled correctly on all python property types + # This checks that empty lists are handled correctly on all python property types g = Graph() edges_str = [ ("1", "2", 10, 1), @@ -87,8 +87,24 @@ def test_empty_lists(): ] for src, dst, val, time in edges_str: g.add_edge(time, src, dst, {"value_dec": val}) - assert(g.vertices().out_edges().properties.temporal.get("value_dec").values().median().median().median() == 5) - assert(g.vertices().out_edges().properties.temporal.get("value_dec").values().mean().mean().mean() == 1.3333333333333335) + assert ( + g.vertices + .out_edges.properties.temporal.get("value_dec") + .values() + .median() + .median() + .median() + == 5 + ) + assert ( + g.vertices + .out_edges.properties.temporal.get("value_dec") + .values() + .mean() + .mean() + .mean() + == 1.3333333333333335 + ) def test_propiterable(): @@ -111,7 +127,7 @@ def test_propiterable(): g.add_edge(time, src, dst, {"value_dec": val}) v = g.vertex("1") - result 
= v.out_edges().properties.temporal.get("value_dec").values().flatten() + result = v.out_edges.properties.temporal.get("value_dec").values().flatten() assert sorted(result) == [2, 10, 10, 10, 20] assert result.sum() == 52 assert result.median() == 10 @@ -119,38 +135,38 @@ def test_propiterable(): assert result.average() == 10.4 assert result.min() == 2 assert result.max() == 20 - assert result.len() == 5 + assert result.count() == 5 - assert v.out_edges().properties.get("value_dec").sum() == 32 - assert v.out_edges().properties.get("value_dec").median() == 10 + assert v.out_edges.properties.get("value_dec").sum() == 32 + assert v.out_edges.properties.get("value_dec").median() == 10 - total = g.vertices.in_edges().properties.get("value_dec").sum() + total = g.vertices.in_edges.properties.get("value_dec").sum() assert sorted(total) == [2, 6, 12, 15, 20] - total = g.vertices.edges().properties.get("value_dec").sum() + total = g.vertices.edges.properties.get("value_dec").sum() assert sorted(total) == [2, 17, 18, 35, 38] total = dict( - zip(g.vertices().id(), g.vertices.out_edges().properties.get("value_dec").sum()) + zip(g.vertices.id, g.vertices.out_edges.properties.get("value_dec").sum()) ) assert total == {1: 32, 2: 5, 3: 3, 4: 15, 5: None} - total = g.vertices.out_edges().properties.get("value_dec").sum().sum() + total = g.vertices.out_edges.properties.get("value_dec").sum().sum() assert total == 55 - total = g.vertices.out_edges().properties.get("value_dec").sum().median() + total = g.vertices.out_edges.properties.get("value_dec").sum().median() assert total == 5 - total = g.vertices.out_edges().properties.get("value_dec").sum().drop_none() + total = g.vertices.out_edges.properties.get("value_dec").sum().drop_none() assert sorted(total) == [3, 5, 15, 32] - total = g.vertices.out_edges().properties.get("value_dec").median() + total = g.vertices.out_edges.properties.get("value_dec").median() assert list(total) == [10, 5, 10, 2, None] - total = g.vertex("1").in_edges().properties.get("value_dec").sum() + total = g.vertex("1").in_edges.properties.get("value_dec").sum() assert total == 6 - total = g.vertex("1").in_edges().properties.get("value_dec").median() + total = g.vertex("1").in_edges.properties.get("value_dec").median() assert total == 5 @@ -176,8 +192,8 @@ def test_pypropvalue_list_listlist(): res = g.edges().properties.get( "value_dec" ) # PyPropValueList([100, 20, 5, 5, 5, 10, 1, 2]) - res_v = v.edges().properties.get("value_dec") # PyPropValueList([100, 5, 20, 1, 5]) - res_ll = g.vertices().edges().properties.get("value_dec") + res_v = v.edges.properties.get("value_dec") # PyPropValueList([100, 5, 20, 1, 5]) + res_ll = g.vertices.edges.properties.get("value_dec") assert res.sum() == 148 assert res_v.sum() == 131 @@ -195,9 +211,9 @@ def test_pypropvalue_list_listlist(): assert res_v.max() == 100 assert res_ll.max() == [100, 100, 20, 10, 5] - assert res.count() == res.len() == 8 - assert res_v.count() == res_v.len() == 5 - assert res_ll.count() == res_ll.len() == [5, 3, 3, 4, 1] + assert res.count() == 8 + assert res_v.count() == 5 + assert res_ll.count() == [5, 3, 3, 4, 1] assert res.mean() == res.average() == 18.5 assert res_v.mean() == res_v.average() == 26.2 @@ -227,11 +243,11 @@ def test_pytemporalprops(): for src, dst, val, time in edges_str: g.add_edge(time, src, dst, {"value_dec": val}) v = g.vertex("1") - res = list(v.out_edges())[0].properties.temporal.get("value_dec") + res = list(v.out_edges)[0].properties.temporal.get("value_dec") assert res.sum() == 120 assert res.min() 
== (1, 10) assert res.max() == (3, 100) - assert res.count() == res.len() == 3 + assert res.count() == 3 assert res.mean() == res.average() == 40.0 assert res.median() == (2, 10) diff --git a/raphtory-benchmark/benches/common/mod.rs b/raphtory-benchmark/benches/common/mod.rs index f1b4fd5e87..1103a0c47e 100644 --- a/raphtory-benchmark/benches/common/mod.rs +++ b/raphtory-benchmark/benches/common/mod.rs @@ -272,7 +272,7 @@ pub fn run_analysis_benchmarks( let vertices: HashSet = graph.vertices().id().collect(); bench(group, "num_edges", parameter, |b: &mut Bencher| { - b.iter(|| graph.num_edges()) + b.iter(|| graph.count_edges()) }); bench(group, "has_edge_existing", parameter, |b: &mut Bencher| { @@ -301,7 +301,7 @@ pub fn run_analysis_benchmarks( ); bench(group, "num_vertices", parameter, |b: &mut Bencher| { - b.iter(|| graph.num_vertices()) + b.iter(|| graph.count_vertices()) }); bench( diff --git a/raphtory-graphql/src/data.rs b/raphtory-graphql/src/data.rs index 92f07b1692..4c8af62f14 100644 --- a/raphtory-graphql/src/data.rs +++ b/raphtory-graphql/src/data.rs @@ -1,7 +1,6 @@ use parking_lot::RwLock; use raphtory::{ core::Prop, - db::api::view::internal::{DynamicGraph, IntoDynamic}, prelude::{Graph, GraphViewOps, PropertyAdditionOps}, search::IndexedGraph, }; diff --git a/raphtory-graphql/src/model/graph/graph.rs b/raphtory-graphql/src/model/graph/graph.rs index 49193d762a..c6549d02a0 100644 --- a/raphtory-graphql/src/model/graph/graph.rs +++ b/raphtory-graphql/src/model/graph/graph.rs @@ -87,7 +87,7 @@ impl GqlGraph { } async fn layer_names(&self) -> Vec { - self.graph.get_unique_layers() + self.graph.unique_layers() } async fn static_properties(&self) -> Vec { diff --git a/raphtory-graphql/src/model/schema/graph_schema.rs b/raphtory-graphql/src/model/schema/graph_schema.rs index 1c20a58599..2e28a59b42 100644 --- a/raphtory-graphql/src/model/schema/graph_schema.rs +++ b/raphtory-graphql/src/model/schema/graph_schema.rs @@ -23,7 +23,7 @@ impl GraphSchema { .collect_vec(); let layers = graph - .get_unique_layers() + .unique_layers() .iter() .map(|layer_name| graph.layer(layer_name).unwrap().into()) .collect_vec(); diff --git a/raphtory-graphql/src/model/schema/layer_schema.rs b/raphtory-graphql/src/model/schema/layer_schema.rs index 8fd36974eb..a94ad43e17 100644 --- a/raphtory-graphql/src/model/schema/layer_schema.rs +++ b/raphtory-graphql/src/model/schema/layer_schema.rs @@ -21,7 +21,7 @@ impl From> for LayerSchema { impl LayerSchema { /// Returns the name of the layer with this schema async fn name(&self) -> String { - match &self.graph.get_unique_layers()[..] { + match &self.graph.unique_layers()[..] 
{ [layer] => layer.clone(), _ => panic!("Layered graph outputted more than one layer name"), } diff --git a/raphtory/src/algorithms/directed_graph_density.rs b/raphtory/src/algorithms/directed_graph_density.rs index e10e74cf4d..888336a856 100644 --- a/raphtory/src/algorithms/directed_graph_density.rs +++ b/raphtory/src/algorithms/directed_graph_density.rs @@ -35,7 +35,8 @@ use crate::db::api::view::*; /// Measures how dense or sparse a graph is pub fn directed_graph_density(graph: &G) -> f32 { - graph.num_edges() as f32 / (graph.num_vertices() as f32 * (graph.num_vertices() as f32 - 1.0)) + graph.count_edges() as f32 + / (graph.count_vertices() as f32 * (graph.count_vertices() as f32 - 1.0)) } #[cfg(test)] diff --git a/raphtory/src/algorithms/pagerank.rs b/raphtory/src/algorithms/pagerank.rs index 36add9ad8d..828620afee 100644 --- a/raphtory/src/algorithms/pagerank.rs +++ b/raphtory/src/algorithms/pagerank.rs @@ -59,8 +59,8 @@ pub fn unweighted_page_rank( tol: Option, use_l2_norm: bool, ) -> AlgorithmResult> { - let n = g.num_vertices(); - let total_edges = g.num_edges(); + let n = g.count_vertices(); + let total_edges = g.count_edges(); let mut ctx: Context = g.into(); @@ -156,7 +156,7 @@ pub fn unweighted_page_rank( let mut runner: TaskRunner = TaskRunner::new(ctx); - let num_vertices = g.num_vertices(); + let num_vertices = g.count_vertices(); let out: HashMap = runner.run( vec![Job::new(step1)], diff --git a/raphtory/src/core/mod.rs b/raphtory/src/core/mod.rs index cdbf70f70e..bc6cfe9058 100644 --- a/raphtory/src/core/mod.rs +++ b/raphtory/src/core/mod.rs @@ -379,8 +379,8 @@ impl fmt::Display for Prop { Prop::Graph(value) => write!( f, "Graph(num_vertices={}, num_edges={})", - value.num_vertices(), - value.num_edges() + value.count_vertices(), + value.count_edges() ), Prop::List(value) => { write!(f, "{:?}", value) diff --git a/raphtory/src/db/api/view/graph.rs b/raphtory/src/db/api/view/graph.rs index 6b347e2cc4..ad1db49262 100644 --- a/raphtory/src/db/api/view/graph.rs +++ b/raphtory/src/db/api/view/graph.rs @@ -35,24 +35,24 @@ pub trait GraphViewOps: BoxableGraphView + Clone + Sized { vertices: I, ) -> VertexSubgraph; /// Return all the layer ids in the graph - fn get_unique_layers(&self) -> Vec; + fn unique_layers(&self) -> Vec; /// Timestamp of earliest activity in the graph fn earliest_time(&self) -> Option; /// Timestamp of latest activity in the graph fn latest_time(&self) -> Option; /// Return the number of vertices in the graph. - fn num_vertices(&self) -> usize; + fn count_vertices(&self) -> usize; /// Check if the graph is empty. fn is_empty(&self) -> bool { - self.num_vertices() == 0 + self.count_vertices() == 0 } /// Return the number of edges in the graph. - fn num_edges(&self) -> usize; + fn count_edges(&self) -> usize; // Return the number of temporal edges in the graph. - fn num_temporal_edges(&self) -> usize; + fn count_temporal_edges(&self) -> usize; /// Check if the graph contains a vertex `v`. 
fn has_vertex>(&self, v: T) -> bool; @@ -103,7 +103,7 @@ impl GraphViewOps for G { } /// Return all the layer ids in the graph - fn get_unique_layers(&self) -> Vec { + fn unique_layers(&self) -> Vec { self.get_layer_names_from_ids(self.layer_ids()) } @@ -115,16 +115,16 @@ impl GraphViewOps for G { self.latest_time_global() } - fn num_vertices(&self) -> usize { + fn count_vertices(&self) -> usize { self.vertices_len(self.layer_ids(), self.edge_filter()) } - fn num_temporal_edges(&self) -> usize { + fn count_temporal_edges(&self) -> usize { self.edges().explode().count() } #[inline] - fn num_edges(&self) -> usize { + fn count_edges(&self) -> usize { self.edges_len(self.layer_ids(), self.edge_filter()) } @@ -278,7 +278,7 @@ mod test_exploded_edges { g.add_edge(2, 0, 1, NO_PROPS, None).unwrap(); g.add_edge(3, 0, 1, NO_PROPS, None).unwrap(); - assert_eq!(g.num_temporal_edges(), 4) + assert_eq!(g.count_temporal_edges(), 4) } } diff --git a/raphtory/src/db/api/view/internal/mod.rs b/raphtory/src/db/api/view/internal/mod.rs index ba5d8d0223..8d2d0e459a 100644 --- a/raphtory/src/db/api/view/internal/mod.rs +++ b/raphtory/src/db/api/view/internal/mod.rs @@ -100,8 +100,8 @@ impl Debug for DynamicGraph { write!( f, "DynamicGraph(num_vertices={}, num_edges={})", - self.num_vertices(), - self.num_edges() + self.count_vertices(), + self.count_edges() ) } } diff --git a/raphtory/src/db/graph/graph.rs b/raphtory/src/db/graph/graph.rs index 4bbf59ed70..56ab9af411 100644 --- a/raphtory/src/db/graph/graph.rs +++ b/raphtory/src/db/graph/graph.rs @@ -12,7 +12,7 @@ //! graph.add_vertex(0, "Alice", NO_PROPS).unwrap(); //! graph.add_vertex(1, "Bob", NO_PROPS).unwrap(); //! graph.add_edge(2, "Alice", "Bob", NO_PROPS, None).unwrap(); -//! graph.num_edges(); +//! graph.count_edges(); //! ``` //! 
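As a quick orientation to the rename, here is a minimal, illustrative sketch (not part of the patch) of the new counting API, mirroring the doc example just above; it assumes `Graph`, `AdditionOps`, `GraphViewOps` and `NO_PROPS` are re-exported by `raphtory::prelude`, as in the examples elsewhere in this diff.

use raphtory::prelude::*;

fn main() {
    let g = Graph::new();
    // Two temporal edge additions over three vertices, one on a named layer.
    g.add_edge(0, 1, 2, NO_PROPS, Some("layer1")).unwrap();
    g.add_edge(1, 1, 3, NO_PROPS, None).unwrap();

    // num_vertices / num_edges / num_temporal_edges become count_* ...
    assert_eq!(g.count_vertices(), 3);
    assert_eq!(g.count_edges(), 2);
    assert_eq!(g.count_temporal_edges(), 2);
    // ... and get_unique_layers becomes unique_layers.
    assert!(g.unique_layers().iter().any(|l| l == "layer1"));
}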
@@ -39,7 +39,7 @@ pub(crate) type InternalGraph = InnerTemporalGraph; pub struct Graph(pub Arc); pub fn graph_equal(g1: &G1, g2: &G2) -> bool { - if g1.num_vertices() == g2.num_vertices() && g1.num_edges() == g2.num_edges() { + if g1.count_vertices() == g2.count_vertices() && g1.count_edges() == g2.count_edges() { g1.vertices().id().all(|v| g2.has_vertex(v)) && // all vertices exist in other g1.edges().explode().count() == g2.edges().explode().count() && // same number of exploded edges g1.edges().explode().all(|e| { // all exploded edges exist in other @@ -176,7 +176,7 @@ mod db_tests { .ok(); } - assert_eq!(g.num_vertices(), expected_len) + assert_eq!(g.count_vertices(), expected_len) } #[quickcheck] @@ -190,7 +190,7 @@ mod db_tests { .ok(); } - assert_eq!(g.num_vertices(), expected_len); + assert_eq!(g.count_vertices(), expected_len); vs.iter().all(|name| { let v = g.vertex(name.clone()).unwrap(); @@ -219,8 +219,8 @@ mod db_tests { g.add_edge(t, src, dst, NO_PROPS, None).unwrap(); } - assert_eq!(g.num_vertices(), unique_vertices_count); - assert_eq!(g.num_edges(), unique_edge_count); + assert_eq!(g.count_vertices(), unique_vertices_count); + assert_eq!(g.count_edges(), unique_edge_count); } #[quickcheck] @@ -605,7 +605,7 @@ mod db_tests { assert!(g.has_vertex("haaroon")); assert!(g.has_vertex("hamza")); - assert_eq!(g.num_vertices(), 3); + assert_eq!(g.count_vertices(), 3); } #[test] @@ -634,11 +634,11 @@ mod db_tests { let layer2 = g.layer("layer2").expect("layer2"); assert!(g.layer("missing layer").is_none()); - assert_eq!(g.num_vertices(), 4); - assert_eq!(g.num_edges(), 4); - assert_eq!(dft_layer.num_edges(), 3); - assert_eq!(layer1.num_edges(), 1); - assert_eq!(layer2.num_edges(), 2); + assert_eq!(g.count_vertices(), 4); + assert_eq!(g.count_edges(), 4); + assert_eq!(dft_layer.count_edges(), 3); + assert_eq!(layer1.count_edges(), 1); + assert_eq!(layer2.count_edges(), 2); let vertex = g.vertex(11).unwrap(); let vertex_dft = dft_layer.vertex(11).unwrap(); @@ -1384,7 +1384,7 @@ mod db_tests { assert_eq!(sum_eth_btc, 30); - assert_eq!(lg.num_edges(), 1); + assert_eq!(lg.count_edges(), 1); let e = g.edge(1, 2).expect("failed to get edge"); @@ -1433,10 +1433,7 @@ mod db_tests { let g = Graph::new(); g.add_edge(0, 1, 2, NO_PROPS, Some("layer1")).unwrap(); g.add_edge(0, 1, 2, NO_PROPS, Some("layer2")).unwrap(); - assert_eq!( - g.layer("layer2").unwrap().get_unique_layers(), - vec!["layer2"] - ) + assert_eq!(g.layer("layer2").unwrap().unique_layers(), vec!["layer2"]) } #[quickcheck] diff --git a/raphtory/src/db/graph/vertices.rs b/raphtory/src/db/graph/vertices.rs index 70095a8393..b44646565d 100644 --- a/raphtory/src/db/graph/vertices.rs +++ b/raphtory/src/db/graph/vertices.rs @@ -35,7 +35,7 @@ impl Vertices { /// Returns the number of vertices in the graph. pub fn len(&self) -> usize { - self.graph.num_vertices() + self.graph.count_vertices() } /// Returns true if the graph contains no vertices. 
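The renamed counters are also what the view types expose, as the layer and window tests around this point exercise; a small sketch under the same prelude assumption (illustrative only, not part of the patch):

use raphtory::prelude::*;

fn main() {
    let g = Graph::new();
    g.add_edge(0, 11, 22, NO_PROPS, None).unwrap();
    g.add_edge(5, 11, 33, NO_PROPS, Some("layer1")).unwrap();

    // A single-layer view still answers count_edges / unique_layers.
    let layer1 = g.layer("layer1").expect("layer1 exists");
    assert_eq!(layer1.count_edges(), 1);
    assert_eq!(layer1.unique_layers(), vec!["layer1"]);

    // A half-open time window [0, 1) sees only the edge added at t = 0,
    // so it counts one edge and its two endpoint vertices.
    assert_eq!(g.window(0, 1).count_edges(), 1);
    assert_eq!(g.window(0, 1).count_vertices(), 2);
}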
diff --git a/raphtory/src/db/graph/views/deletion_graph.rs b/raphtory/src/db/graph/views/deletion_graph.rs index e1d8cdb340..3a2c0d916c 100644 --- a/raphtory/src/db/graph/views/deletion_graph.rs +++ b/raphtory/src/db/graph/views/deletion_graph.rs @@ -513,7 +513,7 @@ mod test_deletions { vec![(0, 1)] ); - assert_eq!(g.window(1, 2).num_edges(), 1); + assert_eq!(g.window(1, 2).count_edges(), 1); assert!(g.window(11, 12).is_empty()); diff --git a/raphtory/src/db/graph/views/window_graph.rs b/raphtory/src/db/graph/views/window_graph.rs index b68c25e5aa..49cc013e5d 100644 --- a/raphtory/src/db/graph/views/window_graph.rs +++ b/raphtory/src/db/graph/views/window_graph.rs @@ -903,15 +903,15 @@ mod views_test { } let wg = WindowedGraph::new(g, window.start, window.end); - if wg.num_edges() != true_edge_count { + if wg.count_edges() != true_edge_count { println!( "failed, g.num_edges() = {}, true count = {}", - wg.num_edges(), + wg.count_edges(), true_edge_count ); println!("g.edges() = {:?}", wg.edges().collect_vec()); } - TestResult::from_bool(wg.num_edges() == true_edge_count) + TestResult::from_bool(wg.count_edges() == true_edge_count) } #[quickcheck] @@ -940,7 +940,7 @@ mod views_test { g.add_edge(t, 0, dst, NO_PROPS, None).unwrap(); } let w = g.window(i64::MIN, i64::MAX); - w.num_edges() == n + w.count_edges() == n } #[test] diff --git a/raphtory/src/graph_loader/example/company_house.rs b/raphtory/src/graph_loader/example/company_house.rs index e01c6e79bc..5473b94260 100644 --- a/raphtory/src/graph_loader/example/company_house.rs +++ b/raphtory/src/graph_loader/example/company_house.rs @@ -40,8 +40,8 @@ pub fn company_house_graph(path: Option) -> Graph { println!( "Loaded graph from encoded data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -139,8 +139,8 @@ pub fn company_house_graph(path: Option) -> Graph { println!( "Loaded graph from CSV data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); diff --git a/raphtory/src/graph_loader/example/lotr_graph.rs b/raphtory/src/graph_loader/example/lotr_graph.rs index 14e7c98503..20ff674e49 100644 --- a/raphtory/src/graph_loader/example/lotr_graph.rs +++ b/raphtory/src/graph_loader/example/lotr_graph.rs @@ -18,8 +18,8 @@ //! //! let graph = lotr_graph(); //! -//! println!("The graph has {:?} vertices", graph.num_vertices()); -//! println!("The graph has {:?} edges", graph.num_edges()); +//! println!("The graph has {:?} vertices", graph.count_vertices()); +//! println!("The graph has {:?} edges", graph.count_edges()); //! ``` use crate::{ graph_loader::{fetch_file, source::csv_loader::CsvLoader}, diff --git a/raphtory/src/graph_loader/example/reddit_hyperlinks.rs b/raphtory/src/graph_loader/example/reddit_hyperlinks.rs index 1b7f237fb5..a416b78a0d 100644 --- a/raphtory/src/graph_loader/example/reddit_hyperlinks.rs +++ b/raphtory/src/graph_loader/example/reddit_hyperlinks.rs @@ -34,8 +34,8 @@ //! //! let graph = reddit_graph(120, false); //! -//! println!("The graph has {:?} vertices", graph.num_vertices()); -//! println!("The graph has {:?} edges", graph.num_edges()); +//! println!("The graph has {:?} vertices", graph.count_vertices()); +//! println!("The graph has {:?} edges", graph.count_edges()); //! 
``` use crate::{core::Prop, db::api::mutation::AdditionOps, graph_loader::fetch_file, prelude::*}; @@ -169,7 +169,7 @@ mod reddit_test { #[test] fn check_graph() { let graph = reddit_graph(100, true); - assert_eq!(graph.num_vertices(), 16); - assert_eq!(graph.num_edges(), 9); + assert_eq!(graph.count_vertices(), 16); + assert_eq!(graph.count_edges(), 9); } } diff --git a/raphtory/src/graph_loader/example/stable_coins.rs b/raphtory/src/graph_loader/example/stable_coins.rs index 6254e6326f..4dc3d7c547 100644 --- a/raphtory/src/graph_loader/example/stable_coins.rs +++ b/raphtory/src/graph_loader/example/stable_coins.rs @@ -49,8 +49,8 @@ pub fn stable_coin_graph(path: Option, subset: bool) -> Graph { println!( "Loaded graph from encoded data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); @@ -100,8 +100,8 @@ pub fn stable_coin_graph(path: Option, subset: bool) -> Graph { println!( "Loaded graph from CSV data files {} with {} vertices, {} edges which took {} seconds", encoded_data_dir.to_str().unwrap(), - g.num_vertices(), - g.num_edges(), + g.count_vertices(), + g.count_edges(), now.elapsed().as_secs() ); diff --git a/raphtory/src/graph_loader/example/sx_superuser_graph.rs b/raphtory/src/graph_loader/example/sx_superuser_graph.rs index 24bf988db7..0b293857d7 100644 --- a/raphtory/src/graph_loader/example/sx_superuser_graph.rs +++ b/raphtory/src/graph_loader/example/sx_superuser_graph.rs @@ -41,8 +41,8 @@ //! //! let graph = sx_superuser_graph().unwrap(); //! -//! println!("The graph has {:?} vertices", graph.num_vertices()); -//! println!("The graph has {:?} edges", graph.num_edges()); +//! println!("The graph has {:?} vertices", graph.count_vertices()); +//! println!("The graph has {:?} edges", graph.count_edges()); //! 
``` use crate::{ diff --git a/raphtory/src/graph_loader/mod.rs b/raphtory/src/graph_loader/mod.rs index 34c10af92c..db9b267473 100644 --- a/raphtory/src/graph_loader/mod.rs +++ b/raphtory/src/graph_loader/mod.rs @@ -176,7 +176,7 @@ mod graph_loader_test { #[test] fn test_lotr_load_graph() { let g = crate::graph_loader::example::lotr_graph::lotr_graph(); - assert_eq!(g.num_edges(), 701); + assert_eq!(g.count_edges(), 701); } #[test] @@ -189,11 +189,11 @@ mod graph_loader_test { let g_at_max = g.at(i64::MAX); let g_at_min = g.at(i64::MIN); - assert_eq!(g_at_empty.num_vertices(), 0); - assert_eq!(g_at_start.num_vertices(), 70); - assert_eq!(g_at_another.num_vertices(), 123); - assert_eq!(g_at_max.num_vertices(), 139); - assert_eq!(g_at_min.num_vertices(), 0); + assert_eq!(g_at_empty.count_vertices(), 0); + assert_eq!(g_at_start.count_vertices(), 70); + assert_eq!(g_at_another.count_vertices(), 123); + assert_eq!(g_at_max.count_vertices(), 139); + assert_eq!(g_at_min.count_vertices(), 0); } #[test] @@ -252,7 +252,7 @@ mod graph_loader_test { fn test_all_degrees_window() { let g = crate::graph_loader::example::lotr_graph::lotr_graph(); - assert_eq!(g.num_edges(), 701); + assert_eq!(g.count_edges(), 701); assert_eq!(g.vertex("Gandalf").unwrap().degree(), 49); assert_eq!( g.vertex("Gandalf").unwrap().window(1356, 24792).degree(), @@ -277,7 +277,7 @@ mod graph_loader_test { fn test_all_neighbours_window() { let g = crate::graph_loader::example::lotr_graph::lotr_graph(); - assert_eq!(g.num_edges(), 701); + assert_eq!(g.count_edges(), 701); assert_eq!(g.vertex("Gandalf").unwrap().neighbours().iter().count(), 49); for v in g @@ -330,7 +330,7 @@ mod graph_loader_test { fn test_all_edges_window() { let g = crate::graph_loader::example::lotr_graph::lotr_graph(); - assert_eq!(g.num_edges(), 701); + assert_eq!(g.count_edges(), 701); assert_eq!(g.vertex("Gandalf").unwrap().edges().count(), 59); assert_eq!( g.vertex("Gandalf") diff --git a/raphtory/src/graph_loader/source/json_loader.rs b/raphtory/src/graph_loader/source/json_loader.rs index 16ef71a984..4162445b5b 100644 --- a/raphtory/src/graph_loader/source/json_loader.rs +++ b/raphtory/src/graph_loader/source/json_loader.rs @@ -312,8 +312,8 @@ mod tests { Ok(()) }) .expect("Unable to add vertex to graph"); - assert_eq!(g.num_vertices(), 3); - assert_eq!(g.num_edges(), 0); + assert_eq!(g.count_vertices(), 3); + assert_eq!(g.count_edges(), 0); let mut names = g.vertices().into_iter().name().collect::>(); names.sort(); assert_eq!(names, vec!["test", "testbz", "testgz"]); diff --git a/raphtory/src/graphgen/preferential_attachment.rs b/raphtory/src/graphgen/preferential_attachment.rs index 95d08e5740..f1164ffdf8 100644 --- a/raphtory/src/graphgen/preferential_attachment.rs +++ b/raphtory/src/graphgen/preferential_attachment.rs @@ -72,7 +72,7 @@ pub fn ba_preferential_attachment(graph: &Graph, vertices_to_add: usize, edges_p ids.push(max_id); } - if graph.num_edges() < edges_per_step { + if graph.count_edges() < edges_per_step { for pos in 1..ids.len() { graph .add_edge(latest_time, ids[pos], ids[pos - 1], NO_PROPS, None) @@ -125,8 +125,8 @@ mod preferential_attachment_tests { fn blank_graph() { let graph = Graph::new(); ba_preferential_attachment(&graph, 1000, 10); - assert_eq!(graph.num_edges(), 10009); - assert_eq!(graph.num_vertices(), 1010); + assert_eq!(graph.count_edges(), 10009); + assert_eq!(graph.count_vertices(), 1010); } #[test] @@ -140,8 +140,8 @@ mod preferential_attachment_tests { } ba_preferential_attachment(&graph, 1000, 5); - 
assert_eq!(graph.num_edges(), 5009); - assert_eq!(graph.num_vertices(), 1010); + assert_eq!(graph.count_edges(), 5009); + assert_eq!(graph.count_vertices(), 1010); } #[test] @@ -149,7 +149,7 @@ mod preferential_attachment_tests { let graph = Graph::new(); random_attachment(&graph, 1000, 3); ba_preferential_attachment(&graph, 500, 4); - assert_eq!(graph.num_edges(), 5000); - assert_eq!(graph.num_vertices(), 1503); + assert_eq!(graph.count_edges(), 5000); + assert_eq!(graph.count_vertices(), 1503); } } diff --git a/raphtory/src/graphgen/random_attachment.rs b/raphtory/src/graphgen/random_attachment.rs index fb19676df0..c458ac355d 100644 --- a/raphtory/src/graphgen/random_attachment.rs +++ b/raphtory/src/graphgen/random_attachment.rs @@ -81,8 +81,8 @@ mod random_graph_test { fn blank_graph() { let graph = Graph::new(); random_attachment(&graph, 100, 20); - assert_eq!(graph.num_edges(), 2000); - assert_eq!(graph.num_vertices(), 120); + assert_eq!(graph.count_edges(), 2000); + assert_eq!(graph.count_vertices(), 120); } #[test] @@ -96,8 +96,8 @@ mod random_graph_test { } random_attachment(&graph, 1000, 5); - assert_eq!(graph.num_edges(), 5000); - assert_eq!(graph.num_vertices(), 1010); + assert_eq!(graph.count_edges(), 5000); + assert_eq!(graph.count_vertices(), 1010); } #[test] @@ -105,7 +105,7 @@ mod random_graph_test { let graph = Graph::new(); ba_preferential_attachment(&graph, 300, 7); random_attachment(&graph, 4000, 12); - assert_eq!(graph.num_edges(), 50106); - assert_eq!(graph.num_vertices(), 4307); + assert_eq!(graph.count_edges(), 50106); + assert_eq!(graph.count_vertices(), 4307); } } diff --git a/raphtory/src/lib.rs b/raphtory/src/lib.rs index 00c6c90d36..8fe67586a9 100644 --- a/raphtory/src/lib.rs +++ b/raphtory/src/lib.rs @@ -57,8 +57,8 @@ //! ); //! //! // Get the in-degree, out-degree and degree of Gandalf -//! println!("Number of vertices {:?}", graph.num_vertices()); -//! println!("Number of Edges {:?}", graph.num_edges()); +//! println!("Number of vertices {:?}", graph.count_vertices()); +//! println!("Number of Edges {:?}", graph.count_edges()); //! ``` //! //! ## Supported Operating Systems diff --git a/raphtory/src/python/graph/edge.rs b/raphtory/src/python/graph/edge.rs index 017e6363f9..7dbb5914ef 100644 --- a/raphtory/src/python/graph/edge.rs +++ b/raphtory/src/python/graph/edge.rs @@ -142,6 +142,7 @@ impl PyEdge { } /// The id of the edge. + #[getter] pub fn id(&self) -> (u64, u64) { self.edge.id() } @@ -155,7 +156,6 @@ impl PyEdge { /// Returns: /// A list of timestamps. /// - pub fn history(&self) -> Vec { self.edge.history() } @@ -170,6 +170,7 @@ impl PyEdge { /// /// Returns: /// The source vertex of the Edge. + #[getter] fn src(&self) -> PyVertex { self.edge.src().into() } @@ -178,6 +179,7 @@ impl PyEdge { /// /// Returns: /// The destination vertex of the Edge. + #[getter] fn dst(&self) -> PyVertex { self.edge.dst().into() } @@ -188,6 +190,7 @@ impl PyEdge { /// /// Returns: /// The start time of the Edge. + #[getter] pub fn start(&self) -> Option { self.edge.start() } @@ -196,6 +199,7 @@ impl PyEdge { /// /// Returns: /// the start datetime of the Edge. + #[getter] pub fn start_date_time(&self) -> Option { let start_time = self.edge.start()?; NaiveDateTime::from_timestamp_millis(start_time) @@ -205,6 +209,7 @@ impl PyEdge { /// /// Returns: /// The end time of the Edge. 
+ #[getter] pub fn end(&self) -> Option { self.edge.end() } @@ -213,6 +218,7 @@ impl PyEdge { /// /// Returns: /// The end datetime of the Edge + #[getter] pub fn end_date_time(&self) -> Option { let end_time = self.edge.end()?; NaiveDateTime::from_timestamp_millis(end_time) @@ -328,6 +334,7 @@ impl PyEdge { /// /// Returns: /// (int) The earliest time of an edge + #[getter] pub fn earliest_time(&self) -> Option { self.edge.earliest_time() } @@ -336,6 +343,7 @@ impl PyEdge { /// /// Returns: /// the earliest datetime of an edge + #[getter] pub fn earliest_date_time(&self) -> Option { NaiveDateTime::from_timestamp_millis(self.edge.earliest_time()?) } @@ -344,6 +352,7 @@ impl PyEdge { /// /// Returns: /// (int) The latest time of an edge + #[getter] pub fn latest_time(&self) -> Option { self.edge.latest_time() } @@ -352,6 +361,7 @@ impl PyEdge { /// /// Returns: /// the latest datetime of an edge + #[getter] pub fn latest_date_time(&self) -> Option { let latest_time = self.edge.latest_time()?; NaiveDateTime::from_timestamp_millis(latest_time) @@ -361,6 +371,7 @@ impl PyEdge { /// /// Returns: /// (int) The time of an exploded edge + #[getter] pub fn time(&self) -> Option { self.edge.time() } @@ -368,7 +379,8 @@ impl PyEdge { /// Gets the names of the layers this edge belongs to /// /// Returns: - /// ([str]) The name of the layer + /// (str) The name of the layer + #[getter] pub fn layer_names(&self) -> Vec { self.edge.layer_names() } @@ -377,6 +389,7 @@ impl PyEdge { /// /// Returns: /// ([str]) The name of the layer + #[getter] pub fn layer_name(&self) -> Option { self.edge.layer_name() } @@ -385,6 +398,7 @@ impl PyEdge { /// /// Returns: /// (datetime) the datetime of an exploded edge + #[getter] pub fn date_time(&self) -> Option { let date_time = self.edge.time()?; NaiveDateTime::from_timestamp_millis(date_time) @@ -489,20 +503,18 @@ impl PyEdges { self.py_iter().into() } - fn __len__(&self) -> usize { - self.iter().count() - } - /// Returns all source vertices of the Edges as an iterable. /// /// Returns: /// The source vertices of the Edges as an iterable. + #[getter] fn src(&self) -> PyVertexIterable { let builder = self.builder.clone(); (move || builder().src()).into() } /// Returns all destination vertices as an iterable + #[getter] fn dst(&self) -> PyVertexIterable { let builder = self.builder.clone(); (move || builder().dst()).into() @@ -513,11 +525,6 @@ impl PyEdges { self.py_iter().collect() } - /// Returns the first edge - fn first(&self) -> Option { - self.py_iter().next() - } - /// Returns the number of edges fn count(&self) -> usize { self.py_iter().count() @@ -549,6 +556,7 @@ impl PyEdges { } /// Returns the earliest time of the edges. + #[getter] fn earliest_time(&self) -> OptionI64Iterable { let edges: Arc< dyn Fn() -> Box> + Send> + Send + Sync, @@ -557,6 +565,7 @@ impl PyEdges { } /// Returns the latest time of the edges. + #[getter] fn latest_time(&self) -> OptionI64Iterable { let edges: Arc< dyn Fn() -> Box> + Send> + Send + Sync, @@ -572,6 +581,7 @@ impl PyEdges { } /// Returns all ids of the edges. + #[getter] fn id(&self) -> PyGenericIterable { let edges = self.builder.clone(); (move || edges().id()).into() @@ -603,25 +613,29 @@ impl PyNestedEdges { /// Returns all source vertices of the Edges as an iterable. /// /// Returns: - /// The source vertices of the Edges as an iterable. + /// The source verticeÍs of the Edges as an iterable. 
+ #[getter] fn src(&self) -> PyNestedVertexIterable { let builder = self.builder.clone(); (move || builder().src()).into() } /// Returns all destination vertices as an iterable + #[getter] fn dst(&self) -> PyNestedVertexIterable { let builder = self.builder.clone(); (move || builder().dst()).into() } /// Returns the earliest time of the edges. + #[getter] fn earliest_time(&self) -> NestedOptionI64Iterable { let edges = self.builder.clone(); (move || edges().earliest_time()).into() } /// Returns the latest time of the edges. + #[getter] fn latest_time(&self) -> NestedOptionI64Iterable { let edges = self.builder.clone(); (move || edges().latest_time()).into() @@ -636,6 +650,7 @@ impl PyNestedEdges { } /// Returns all ids of the edges. + #[getter] fn id(&self) -> NestedU64U64Iterable { let edges = self.builder.clone(); (move || edges().id()).into() diff --git a/raphtory/src/python/graph/properties/temporal_props.rs b/raphtory/src/python/graph/properties/temporal_props.rs index 1e5b463f12..4443abc577 100644 --- a/raphtory/src/python/graph/properties/temporal_props.rs +++ b/raphtory/src/python/graph/properties/temporal_props.rs @@ -265,12 +265,8 @@ impl PyTemporalProp { it_iter.fold(first, |acc, elem| if acc.1 >= elem.1 { acc } else { elem }) } - pub fn len(&self) -> usize { - self.prop.iter().count() - } - pub fn count(&self) -> usize { - self.len() + self.prop.iter().count() } pub fn average(&self) -> Option { @@ -279,7 +275,7 @@ impl PyTemporalProp { pub fn mean(&self) -> Option { let sum: Prop = self.sum(); - let count: usize = self.len(); + let count: usize = self.count(); if count == 0 { return None; } @@ -548,6 +544,7 @@ py_iterable_comp!( #[pymethods] impl PyTemporalPropList { + #[getter] pub fn history(&self) -> PyPropHistList { let builder = self.builder.clone(); (move || builder().map(|p| p.map(|v| v.history()).unwrap_or_default())).into() @@ -727,6 +724,7 @@ py_iterable_comp!( #[pymethods] impl PyTemporalPropListList { + #[getter] pub fn history(&self) -> PyPropHistListList { let builder = self.builder.clone(); (move || builder().map(|it| it.map(|p| p.map(|v| v.history()).unwrap_or_default()))).into() @@ -869,8 +867,8 @@ impl PropIterable { } } - pub fn len(&self) -> usize { - self.collect().len() + pub fn count(&self) -> usize { + self.iter().count() } pub fn min(&self) -> PropValue { @@ -971,10 +969,6 @@ impl PyPropHistValueList { .into() } - pub fn len(&self) -> UsizeIterable { - self.count() - } - pub fn median(&self) -> PyPropValueList { let builder = self.builder.clone(); (move || { @@ -1050,12 +1044,8 @@ impl PyPropValueList { .flatten() } - pub fn len(&self) -> usize { - self.collect().len() - } - pub fn count(&self) -> usize { - self.len() + self.iter().count() } pub fn min(&self) -> PropValue { @@ -1248,10 +1238,6 @@ impl PyPropValueListList { (move || builder().map(|it| it.count())).into() } - pub fn len(&self) -> UsizeIterable { - self.count() - } - pub fn drop_none(&self) -> PyPropValueListList { let builder = self.builder.clone(); (move || builder().map(|it| it.filter(|x| x.is_some()))).into() diff --git a/raphtory/src/python/graph/vertex.rs b/raphtory/src/python/graph/vertex.rs index 133874173f..0cee2bcbb8 100644 --- a/raphtory/src/python/graph/vertex.rs +++ b/raphtory/src/python/graph/vertex.rs @@ -113,6 +113,7 @@ impl PyVertex { /// /// Returns: /// The id of the vertex as an integer. + #[getter] pub fn id(&self) -> u64 { self.vertex.id() } @@ -121,6 +122,7 @@ impl PyVertex { /// /// Returns: /// The name of the vertex as a string. 
+ #[getter] pub fn name(&self) -> String { self.vertex.name() } @@ -132,6 +134,7 @@ impl PyVertex { /// /// Returns: /// The earliest time that the vertex exists as an integer. + #[getter] pub fn earliest_time(&self) -> Option { self.vertex.earliest_time() } @@ -143,6 +146,7 @@ impl PyVertex { /// /// Returns: /// The earliest datetime that the vertex exists as an integer. + #[getter] pub fn earliest_date_time(&self) -> Option { let earliest_time = self.vertex.earliest_time()?; NaiveDateTime::from_timestamp_millis(earliest_time) @@ -152,6 +156,7 @@ impl PyVertex { /// /// Returns: /// The latest time that the vertex exists as an integer. + #[getter] pub fn latest_time(&self) -> Option { self.vertex.latest_time() } @@ -163,6 +168,7 @@ impl PyVertex { /// /// Returns: /// The latest datetime that the vertex exists as an integer. + #[getter] pub fn latest_date_time(&self) -> Option { let latest_time = self.vertex.latest_time()?; NaiveDateTime::from_timestamp_millis(latest_time) @@ -202,6 +208,7 @@ impl PyVertex { /// /// Returns: /// A list of `Edge` objects. + #[getter] pub fn edges(&self) -> PyEdges { let vertex = self.vertex.clone(); (move || vertex.edges()).into() @@ -211,6 +218,7 @@ impl PyVertex { /// /// Returns: /// A list of `Edge` objects. + #[getter] pub fn in_edges(&self) -> PyEdges { let vertex = self.vertex.clone(); (move || vertex.in_edges()).into() @@ -220,6 +228,7 @@ impl PyVertex { /// /// Returns: /// A list of `Edge` objects. + #[getter] pub fn out_edges(&self) -> PyEdges { let vertex = self.vertex.clone(); (move || vertex.out_edges()).into() @@ -230,6 +239,7 @@ impl PyVertex { /// Returns: /// /// A list of `Vertex` objects. + #[getter] pub fn neighbours(&self) -> PyPathFromVertex { self.vertex.neighbours().into() } @@ -238,6 +248,7 @@ impl PyVertex { /// /// Returns: /// A list of `Vertex` objects. + #[getter] pub fn in_neighbours(&self) -> PyPathFromVertex { self.vertex.in_neighbours().into() } @@ -246,6 +257,7 @@ impl PyVertex { /// /// Returns: /// A list of `Vertex` objects. + #[getter] pub fn out_neighbours(&self) -> PyPathFromVertex { self.vertex.out_neighbours().into() } @@ -256,6 +268,7 @@ impl PyVertex { /// /// Returns: /// The earliest time that this vertex is valid or None if the vertex is valid for all times. + #[getter] pub fn start(&self) -> Option { self.vertex.start() } @@ -264,6 +277,7 @@ impl PyVertex { /// /// Returns: /// The earliest datetime that this vertex is valid or None if the vertex is valid for all times. + #[getter] pub fn start_date_time(&self) -> Option { let start_time = self.vertex.start()?; NaiveDateTime::from_timestamp_millis(start_time) @@ -273,6 +287,7 @@ impl PyVertex { /// /// Returns: /// The latest time that this vertex is valid or None if the vertex is valid for all times. + #[getter] pub fn end(&self) -> Option { self.vertex.end() } @@ -281,6 +296,7 @@ impl PyVertex { /// /// Returns: /// The latest datetime that this vertex is valid or None if the vertex is valid for all times. 
+ #[getter] pub fn end_date_time(&self) -> Option { let end_time = self.vertex.end()?; NaiveDateTime::from_timestamp_millis(end_time) @@ -544,24 +560,28 @@ impl PyVertices { } /// Returns an iterator over the vertices ids + #[getter] fn id(&self) -> U64Iterable { let vertices = self.vertices.clone(); (move || vertices.id()).into() } /// Returns an iterator over the vertices name + #[getter] fn name(&self) -> StringIterable { let vertices = self.vertices.clone(); (move || vertices.name()).into() } /// Returns an iterator over the vertices earliest time + #[getter] fn earliest_time(&self) -> OptionI64Iterable { let vertices = self.vertices.clone(); (move || vertices.earliest_time()).into() } /// Returns an iterator over the vertices latest time + #[getter] fn latest_time(&self) -> OptionI64Iterable { let vertices = self.vertices.clone(); (move || vertices.latest_time()).into() @@ -604,6 +624,7 @@ impl PyVertices { /// /// Returns: /// An iterator of edges of the vertices + #[getter] fn edges(&self) -> PyNestedEdges { let clone = self.vertices.clone(); (move || clone.edges()).into() @@ -613,6 +634,7 @@ impl PyVertices { /// /// Returns: /// An iterator of in edges of the vertices + #[getter] fn in_edges(&self) -> PyNestedEdges { let clone = self.vertices.clone(); (move || clone.in_edges()).into() @@ -622,6 +644,7 @@ impl PyVertices { /// /// Returns: /// An iterator of out edges of the vertices + #[getter] fn out_edges(&self) -> PyNestedEdges { let clone = self.vertices.clone(); (move || clone.out_edges()).into() @@ -631,6 +654,7 @@ impl PyVertices { /// /// Returns: /// An iterator of the neighbours of the vertices + #[getter] fn neighbours(&self) -> PyPathFromGraph { self.vertices.neighbours().into() } @@ -639,6 +663,7 @@ impl PyVertices { /// /// Returns: /// An iterator of the in neighbours of the vertices + #[getter] fn in_neighbours(&self) -> PyPathFromGraph { self.vertices.in_neighbours().into() } @@ -647,6 +672,7 @@ impl PyVertices { /// /// Returns: /// An iterator of the out neighbours of the vertices + #[getter] fn out_neighbours(&self) -> PyPathFromGraph { self.vertices.out_neighbours().into() } @@ -658,16 +684,19 @@ impl PyVertices { //***** Perspective APIS ******// /// Returns the start time of the vertices + #[getter] pub fn start(&self) -> Option { self.vertices.start() } /// Returns the end time of the vertices + #[getter] pub fn end(&self) -> Option { self.vertices.end() } #[doc = window_size_doc_string!()] + #[getter] pub fn window_size(&self) -> Option { self.vertices.window_size() } @@ -771,10 +800,6 @@ impl PyVertices { .ok_or_else(|| PyIndexError::new_err("Vertex does not exist")) } - pub fn __call__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { - slf - } - pub fn __repr__(&self) -> String { self.repr() } @@ -800,21 +825,25 @@ impl PyPathFromGraph { fn collect(&self) -> Vec> { self.__iter__().into_iter().map(|it| it.collect()).collect() } + #[getter] fn id(&self) -> NestedU64Iterable { let path = self.path.clone(); (move || path.id()).into() } + #[getter] fn name(&self) -> NestedStringIterable { let path = self.path.clone(); (move || path.name()).into() } + #[getter] fn earliest_time(&self) -> NestedOptionI64Iterable { let path = self.path.clone(); (move || path.earliest_time()).into() } + #[getter] fn latest_time(&self) -> NestedOptionI64Iterable { let path = self.path.clone(); (move || path.latest_time()).into() @@ -841,43 +870,52 @@ impl PyPathFromGraph { (move || path.out_degree()).into() } + #[getter] fn edges(&self) -> PyNestedEdges { let clone = self.path.clone(); 
(move || clone.edges()).into() } + #[getter] fn in_edges(&self) -> PyNestedEdges { let clone = self.path.clone(); (move || clone.in_edges()).into() } + #[getter] fn out_edges(&self) -> PyNestedEdges { let clone = self.path.clone(); (move || clone.out_edges()).into() } + #[getter] fn out_neighbours(&self) -> Self { self.path.out_neighbours().into() } + #[getter] fn in_neighbours(&self) -> Self { self.path.in_neighbours().into() } + #[getter] fn neighbours(&self) -> Self { self.path.neighbours().into() } //****** Perspective APIS ******// + #[getter] pub fn start(&self) -> Option { self.path.start() } + #[getter] pub fn end(&self) -> Option { self.path.end() } #[doc = window_size_doc_string!()] + #[getter] pub fn window_size(&self) -> Option { self.path.window_size() } @@ -994,21 +1032,25 @@ impl PyPathFromVertex { self.__iter__().into_iter().collect() } + #[getter] fn id(&self) -> U64Iterable { let path = self.path.clone(); (move || path.id()).into() } + #[getter] fn name(&self) -> StringIterable { let path = self.path.clone(); (move || path.name()).into() } + #[getter] fn earliest_time(&self) -> OptionI64Iterable { let path = self.path.clone(); (move || path.earliest_time()).into() } + #[getter] fn latest_time(&self) -> OptionI64Iterable { let path = self.path.clone(); (move || path.latest_time()).into() @@ -1035,43 +1077,52 @@ impl PyPathFromVertex { (move || path.degree()).into() } + #[getter] fn edges(&self) -> PyEdges { let path = self.path.clone(); (move || path.edges()).into() } + #[getter] fn in_edges(&self) -> PyEdges { let path = self.path.clone(); (move || path.in_edges()).into() } + #[getter] fn out_edges(&self) -> PyEdges { let path = self.path.clone(); (move || path.out_edges()).into() } + #[getter] fn out_neighbours(&self) -> Self { self.path.out_neighbours().into() } + #[getter] fn in_neighbours(&self) -> Self { self.path.in_neighbours().into() } + #[getter] fn neighbours(&self) -> Self { self.path.neighbours().into() } //****** Perspective APIS ******// + #[getter] pub fn start(&self) -> Option { self.path.start() } + #[getter] pub fn end(&self) -> Option { self.path.end() } #[doc = window_size_doc_string!()] + #[getter] pub fn window_size(&self) -> Option { self.path.window_size() } @@ -1211,21 +1262,25 @@ py_iterable!(PyVertexIterable, VertexView, PyVertex); #[pymethods] impl PyVertexIterable { + #[getter] fn id(&self) -> U64Iterable { let builder = self.builder.clone(); (move || builder().id()).into() } + #[getter] fn name(&self) -> StringIterable { let vertices = self.builder.clone(); (move || vertices().name()).into() } + #[getter] fn earliest_time(&self) -> OptionI64Iterable { let vertices = self.builder.clone(); (move || vertices().earliest_time()).into() } + #[getter] fn latest_time(&self) -> OptionI64Iterable { let vertices = self.builder.clone(); (move || vertices().latest_time()).into() @@ -1252,31 +1307,37 @@ impl PyVertexIterable { (move || vertices().out_degree()).into() } + #[getter] fn edges(&self) -> PyEdges { let clone = self.builder.clone(); (move || clone().edges()).into() } + #[getter] fn in_edges(&self) -> PyEdges { let clone = self.builder.clone(); (move || clone().in_edges()).into() } + #[getter] fn out_edges(&self) -> PyEdges { let clone = self.builder.clone(); (move || clone().out_edges()).into() } + #[getter] fn out_neighbours(&self) -> Self { let builder = self.builder.clone(); (move || builder().out_neighbours()).into() } + #[getter] fn in_neighbours(&self) -> Self { let builder = self.builder.clone(); (move || builder().in_neighbours()).into() 
} + #[getter] fn neighbours(&self) -> Self { let builder = self.builder.clone(); (move || builder().neighbours()).into() @@ -1287,21 +1348,25 @@ py_nested_iterable!(PyNestedVertexIterable, VertexView); #[pymethods] impl PyNestedVertexIterable { + #[getter] fn id(&self) -> NestedU64Iterable { let builder = self.builder.clone(); (move || builder().id()).into() } + #[getter] fn name(&self) -> NestedStringIterable { let vertices = self.builder.clone(); (move || vertices().name()).into() } + #[getter] fn earliest_time(&self) -> NestedOptionI64Iterable { let vertices = self.builder.clone(); (move || vertices().earliest_time()).into() } + #[getter] fn latest_time(&self) -> NestedOptionI64Iterable { let vertices = self.builder.clone(); (move || vertices().latest_time()).into() @@ -1328,31 +1393,37 @@ impl PyNestedVertexIterable { (move || vertices().out_degree()).into() } + #[getter] fn edges(&self) -> PyNestedEdges { let clone = self.builder.clone(); (move || clone().edges()).into() } + #[getter] fn in_edges(&self) -> PyNestedEdges { let clone = self.builder.clone(); (move || clone().in_edges()).into() } + #[getter] fn out_edges(&self) -> PyNestedEdges { let clone = self.builder.clone(); (move || clone().out_edges()).into() } + #[getter] fn out_neighbours(&self) -> Self { let builder = self.builder.clone(); (move || builder().out_neighbours()).into() } + #[getter] fn in_neighbours(&self) -> Self { let builder = self.builder.clone(); (move || builder().in_neighbours()).into() } + #[getter] fn neighbours(&self) -> Self { let builder = self.builder.clone(); (move || builder().neighbours()).into() diff --git a/raphtory/src/python/graph/views/graph_view.rs b/raphtory/src/python/graph/views/graph_view.rs index 2e3bbed8be..624e2923cf 100644 --- a/raphtory/src/python/graph/views/graph_view.rs +++ b/raphtory/src/python/graph/views/graph_view.rs @@ -102,8 +102,9 @@ impl IntoPy for VertexSubgraph { #[pymethods] impl PyGraphView { /// Return all the layer ids in the graph - pub fn get_unique_layers(&self) -> Vec { - self.graph.get_unique_layers() + #[getter] + pub fn unique_layers(&self) -> Vec { + self.graph.unique_layers() } //****** Metrics APIs ******// @@ -112,6 +113,7 @@ impl PyGraphView { /// /// Returns: /// the timestamp of the earliest activity in the graph + #[getter] pub fn earliest_time(&self) -> Option { self.graph.earliest_time() } @@ -120,6 +122,7 @@ impl PyGraphView { /// /// Returns: /// the datetime of the earliest activity in the graph + #[getter] pub fn earliest_date_time(&self) -> Option { let earliest_time = self.graph.earliest_time()?; NaiveDateTime::from_timestamp_millis(earliest_time) @@ -129,6 +132,7 @@ impl PyGraphView { /// /// Returns: /// the timestamp of the latest activity in the graph + #[getter] pub fn latest_time(&self) -> Option { self.graph.latest_time() } @@ -137,6 +141,7 @@ impl PyGraphView { /// /// Returns: /// the datetime of the latest activity in the graph + #[getter] pub fn latest_date_time(&self) -> Option { let latest_time = self.graph.latest_time()?; NaiveDateTime::from_timestamp_millis(latest_time) @@ -146,24 +151,24 @@ impl PyGraphView { /// /// Returns: /// the number of edges in the graph - pub fn num_edges(&self) -> usize { - self.graph.num_edges() + pub fn count_edges(&self) -> usize { + self.graph.count_edges() } /// Number of edges in the graph /// /// Returns: /// the number of temporal edges in the graph - pub fn num_temporal_edges(&self) -> usize { - self.graph.num_temporal_edges() + pub fn count_temporal_edges(&self) -> usize { + 
self.graph.count_temporal_edges() } /// Number of vertices in the graph /// /// Returns: /// the number of vertices in the graph - pub fn num_vertices(&self) -> usize { - self.graph.num_vertices() + pub fn count_vertices(&self) -> usize { + self.graph.count_vertices() } /// Returns true if the graph contains the specified vertex @@ -242,6 +247,7 @@ impl PyGraphView { /// /// Returns: /// the default start time for perspectives over the view + #[getter] pub fn start(&self) -> Option { self.graph.start() } @@ -250,6 +256,7 @@ impl PyGraphView { /// /// Returns: /// the default start datetime for perspectives over the view + #[getter] pub fn start_date_time(&self) -> Option { let start_time = self.graph.start()?; NaiveDateTime::from_timestamp_millis(start_time) @@ -259,6 +266,7 @@ impl PyGraphView { /// /// Returns: /// the default end time for perspectives over the view + #[getter] pub fn end(&self) -> Option { self.graph.end() } @@ -272,6 +280,7 @@ impl PyGraphView { /// /// Returns: /// the default end datetime for perspectives over the view + #[getter] pub fn end_date_time(&self) -> Option { let end_time = self.graph.end()?; NaiveDateTime::from_timestamp_millis(end_time) @@ -407,9 +416,9 @@ impl PyGraphView { impl Repr for PyGraphView { fn repr(&self) -> String { - let num_edges = self.graph.num_edges(); - let num_vertices = self.graph.num_vertices(); - let num_temporal_edges: usize = self.graph.num_temporal_edges(); + let num_edges = self.graph.count_edges(); + let num_vertices = self.graph.count_vertices(); + let num_temporal_edges: usize = self.graph.count_temporal_edges(); let earliest_time = self.graph.earliest_time().repr(); let latest_time = self.graph.latest_time().repr(); let properties: String = self diff --git a/raphtory/src/search/mod.rs b/raphtory/src/search/mod.rs index 611b4a8742..3bbea3f0ba 100644 --- a/raphtory/src/search/mod.rs +++ b/raphtory/src/search/mod.rs @@ -322,7 +322,7 @@ impl IndexedGraph { let writer = Arc::new(parking_lot::RwLock::new(index.writer(100_000_000)?)); - let v_ids = (0..g.num_vertices()).collect::>(); + let v_ids = (0..g.count_vertices()).collect::>(); v_ids.par_chunks(128).try_for_each(|v_ids| { let writer_lock = writer.clone(); @@ -459,7 +459,7 @@ impl IndexedGraph { let writer = Arc::new(parking_lot::RwLock::new(index.writer(100_000_000)?)); - let e_ids = (0..g.num_edges()).collect::>(); + let e_ids = (0..g.count_edges()).collect::>(); let edge_filter = g.edge_filter(); e_ids.par_chunks(128).try_for_each(|e_ids| { let writer_lock = writer.clone(); @@ -744,7 +744,7 @@ mod test { #[ignore = "this test is for experiments with the jira graph"] fn load_jira_graph() -> Result<(), GraphError> { let graph = Graph::load_from_file("/tmp/graphs/jira").expect("failed to load graph"); - assert!(graph.num_vertices() > 0); + assert!(graph.count_vertices() > 0); let now = SystemTime::now();