From 56672c1a1f65cb8917d639de5085273b81761e0b Mon Sep 17 00:00:00 2001
From: Andrei Ivanov
Date: Thu, 30 Jan 2025 16:14:30 -0800
Subject: [PATCH 1/2] Addressing warnings related to the `multi_class`
 parameter in `LogisticRegression`.

---
 .../distributed/graphsage/node_classification_unsupervised.py | 2 +-
 examples/pytorch/graphsage/advanced/model.py | 2 +-
 examples/pytorch/graphsage/dist/train_dist_unsupervised.py | 2 +-
 examples/pytorch/metapath2vec/test.py | 4 ++--
 examples/pytorch/multigpu/multi_gpu_link_prediction.py | 2 +-
 examples/pytorch/node2vec/model.py | 2 +-
 6 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/examples/distributed/graphsage/node_classification_unsupervised.py b/examples/distributed/graphsage/node_classification_unsupervised.py
index 4b71f599064b..2b030646bf28 100644
--- a/examples/distributed/graphsage/node_classification_unsupervised.py
+++ b/examples/distributed/graphsage/node_classification_unsupervised.py
@@ -175,7 +175,7 @@ def compute_acc(emb, labels, train_nids, val_nids, test_nids):
     labels = labels.cpu().numpy()
 
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], labels[train_nids])
 
     pred = lr.predict(emb)
diff --git a/examples/pytorch/graphsage/advanced/model.py b/examples/pytorch/graphsage/advanced/model.py
index 7dede9643cc4..edb965b78936 100644
--- a/examples/pytorch/graphsage/advanced/model.py
+++ b/examples/pytorch/graphsage/advanced/model.py
@@ -104,7 +104,7 @@ def compute_acc_unsupervised(emb, labels, train_nids, val_nids, test_nids):
 
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
 
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], train_labels)
 
     pred = lr.predict(emb)
diff --git a/examples/pytorch/graphsage/dist/train_dist_unsupervised.py b/examples/pytorch/graphsage/dist/train_dist_unsupervised.py
index f3eb1a24305b..24d737f4f280 100644
--- a/examples/pytorch/graphsage/dist/train_dist_unsupervised.py
+++ b/examples/pytorch/graphsage/dist/train_dist_unsupervised.py
@@ -175,7 +175,7 @@ def compute_acc(emb, labels, train_nids, val_nids, test_nids):
     labels = labels.cpu().numpy()
 
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], labels[train_nids])
 
     pred = lr.predict(emb)
diff --git a/examples/pytorch/metapath2vec/test.py b/examples/pytorch/metapath2vec/test.py
index 7f293daa1b7f..6ec68f86c7f1 100644
--- a/examples/pytorch/metapath2vec/test.py
+++ b/examples/pytorch/metapath2vec/test.py
@@ -95,11 +95,11 @@
     file.close()
     print("beging predicting")
     clf_venue = LogisticRegression(
-        random_state=0, solver="lbfgs", multi_class="multinomial"
+        random_state=0, solver="lbfgs",
     ).fit(venue_training, venue_label)
     y_pred_venue = clf_venue.predict(venue_testing)
     clf_author = LogisticRegression(
-        random_state=0, solver="lbfgs", multi_class="multinomial"
+        random_state=0, solver="lbfgs",
     ).fit(author_training, author_label)
     y_pred_author = clf_author.predict(author_testing)
     macro_average_venue += f1_score(
         venue_true, y_pred_venue, average="macro"
     )
diff --git a/examples/pytorch/multigpu/multi_gpu_link_prediction.py b/examples/pytorch/multigpu/multi_gpu_link_prediction.py
index bb0c64f75c4f..52b5204e4c8e 100644
--- a/examples/pytorch/multigpu/multi_gpu_link_prediction.py
+++ b/examples/pytorch/multigpu/multi_gpu_link_prediction.py
@@ -142,7 +142,7 @@ def compute_acc_unsupervised(emb, labels, train_nids, val_nids, test_nids):
     test_nids = test_nids.cpu().numpy()
     test_labels = labels[test_nids]
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], train_labels)
     pred = lr.predict(emb)
     f1_micro_eval = skm.f1_score(val_labels, pred[val_nids], average="micro")
diff --git a/examples/pytorch/node2vec/model.py b/examples/pytorch/node2vec/model.py
index 3b2db93801c0..2dce23225d3e 100644
--- a/examples/pytorch/node2vec/model.py
+++ b/examples/pytorch/node2vec/model.py
@@ -191,7 +191,7 @@ def evaluate(self, x_train, y_train, x_val, y_val):
         x_train, y_train = x_train.cpu().numpy(), y_train.cpu().numpy()
         x_val, y_val = x_val.cpu().numpy(), y_val.cpu().numpy()
         lr = LogisticRegression(
-            solver="lbfgs", multi_class="auto", max_iter=150
+            solver="lbfgs", max_iter=150
         ).fit(x_train, y_train)
         return lr.score(x_val, y_val)
 

From f0e415ef36670ccc9049c5ff581d4ac8ee073a74 Mon Sep 17 00:00:00 2001
From: Andrei Ivanov
Date: Thu, 30 Jan 2025 20:27:42 -0800
Subject: [PATCH 2/2] Fixing lint problems

---
 examples/pytorch/metapath2vec/test.py | 12 ++++++------
 examples/pytorch/node2vec/model.py | 6 +++---
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/examples/pytorch/metapath2vec/test.py b/examples/pytorch/metapath2vec/test.py
index 6ec68f86c7f1..ca6ab0ae268f 100644
--- a/examples/pytorch/metapath2vec/test.py
+++ b/examples/pytorch/metapath2vec/test.py
@@ -94,13 +94,13 @@
     author_true = np.array(author_true)
     file.close()
     print("beging predicting")
-    clf_venue = LogisticRegression(
-        random_state=0, solver="lbfgs",
-    ).fit(venue_training, venue_label)
+    clf_venue = LogisticRegression(random_state=0, solver="lbfgs").fit(
+        venue_training, venue_label
+    )
     y_pred_venue = clf_venue.predict(venue_testing)
-    clf_author = LogisticRegression(
-        random_state=0, solver="lbfgs",
-    ).fit(author_training, author_label)
+    clf_author = LogisticRegression(random_state=0, solver="lbfgs").fit(
+        author_training, author_label
+    )
     y_pred_author = clf_author.predict(author_testing)
     macro_average_venue += f1_score(
         venue_true, y_pred_venue, average="macro"
     )
diff --git a/examples/pytorch/node2vec/model.py b/examples/pytorch/node2vec/model.py
index 2dce23225d3e..282799bbe1fb 100644
--- a/examples/pytorch/node2vec/model.py
+++ b/examples/pytorch/node2vec/model.py
@@ -190,9 +190,9 @@ def evaluate(self, x_train, y_train, x_val, y_val):
 
         x_train, y_train = x_train.cpu().numpy(), y_train.cpu().numpy()
         x_val, y_val = x_val.cpu().numpy(), y_val.cpu().numpy()
-        lr = LogisticRegression(
-            solver="lbfgs", max_iter=150
-        ).fit(x_train, y_train)
+        lr = LogisticRegression(solver="lbfgs", max_iter=150).fit(
+            x_train, y_train
+        )
         return lr.score(x_val, y_val)
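
Note on the change itself (not part of the patch series): since scikit-learn 1.5 the `multi_class` parameter of `LogisticRegression` is deprecated and scheduled for removal in 1.7, and with the lbfgs solver (the default) a multiclass target is already fitted with the multinomial loss. Dropping `multi_class="multinomial"` (or `"auto"`) therefore silences the deprecation warning without changing the fitted model. A minimal sketch of that equivalence, using synthetic stand-in embeddings and labels rather than DGL outputs:

import numpy as np
from sklearn.linear_model import LogisticRegression

# Placeholder data: random "node embeddings" and 5-class labels.
rng = np.random.default_rng(0)
emb = rng.normal(size=(200, 16))
labels = rng.integers(0, 5, size=200)

# Same normalization as in the examples touched by the patch.
emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)

# No multi_class argument: lbfgs handles the multiclass target with the
# multinomial loss, matching the removed multi_class="multinomial" setting.
lr = LogisticRegression(max_iter=10000).fit(emb, labels)
print(lr.score(emb, labels))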