diff --git a/examples/distributed/graphsage/node_classification_unsupervised.py b/examples/distributed/graphsage/node_classification_unsupervised.py
index 4b71f599064b..2b030646bf28 100644
--- a/examples/distributed/graphsage/node_classification_unsupervised.py
+++ b/examples/distributed/graphsage/node_classification_unsupervised.py
@@ -175,7 +175,7 @@ def compute_acc(emb, labels, train_nids, val_nids, test_nids):
     labels = labels.cpu().numpy()
 
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], labels[train_nids])
 
     pred = lr.predict(emb)
diff --git a/examples/pytorch/graphsage/advanced/model.py b/examples/pytorch/graphsage/advanced/model.py
index 7dede9643cc4..edb965b78936 100644
--- a/examples/pytorch/graphsage/advanced/model.py
+++ b/examples/pytorch/graphsage/advanced/model.py
@@ -104,7 +104,7 @@ def compute_acc_unsupervised(emb, labels, train_nids, val_nids, test_nids):
 
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
 
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], train_labels)
 
     pred = lr.predict(emb)
diff --git a/examples/pytorch/graphsage/dist/train_dist_unsupervised.py b/examples/pytorch/graphsage/dist/train_dist_unsupervised.py
index f3eb1a24305b..24d737f4f280 100644
--- a/examples/pytorch/graphsage/dist/train_dist_unsupervised.py
+++ b/examples/pytorch/graphsage/dist/train_dist_unsupervised.py
@@ -175,7 +175,7 @@ def compute_acc(emb, labels, train_nids, val_nids, test_nids):
     labels = labels.cpu().numpy()
 
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], labels[train_nids])
 
     pred = lr.predict(emb)
diff --git a/examples/pytorch/metapath2vec/test.py b/examples/pytorch/metapath2vec/test.py
index 7f293daa1b7f..ca6ab0ae268f 100644
--- a/examples/pytorch/metapath2vec/test.py
+++ b/examples/pytorch/metapath2vec/test.py
@@ -94,13 +94,13 @@
     author_true = np.array(author_true)
     file.close()
     print("beging predicting")
-    clf_venue = LogisticRegression(
-        random_state=0, solver="lbfgs", multi_class="multinomial"
-    ).fit(venue_training, venue_label)
+    clf_venue = LogisticRegression(random_state=0, solver="lbfgs").fit(
+        venue_training, venue_label
+    )
     y_pred_venue = clf_venue.predict(venue_testing)
-    clf_author = LogisticRegression(
-        random_state=0, solver="lbfgs", multi_class="multinomial"
-    ).fit(author_training, author_label)
+    clf_author = LogisticRegression(random_state=0, solver="lbfgs").fit(
+        author_training, author_label
+    )
     y_pred_author = clf_author.predict(author_testing)
     macro_average_venue += f1_score(
         venue_true, y_pred_venue, average="macro"
diff --git a/examples/pytorch/multigpu/multi_gpu_link_prediction.py b/examples/pytorch/multigpu/multi_gpu_link_prediction.py
index bb0c64f75c4f..52b5204e4c8e 100644
--- a/examples/pytorch/multigpu/multi_gpu_link_prediction.py
+++ b/examples/pytorch/multigpu/multi_gpu_link_prediction.py
@@ -142,7 +142,7 @@ def compute_acc_unsupervised(emb, labels, train_nids, val_nids, test_nids):
     test_nids = test_nids.cpu().numpy()
     test_labels = labels[test_nids]
     emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
-    lr = lm.LogisticRegression(multi_class="multinomial", max_iter=10000)
+    lr = lm.LogisticRegression(max_iter=10000)
     lr.fit(emb[train_nids], train_labels)
     pred = lr.predict(emb)
     f1_micro_eval = skm.f1_score(val_labels, pred[val_nids], average="micro")
diff --git a/examples/pytorch/node2vec/model.py b/examples/pytorch/node2vec/model.py
index 3b2db93801c0..282799bbe1fb 100644
--- a/examples/pytorch/node2vec/model.py
+++ b/examples/pytorch/node2vec/model.py
@@ -190,9 +190,9 @@ def evaluate(self, x_train, y_train, x_val, y_val):
 
         x_train, y_train = x_train.cpu().numpy(), y_train.cpu().numpy()
        x_val, y_val = x_val.cpu().numpy(), y_val.cpu().numpy()
-        lr = LogisticRegression(
-            solver="lbfgs", multi_class="auto", max_iter=150
-        ).fit(x_train, y_train)
+        lr = LogisticRegression(solver="lbfgs", max_iter=150).fit(
+            x_train, y_train
+        )
 
         return lr.score(x_val, y_val)
 