diff --git a/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessClusterUpdateDialog.java b/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessClusterUpdateDialog.java
index 55a31729f2..a7ea37d96a 100644
--- a/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessClusterUpdateDialog.java
+++ b/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessClusterUpdateDialog.java
@@ -22,9 +22,7 @@
package com.microsoft.azure.sparkserverless.serverexplore.ui;
-import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.openapi.project.Project;
-import com.microsoft.azure.hdinsight.common.logger.ILogger;
import com.microsoft.azure.hdinsight.sdk.common.azure.serverless.AzureSparkServerlessCluster;
import com.microsoft.azure.sparkserverless.serverexplore.SparkServerlessClusterProvisionSettingsModel;
import com.microsoft.azure.sparkserverless.serverexplore.SparkServerlessClusterUpdateCtrlProvider;
@@ -36,11 +34,8 @@
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
-import java.time.LocalDateTime;
-public class SparkServerlessClusterUpdateDialog extends SparkServerlessProvisionDialog implements ILogger {
- @NotNull
- private final AzureSparkServerlessCluster cluster;
+public class SparkServerlessClusterUpdateDialog extends SparkServerlessProvisionDialog {
private void disableUneditableFields() {
clusterNameField.setEditable(false);
@@ -56,17 +51,21 @@ private void disableUneditableFields() {
private SparkServerlessClusterUpdateCtrlProvider ctrlProvider;
public SparkServerlessClusterUpdateDialog(@NotNull SparkServerlessClusterNode clusterNode,
@NotNull AzureSparkServerlessCluster cluster) {
+
super((SparkServerlessADLAccountNode) clusterNode.getParent(), cluster.getAccount());
- this.cluster = cluster;
this.setTitle("Update Cluster");
disableUneditableFields();
+ getOKAction().setEnabled(false);
ctrlProvider = new SparkServerlessClusterUpdateCtrlProvider(
this, new IdeaSchedulers((Project)clusterNode.getProject()), cluster);
this.getWindow().addWindowListener(new WindowAdapter() {
@Override
public void windowOpened(WindowEvent e) {
ctrlProvider.initialize()
- .subscribe(complete -> {}, err -> log().warn("Error initialize update dialog. " + err.toString()));
+ .subscribe(complete -> {}, err -> {
+ log().warn(String.format("Can't get the cluster %s details: %s", cluster.getName(), err));
+ errorMessageField.setText("Error Loading cluster details");
+ });
super.windowOpened(e);
}
});
@@ -74,7 +73,7 @@ public void windowOpened(WindowEvent e) {
@Override
- protected void enableClusterNameUniquenessCheck() {
+ protected void setClusterNameSets() {
// To avoid cluster already exists tooltips
clusterNameField.setNotAllowedValues(null);
@@ -113,13 +112,6 @@ public void setData(@NotNull SparkServerlessClusterProvisionSettingsModel data)
workerMemoryField.setText(String.valueOf(data.getWorkerMemory()));
workerNumberOfContainersField.setText(String.valueOf(data.getWorkerNumberOfContainers()));
- if (!StringUtils.isEmpty(data.getErrorMessage())) {
- if (!errorMessageDecorator.isExpanded()) {
- errorMessageDecorator.setOn(true);
- }
-
- printLogLine(ConsoleViewContentType.ERROR_OUTPUT, data.getErrorMessage());
- }
- printLogLine(ConsoleViewContentType.NORMAL_OUTPUT, "Cluster guid: " + cluster.getGuid());
+ errorMessageField.setText(data.getErrorMessage());
}
}
diff --git a/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.form b/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.form
index 89004801b9..4e5b166708 100644
--- a/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.form
+++ b/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.form
@@ -1,330 +1,316 @@
diff --git a/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.java b/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.java
index 3a976bded4..8d23758ca9 100644
--- a/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.java
+++ b/PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/sparkserverless/serverexplore/ui/SparkServerlessProvisionDialog.java
@@ -22,15 +22,9 @@
package com.microsoft.azure.sparkserverless.serverexplore.ui;
-import com.intellij.execution.impl.ConsoleViewImpl;
-import com.intellij.execution.ui.ConsoleViewContentType;
-import com.intellij.openapi.actionSystem.ActionManager;
-import com.intellij.openapi.actionSystem.ActionToolbar;
-import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.ui.DocumentAdapter;
-import com.intellij.ui.HideableDecorator;
import com.microsoft.azure.hdinsight.common.logger.ILogger;
import com.microsoft.azure.hdinsight.common.mvc.SettableControl;
import com.microsoft.azure.hdinsight.sdk.common.azure.serverless.AzureSparkServerlessAccount;
@@ -42,20 +36,16 @@
import com.microsoft.azure.sparkserverless.serverexplore.sparkserverlessnode.SparkServerlessADLAccountNode;
import com.microsoft.azuretools.azurecommons.helpers.NotNull;
import com.microsoft.intellij.rxjava.IdeaSchedulers;
-import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
-import java.awt.*;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
-import java.time.LocalDateTime;
import java.util.*;
-import java.util.List;
import java.util.stream.Stream;
public class SparkServerlessProvisionDialog extends DialogWrapper
@@ -86,56 +76,56 @@ public class SparkServerlessProvisionDialog extends DialogWrapper
protected JLabel workerCoresLabel;
protected JLabel workerMemoryLabel;
protected JLabel workerNumberOfContainersLabel;
+ protected JTextField errorMessageField;
protected JPanel provisionDialogPanel;
protected JButton refreshButton;
protected JLabel storageRootPathLabel;
protected JComboBox sparkVersionComboBox;
protected JLabel sparkVersionLabel;
private JXHyperLinkWithUri jobQueueHyperLink;
- protected JPanel errorMessagePanel;
- protected JPanel errorMessagePanelHolder;
- protected JPanel configPanel;
- protected JPanel auPanel;
- protected ConsoleViewImpl consoleViewPanel;
- protected HideableDecorator errorMessageDecorator;
+
@NotNull
private final List allTextFields = Arrays.asList(clusterNameField, sparkEventsField);
@NotNull
private final List allAURelatedFields = Arrays.asList(masterCoresField, workerCoresField,
masterMemoryField, workerMemoryField, workerNumberOfContainersField);
+ protected void setClusterNameSets() {
+ try {
+ clusterNameField.setNotAllowedValues(
+ new HashSet<>(ctrlProvider.getClusterNames().toBlocking().singleOrDefault(new ArrayList<>())));
+
+ sparkEventsField.setPatternAndErrorMessage(null);
+ // Setting the text is necessary: by default, '/' is not allowed for TextWithErrorHintedField, which
+ // leads to an error tooltip. We must set the text to trigger the validator of the new pattern.
+ sparkEventsField.setText("spark-events/");
+ } catch (Exception ex) {
+ log().warn("Got exceptions when getting cluster names: " + ex);
+ }
+ }
+
public SparkServerlessProvisionDialog(@NotNull SparkServerlessADLAccountNode adlAccountNode,
@NotNull AzureSparkServerlessAccount account) {
// TODO: refactor the design of getProject Method for Node Class
// TODO: get project through ProjectUtils.theProject()
super((Project) adlAccountNode.getProject(), true);
+
this.ctrlProvider = new SparkServerlessClusterProvisionCtrlProvider(
this, new IdeaSchedulers((Project) adlAccountNode.getProject()), account);
this.adlAccountNode = adlAccountNode;
init();
this.setTitle("Provision Spark Cluster");
+ errorMessageField.setBackground(this.provisionDialogPanel.getBackground());
+ errorMessageField.setBorder(BorderFactory.createEmptyBorder());
availableAUField.setBorder(BorderFactory.createEmptyBorder());
totalAUField.setBorder(BorderFactory.createEmptyBorder());
calculatedAUField.setBorder(BorderFactory.createEmptyBorder());
this.setModal(true);
- // make error message widget hideable
- errorMessagePanel.setBorder(BorderFactory.createEmptyBorder());
- errorMessageDecorator = new HideableDecorator(errorMessagePanelHolder, "Log", true);
- errorMessageDecorator.setContentComponent(errorMessagePanel);
- errorMessageDecorator.setOn(false);
-
- // add console view panel to error message panel
- consoleViewPanel = new ConsoleViewImpl((Project) adlAccountNode.getProject(), false);
- errorMessagePanel.add(consoleViewPanel.getComponent(), BorderLayout.CENTER);
- ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar("provisionLog",
- new DefaultActionGroup(consoleViewPanel.createConsoleActions()), false);
- errorMessagePanel.add(toolbar.getComponent(), BorderLayout.WEST);
-
this.jobQueueHyperLink.setURI(account.getJobManagementURI());
-
- this.enableClusterNameUniquenessCheck();
+ // setClusterNameSets() enables the cluster name uniqueness check
+ this.setClusterNameSets();
// We can determine the ADL account since we provision on a specific ADL account Node
this.adlAccountField.setText(adlAccountNode.getAdlAccount().getName());
this.storageRootPathLabel.setText(Optional.ofNullable(account.getStorageRootPath()).orElse(""));
@@ -170,20 +160,6 @@ public void windowOpened(WindowEvent e) {
});
}
- protected void enableClusterNameUniquenessCheck() {
- try {
- clusterNameField.setNotAllowedValues(
- new HashSet<>(ctrlProvider.getClusterNames().toBlocking().singleOrDefault(new ArrayList<>())));
-
- sparkEventsField.setPatternAndErrorMessage(null);
- // The text setting is necessary. By default, '/' is not allowed for TextWithErrorHintedField, leading to
- // error tooltip. We have to set the text to trigger the validator of the new pattern.
- sparkEventsField.setText("spark-events/");
- } catch (Exception ex) {
- log().warn("Got exceptions when getting cluster names: " + ex);
- }
- }
-
private void updateAvailableAUAndTotalAU() {
if (!refreshButton.isEnabled()) {
return;
@@ -216,20 +192,10 @@ private void updateCalculatedAU() {
Integer.valueOf(workerNumberOfContainersField.getText()))));
}
- protected void printLogLine(@NotNull ConsoleViewContentType logLevel, @NotNull String log) {
- consoleViewPanel.print(LocalDateTime.now().toString() + " " + logLevel.toString().toUpperCase() + " " + log + "\n", logLevel);
- }
-
// Data -> Components
@Override
public void setData(@NotNull SparkServerlessClusterProvisionSettingsModel data) {
- if (!StringUtils.isEmpty(data.getErrorMessage())) {
- if (!errorMessageDecorator.isExpanded()) {
- errorMessageDecorator.setOn(true);
- }
- printLogLine(ConsoleViewContentType.ERROR_OUTPUT, data.getErrorMessage());
- }
- printLogLine(ConsoleViewContentType.NORMAL_OUTPUT, "Cluster guid: " + data.getClusterGuid());
+ errorMessageField.setText(data.getErrorMessage());
}
// Components -> Data
@@ -246,7 +212,8 @@ public void getData(@NotNull SparkServerlessClusterProvisionSettingsModel data)
.setWorkerCores(workerCoresField.getValue())
.setWorkerMemory(workerMemoryField.getValue())
.setWorkerNumberOfContainers(workerNumberOfContainersField.getValue())
- .setStorageRootPathLabelTitle(storageRootPathLabel.getText());
+ .setStorageRootPathLabelTitle(storageRootPathLabel.getText())
+ .setErrorMessage(errorMessageField.getText());
}
@Override
@@ -263,7 +230,7 @@ protected void doOKAction() {
// TODO: replace load with refreshWithoutAsync
adlAccountNode.load(false);
super.doOKAction();
- }, err -> log().warn("Error provision a cluster. " + err.toString()));
+ }, err -> errorMessageField.setText(String.format("Provision failed. %s", err.getMessage())));
}
@NotNull
diff --git a/Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/sdk/common/azure/serverless/AzureSparkServerlessCluster.java b/Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/sdk/common/azure/serverless/AzureSparkServerlessCluster.java
index b8ebc044f4..85ed72d2a5 100644
--- a/Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/sdk/common/azure/serverless/AzureSparkServerlessCluster.java
+++ b/Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/sdk/common/azure/serverless/AzureSparkServerlessCluster.java
@@ -786,18 +786,13 @@ public Observable uploadToStorage(@NotNull File localFile, @NotNull URI re
@Nullable
public Observable prepareStorageFolder(@NotNull String path) {
return Observable.fromCallable(() -> {
- try {
- String accessToken = getHttp().getAccessToken();
- ADLStoreClient storeClient = ADLStoreClient.createClient(
- URI.create(this.getStorageAccount().getDefaultContainerOrRootPath()).getHost(), accessToken);
- if (storeClient.checkExists(path)) {
- return true;
- } else {
- return storeClient.createDirectory(path);
- }
- } catch (Exception ex) {
- throw new Exception("Failed to create or access spark events log path", ex);
+ String accessToken = getHttp().getAccessToken();
+ ADLStoreClient storeClient = ADLStoreClient.createClient(
+ URI.create(this.getStorageAccount().getDefaultContainerOrRootPath()).getHost(), accessToken);
+ if (!storeClient.checkExists(path)) {
+ return storeClient.createDirectory(path);
}
+ return true;
});
}
}
diff --git a/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionCtrlProvider.java b/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionCtrlProvider.java
index bc7578cf78..7470ba00ab 100644
--- a/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionCtrlProvider.java
+++ b/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionCtrlProvider.java
@@ -109,17 +109,28 @@ public Observable> getAvailableAUAndTotalAU() {
}
@NotNull
- private Observable buildCluster(@NotNull SparkServerlessClusterProvisionSettingsModel toUpdate) {
- return Observable.just(new AzureSparkServerlessCluster.Builder(account)
- .name(toUpdate.getClusterName())
- .masterPerInstanceCores(toUpdate.getMasterCores())
- .masterPerInstanceMemory(toUpdate.getMasterMemory())
- .workerPerInstanceCores(toUpdate.getWorkerCores())
- .workerPerInstanceMemory(toUpdate.getWorkerMemory())
- .workerInstances(toUpdate.getWorkerNumberOfContainers())
- .sparkEventsPath(toUpdate.getSparkEvents())
- .userStorageAccount(account.getDetailResponse().defaultDataLakeStoreAccount())
- .build());
+ private SparkServerlessClusterProvisionSettingsModel provisionCluster(
+ @NotNull SparkServerlessClusterProvisionSettingsModel toUpdate) {
+ if (!StringUtils.isEmpty(toUpdate.getErrorMessage())) {
+ return toUpdate;
+ }
+
+ try {
+ AzureSparkServerlessCluster cluster =
+ (AzureSparkServerlessCluster) new AzureSparkServerlessCluster.Builder(account)
+ .name(toUpdate.getClusterName())
+ .masterPerInstanceCores(toUpdate.getMasterCores())
+ .masterPerInstanceMemory(toUpdate.getMasterMemory())
+ .workerPerInstanceCores(toUpdate.getWorkerCores())
+ .workerPerInstanceMemory(toUpdate.getWorkerMemory())
+ .workerInstances(toUpdate.getWorkerNumberOfContainers())
+ .sparkEventsPath(toUpdate.getSparkEvents())
+ .userStorageAccount(account.getDetailResponse().defaultDataLakeStoreAccount())
+ .build().provision().toBlocking().single();
+ } catch (Exception e) {
+ return toUpdate.setErrorMessage("Provision failed: " + e.getMessage());
+ }
+ return toUpdate;
}
public Observable validateAndProvision() {
@@ -128,16 +139,7 @@ public Observable validateAndProvi
.doOnNext(controllableView::getData)
.observeOn(ideSchedulers.processBarVisibleAsync("Provisioning cluster..."))
.map(toUpdate -> toUpdate.setErrorMessage(null))
- .flatMap(toUpdate ->
- buildCluster(toUpdate)
- .doOnNext(cluster -> toUpdate.setClusterGuid(cluster.getGuid()))
- .flatMap(cluster -> cluster.provision())
- .map(cluster -> toUpdate)
- .onErrorReturn(err -> {
- log().warn("Error provision a cluster. " + err.toString());
- return toUpdate.setErrorMessage(err.getMessage());
- })
- )
+ .map(toUpdate -> provisionCluster(toUpdate))
.observeOn(ideSchedulers.dispatchUIThread())
.doOnNext(controllableView::setData)
.filter(data -> StringUtils.isEmpty(data.getErrorMessage()));
diff --git a/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionSettingsModel.java b/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionSettingsModel.java
index 82bf325e6c..1d0fd386e9 100644
--- a/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionSettingsModel.java
+++ b/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterProvisionSettingsModel.java
@@ -47,8 +47,6 @@ public class SparkServerlessClusterProvisionSettingsModel implements Cloneable {
private int totalAU;
@NotNull
private int calculatedAU;
- @Nullable
- private String clusterGuid;
@NotNull
private String storageRootPathLabelTitle;
@@ -171,11 +169,6 @@ public int getWorkerNumberOfContainers() {
return workerNumberOfContainers;
}
- @Nullable
- public String getClusterGuid() {
- return clusterGuid;
- }
-
@NotNull
public SparkServerlessClusterProvisionSettingsModel setWorkerNumberOfContainers(
@NotNull int workerNumberOfContainers) {
@@ -206,11 +199,6 @@ public SparkServerlessClusterProvisionSettingsModel setErrorMessage(@Nullable St
return this;
}
- public SparkServerlessClusterProvisionSettingsModel setClusterGuid(@NotNull String clusterGuid) {
- this.clusterGuid = clusterGuid;
- return this;
- }
-
@Override
protected Object clone() throws CloneNotSupportedException {
// Here is a shadow clone, not deep clone
diff --git a/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterUpdateCtrlProvider.java b/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterUpdateCtrlProvider.java
index 8455287ae4..bfebcd3ffb 100644
--- a/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterUpdateCtrlProvider.java
+++ b/Utils/hdinsight-node-common/src/com/microsoft/azure/sparkserverless/serverexplore/SparkServerlessClusterUpdateCtrlProvider.java
@@ -63,10 +63,7 @@ public Observable validateAndUpdat
.flatMap(toUpdate ->
cluster.update(toUpdate.getWorkerNumberOfContainers())
.map(cluster -> toUpdate)
- .onErrorReturn(err -> {
- log().warn("Error update a cluster. " + err.toString());
- return toUpdate.setErrorMessage(err.getMessage());
- }))
+ .onErrorReturn(err -> toUpdate.setErrorMessage(err.getMessage())))
.observeOn(ideSchedulers.dispatchUIThread())
.doOnNext(controllableView::setData)
.filter(data -> StringUtils.isEmpty(data.getErrorMessage()));