diff --git a/README.md b/README.md
index dab822d3d..768a40713 100644
--- a/README.md
+++ b/README.md
@@ -83,8 +83,8 @@ Building from source is very easy and the whole process (from cloning to being a
| | | Instructions |
| :---: | :--- | :--- |
-| ![Windows icon](docs/img/windows-icon-32.png) | **Windows** |
- Local - [.NET Framework 4.6.1](docs/building/windows-instructions.md#using-visual-studio-for-net-framework-461)
- Local - [.NET Core 2.1.x](docs/building/windows-instructions.md#using-net-core-cli-for-net-core-21x)
|
-| ![Ubuntu icon](docs/img/ubuntu-icon-32.png) | **Ubuntu** | - Local - [.NET Core 2.1.x](docs/building/ubuntu-instructions.md)
- [Azure HDInsight Spark - .NET Core 2.1.x](deployment/README.md)
|
+| ![Windows icon](docs/img/windows-icon-32.png) | **Windows** | - Local - [.NET Framework 4.6.1](docs/building/windows-instructions.md#using-visual-studio-for-net-framework-461)
- Local - [.NET Core 3.1](docs/building/windows-instructions.md#using-net-core-cli-for-net-core)
|
+| ![Ubuntu icon](docs/img/ubuntu-icon-32.png) | **Ubuntu** | - Local - [.NET Core 3.1](docs/building/ubuntu-instructions.md)
- [Azure HDInsight Spark - .NET Core 3.1](deployment/README.md)
|
## Samples
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 94896d2e7..4e531db68 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -24,6 +24,8 @@ jobs:
variables:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
_OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
+ HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
+ DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp3.1\win-x64
steps:
- task: Maven@3
@@ -65,8 +67,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.3.0-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.3.1'
@@ -76,8 +76,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.3.1-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.3.2'
@@ -87,8 +85,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.3.2-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.3.3'
@@ -98,8 +94,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.3.3-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.3.4'
@@ -109,8 +103,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.3.4-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.4.0'
@@ -120,8 +112,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.4.0-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.4.1'
@@ -131,8 +121,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.4.1-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.4.3'
@@ -142,8 +130,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.4.3-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 2.4.4'
@@ -153,8 +139,6 @@ jobs:
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.4.4-bin-hadoop2.7
- HADOOP_HOME: $(Build.BinariesDirectory)\hadoop
- DOTNET_WORKER_DIR: $(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker\netcoreapp2.1\win-x64
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
diff --git a/benchmark/csharp/Tpch/Tpch.csproj b/benchmark/csharp/Tpch/Tpch.csproj
index 499fd7916..7985a31b0 100644
--- a/benchmark/csharp/Tpch/Tpch.csproj
+++ b/benchmark/csharp/Tpch/Tpch.csproj
@@ -2,13 +2,8 @@
Exe
- net461;netcoreapp2.1
- netcoreapp2.1
-
-
+ net461;netcoreapp3.1
+ netcoreapp3.1
Tpch
Tpch
@@ -22,7 +17,7 @@
-
+
true
diff --git a/deployment/HDI-Spark/Notebooks/install-interactive-notebook.sh b/deployment/HDI-Spark/Notebooks/install-interactive-notebook.sh
index 74750a8a9..0ca036e6b 100644
--- a/deployment/HDI-Spark/Notebooks/install-interactive-notebook.sh
+++ b/deployment/HDI-Spark/Notebooks/install-interactive-notebook.sh
@@ -46,7 +46,7 @@ else
sudo add-apt-repository universe
sudo apt-get -yq install apt-transport-https
sudo apt-get -yq update
- sudo apt-get -yq install dotnet-sdk-3.0
+ sudo apt-get -yq install dotnet-sdk-3.1
sudo dotnet tool uninstall dotnet-try --tool-path /usr/share/dotnet-tools || true
sudo dotnet tool install dotnet-try --add-source https://dotnet.myget.org/F/dotnet-try/api/v3/index.json --tool-path /usr/share/dotnet-tools --version 1.0.19473.13
diff --git a/docs/building/ubuntu-instructions.md b/docs/building/ubuntu-instructions.md
index cd1b25aa6..2884d1a7e 100644
--- a/docs/building/ubuntu-instructions.md
+++ b/docs/building/ubuntu-instructions.md
@@ -16,7 +16,7 @@ Building Spark .NET on Ubuntu 18.04
If you already have all the pre-requisites, skip to the [build](ubuntu-instructions.md#building) steps below.
- 1. Download and install **[.NET Core 2.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/2.1)** or the **[.NET Core 3.0 preview SDK](https://dotnet.microsoft.com/download/dotnet-core/3.0)** - installing the SDK will add the `dotnet` toolchain to your path.
+ 1. Download and install **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** - installing the SDK will add the `dotnet` toolchain to your path.
2. Install **[OpenJDK 8](https://openjdk.java.net/install/)**
- You can use the following command:
```bash
@@ -121,21 +121,21 @@ You should see JARs created for the supported Spark versions:
1. Build the Worker
```bash
cd ~/dotnet.spark/src/csharp/Microsoft.Spark.Worker/
- dotnet publish -f netcoreapp2.1 -r ubuntu.18.04-x64
+ dotnet publish -f netcoreapp3.1 -r linux-x64
```
📙 Click to see sample console output
```bash
- user@machine:/home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker$ dotnet publish -f netcoreapp2.1 -r ubuntu.18.04-x64
+ user@machine:/home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker$ dotnet publish -f netcoreapp3.1 -r linux-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 36.03 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj.
Restore completed in 35.94 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj.
Microsoft.Spark -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark/Debug/netstandard2.0/Microsoft.Spark.dll
- Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp2.1/ubuntu.18.04-x64/Microsoft.Spark.Worker.dll
- Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp2.1/ubuntu.18.04-x64/publish/
+ Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp3.1/linux-x64/Microsoft.Spark.Worker.dll
+ Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp3.1/linux-x64/publish/
```
@@ -143,21 +143,21 @@ You should see JARs created for the supported Spark versions:
2. Build the Samples
```bash
cd ~/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples/
- dotnet publish -f netcoreapp2.1 -r ubuntu.18.04-x64
+ dotnet publish -f netcoreapp3.1 -r linux-x64
```
📙 Click to see sample console output
```bash
- user@machine:/home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples$ dotnet publish -f netcoreapp2.1 -r ubuntu.18.04-x64
+ user@machine:/home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples$ dotnet publish -f netcoreapp3.1 -r linux-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 37.11 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj.
Restore completed in 281.63 ms for /home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples/Microsoft.Spark.CSharp.Examples.csproj.
Microsoft.Spark -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark/Debug/netstandard2.0/Microsoft.Spark.dll
- Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp2.1/ubuntu.18.04-x64/Microsoft.Spark.CSharp.Examples.dll
- Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp2.1/ubuntu.18.04-x64/publish/
+ Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp3.1/linux-x64/Microsoft.Spark.CSharp.Examples.dll
+ Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp3.1/linux-x64/publish/
```
@@ -166,8 +166,8 @@ You should see JARs created for the supported Spark versions:
Once you build the samples, you can use `spark-submit` to submit your .NET Core apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark.
- 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp2.1/ubuntu.18.04-x64/publish`)
- 2. Open a terminal and go to the directory where your app binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp2.1/ubuntu.18.04-x64/publish`)
+ 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp3.1/linux-x64/publish`)
+ 2. Open a terminal and go to the directory where your app binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp3.1/linux-x64/publish`)
3. Running your app follows the basic structure:
```bash
spark-submit \
diff --git a/docs/building/windows-instructions.md b/docs/building/windows-instructions.md
index c3621208b..3a72b2a99 100644
--- a/docs/building/windows-instructions.md
+++ b/docs/building/windows-instructions.md
@@ -20,8 +20,8 @@ Building Spark .NET on Windows
If you already have all the pre-requisites, skip to the [build](windows-instructions.md#building) steps below.
- 1. Download and install the **[.NET Core SDK](https://dotnet.microsoft.com/download/dotnet-core/2.1)** - installing the SDK will add the `dotnet` toolchain to your path. .NET Core 2.1, 2.2 and 3.0 preview are supported.
- 2. Install **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** (Version 16.3 or later). The Community version is completely free. When configuring your installation, include these components at minimum:
+ 1. Download and install the **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** - installing the SDK will add the `dotnet` toolchain to your path.
+ 2. Install **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** (Version 16.4 or later). The Community version is completely free. When configuring your installation, include these components at minimum:
* .NET desktop development
* All Required Components
* .NET Framework 4.6.1 Development Tools
@@ -154,42 +154,42 @@ You should see JARs created for the supported Spark versions:
1. Build the Worker
```powershell
cd C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker\
- dotnet publish -f netcoreapp2.1 -r win10-x64
+ dotnet publish -f netcoreapp3.1 -r win-x64
```
📙 Click to see sample console output
```powershell
- PS C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker> dotnet publish -f netcoreapp2.1 -r win10-x64
+ PS C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker> dotnet publish -f netcoreapp3.1 -r win-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 299.95 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark\Microsoft.Spark.csproj.
Restore completed in 306.62 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker\Microsoft.Spark.Worker.csproj.
Microsoft.Spark -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark\Debug\netstandard2.0\Microsoft.Spark.dll
- Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp2.1\win10-x64\Microsoft.Spark.Worker.dll
- Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp2.1\win10-x64\publish\
+ Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp3.1\win-x64\Microsoft.Spark.Worker.dll
+ Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp3.1\win-x64\publish\
```
2. Build the Samples
```powershell
cd C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples\
- dotnet publish -f netcoreapp2.1 -r win10-x64
+ dotnet publish -f netcoreapp3.1 -r win-x64
```
📙 Click to see sample console output
```powershell
- PS C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples> dotnet publish -f netcoreapp2.1 -r win10-x64
+ PS C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples> dotnet publish -f netcoreapp3.1 -r win-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 44.22 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark\Microsoft.Spark.csproj.
Restore completed in 336.94 ms for C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples\Microsoft.Spark.CSharp.Examples.csproj.
Microsoft.Spark -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark\Debug\netstandard2.0\Microsoft.Spark.dll
- Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp2.1\win10-x64\Microsoft.Spark.CSharp.Examples.dll
- Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp2.1\win10-x64\publish\
+ Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp3.1\win-x64\Microsoft.Spark.CSharp.Examples.dll
+ Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp3.1\win-x64\publish\
```
@@ -198,8 +198,8 @@ You should see JARs created for the supported Spark versions:
Once you build the samples, running them will be through `spark-submit` regardless of whether you are targeting .NET Framework or .NET Core apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark.
- 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp2.1\win10-x64\publish` for .NET Core)
- 2. Open Powershell and go to the directory where your app binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp2.1\win10-x64\publish` for .NET Core)
+ 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp3.1\win-x64\publish` for .NET Core)
+ 2. Open Powershell and go to the directory where your app binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp3.1\win-x64\publish` for .NET Core)
3. Running your app follows the basic structure:
```powershell
spark-submit.cmd `
diff --git a/docs/getting-started/ubuntu-instructions.md b/docs/getting-started/ubuntu-instructions.md
index dacde5dc3..4821bbec6 100644
--- a/docs/getting-started/ubuntu-instructions.md
+++ b/docs/getting-started/ubuntu-instructions.md
@@ -4,7 +4,7 @@ These instructions will show you how to run a .NET for Apache Spark app using .N
## Pre-requisites
-- Download and install the following: **[.NET Core 2.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/2.1)** | **[OpenJDK 8](https://openjdk.java.net/install/)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
+- Download and install the following: **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** | **[OpenJDK 8](https://openjdk.java.net/install/)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
- Download and install **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release:
- Select a **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release from .NET for Apache Spark GitHub Releases page and download into your local machine (e.g., `~/bin/Microsoft.Spark.Worker`).
- **IMPORTANT** Create a [new environment variable](https://help.ubuntu.com/community/EnvironmentVariables) `DOTNET_WORKER_DIR` and set it to the directory where you downloaded and extracted the Microsoft.Spark.Worker (e.g., `~/bin/Microsoft.Spark.Worker`).
diff --git a/docs/getting-started/windows-instructions.md b/docs/getting-started/windows-instructions.md
index 480dd0c71..698ca8b94 100644
--- a/docs/getting-started/windows-instructions.md
+++ b/docs/getting-started/windows-instructions.md
@@ -4,7 +4,7 @@ These instructions will show you how to run a .NET for Apache Spark app using .N
## Pre-requisites
-- Download and install the following: **[.NET Core 2.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/2.1)** | **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** | **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
+- Download and install the following: **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** | **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** | **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
- Download and install **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release:
- Select a **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release from .NET for Apache Spark GitHub Releases page and download into your local machine (e.g., `c:\bin\Microsoft.Spark.Worker\`).
- **IMPORTANT** Create a [new environment variable](https://www.java.com/en/download/help/path.xml) `DOTNET_WORKER_DIR` and set it to the directory where you downloaded and extracted the Microsoft.Spark.Worker (e.g., `c:\bin\Microsoft.Spark.Worker`).
diff --git a/eng/AfterSolutionBuild.targets b/eng/AfterSolutionBuild.targets
index bad3e0f99..d9dfa692c 100644
--- a/eng/AfterSolutionBuild.targets
+++ b/eng/AfterSolutionBuild.targets
@@ -6,9 +6,9 @@
<_PublishTarget Include="FullFramework" Framework="net461" RuntimeId="win-x64" />
- <_PublishTarget Include="WindowsCore" Framework="netcoreapp2.1" RuntimeId="win-x64" />
- <_PublishTarget Include="LinuxCore" Framework="netcoreapp2.1" RuntimeId="linux-x64" />
- <_PublishTarget Include="MacOSCore" Framework="netcoreapp2.1" RuntimeId="osx-x64" />
+ <_PublishTarget Include="WindowsCore" Framework="netcoreapp3.1" RuntimeId="win-x64" />
+ <_PublishTarget Include="LinuxCore" Framework="netcoreapp3.1" RuntimeId="linux-x64" />
+ <_PublishTarget Include="MacOSCore" Framework="netcoreapp3.1" RuntimeId="osx-x64" />
Exe
- net461;netcoreapp2.1
- netcoreapp2.1
+ net461;netcoreapp3.1
+ netcoreapp3.1
Microsoft.Spark.Examples
Microsoft.Spark.CSharp.Examples
diff --git a/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj b/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj
index e52ccae7d..85ea23e1c 100644
--- a/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj
+++ b/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj
@@ -2,8 +2,8 @@
Exe
- net461;netcoreapp2.1
- netcoreapp2.1
+ net461;netcoreapp3.1
+ netcoreapp3.1
Microsoft.Spark.Examples
Microsoft.Spark.FSharp.Examples
false
diff --git a/global.json b/global.json
index bfc39435d..7942d69f9 100644
--- a/global.json
+++ b/global.json
@@ -1,6 +1,6 @@
{
"tools": {
- "dotnet": "2.2.203"
+ "dotnet": "3.0.100"
},
"msbuild-sdks": {
"Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.19257.7"
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj
index f0bceb763..42f6304d8 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj
@@ -1,7 +1,7 @@
- netcoreapp2.1
+ netcoreapp3.1
false
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta/Microsoft.Spark.Extensions.Delta.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta/Microsoft.Spark.Extensions.Delta.csproj
index 6d06d0469..7cbe05021 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta/Microsoft.Spark.Extensions.Delta.csproj
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta/Microsoft.Spark.Extensions.Delta.csproj
@@ -1,7 +1,7 @@
- netstandard2.0
+ netstandard2.0;netstandard2.1
Microsoft.Spark.Extensions.Delta
true
true
diff --git a/src/csharp/Microsoft.Spark.E2ETest.ExternalLibrary/Microsoft.Spark.E2ETest.ExternalLibrary.csproj b/src/csharp/Microsoft.Spark.E2ETest.ExternalLibrary/Microsoft.Spark.E2ETest.ExternalLibrary.csproj
index 81580bb59..186e1beb4 100644
--- a/src/csharp/Microsoft.Spark.E2ETest.ExternalLibrary/Microsoft.Spark.E2ETest.ExternalLibrary.csproj
+++ b/src/csharp/Microsoft.Spark.E2ETest.ExternalLibrary/Microsoft.Spark.E2ETest.ExternalLibrary.csproj
@@ -1,4 +1,4 @@
-
+
netstandard2.0
diff --git a/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj b/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj
index b64c9109e..abe436ec9 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj
+++ b/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj
@@ -1,7 +1,7 @@
- netcoreapp2.1
+ netcoreapp3.1
diff --git a/src/csharp/Microsoft.Spark.Experimental/Microsoft.Spark.Experimental.csproj b/src/csharp/Microsoft.Spark.Experimental/Microsoft.Spark.Experimental.csproj
index ff1256b8e..78fcb754b 100644
--- a/src/csharp/Microsoft.Spark.Experimental/Microsoft.Spark.Experimental.csproj
+++ b/src/csharp/Microsoft.Spark.Experimental/Microsoft.Spark.Experimental.csproj
@@ -1,4 +1,4 @@
-
+
netstandard2.0
diff --git a/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj b/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj
index 4a0f5301e..e608330a7 100644
--- a/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj
+++ b/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj
@@ -1,7 +1,7 @@
- netcoreapp2.1
+ netcoreapp3.1
Microsoft.Spark.UnitTest
@@ -10,11 +10,6 @@
-
-
-
-
-
diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj b/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj
index 67c1efaba..1b68d2e45 100644
--- a/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj
+++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj
@@ -1,7 +1,7 @@
- netcoreapp2.1
+ netcoreapp3.1
diff --git a/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj b/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj
index 9d60ea2e9..cff20b084 100644
--- a/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj
+++ b/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj
@@ -2,8 +2,8 @@
Exe
- net461;netcoreapp2.1
- netcoreapp2.1
+ net461;netcoreapp3.1
+ netcoreapp3.1
Microsoft.Spark.Worker
true
@@ -16,7 +16,7 @@
-
+
diff --git a/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj b/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj
index 297e3eb41..d473408b1 100644
--- a/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj
+++ b/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj
@@ -1,7 +1,7 @@
- netstandard2.0;netcoreapp2.1;netstandard2.1
+ netstandard2.0;netstandard2.1
Microsoft.Spark
true
true