diff --git a/azure-pipelines-e2e-tests-template.yml b/azure-pipelines-e2e-tests-template.yml
index 6f926f9d7..8fe5de8c5 100644
--- a/azure-pipelines-e2e-tests-template.yml
+++ b/azure-pipelines-e2e-tests-template.yml
@@ -122,14 +122,14 @@ stages:
script: |
echo "Download Hadoop utils for Windows."
$hadoopBinaryUrl = "https://github.com/steveloughran/winutils/releases/download/tag_2017-08-29-hadoop-2.8.1-native/hadoop-2.8.1.zip"
- # Spark 3.3.3 version binary use Hadoop3 dependency
- if ("3.3.3" -contains "${{ test.version }}") {
+ # Spark 3.3.0+ version binary uses Hadoop3 dependency
+ if ([version]"3.3.0" -le [version]"${{ test.version }}") {
$hadoopBinaryUrl = "https://github.com/SparkSnail/winutils/releases/download/hadoop-3.3.5/hadoop-3.3.5.zip"
}
curl -k -L -o hadoop.zip $hadoopBinaryUrl
Expand-Archive -Path hadoop.zip -Destination .
New-Item -ItemType Directory -Force -Path hadoop\bin
- if ("3.3.3" -contains "${{ test.version }}") {
+ if ([version]"3.3.0" -le [version]"${{ test.version }}") {
cp hadoop-3.3.5\winutils.exe hadoop\bin
# Hadoop 3.3 need to add hadoop.dll to environment varibles to avoid UnsatisfiedLinkError
cp hadoop-3.3.5\hadoop.dll hadoop\bin
@@ -142,12 +142,8 @@ stages:
- pwsh: |
echo "Downloading Spark ${{ test.version }}"
$sparkBinaryName = "spark-${{ test.version }}-bin-hadoop2.7"
- # In spark 3.3.0, 3.3.1, 3.3.2, 3.3.4, the binary name with hadoop2 dependency has changed to spark-${{ test.version }}-bin-hadoop2.tgz
- if ("3.3.0", "3.3.1", "3.3.2", "3.3.4" -contains "${{ test.version }}") {
- $sparkBinaryName = "spark-${{ test.version }}-bin-hadoop2"
- }
- # In spark 3.3.3, the binary don't provide hadoop2 version, so we use hadoop3 version
- if ("3.3.3" -contains "${{ test.version }}") {
+ # Spark 3.3.0+ uses Hadoop3
+ if ([version]"3.3.0" -le [version]"${{ test.version }}") {
$sparkBinaryName = "spark-${{ test.version }}-bin-hadoop3"
}
curl -k -L -o spark-${{ test.version }}.tgz https://archive.apache.org/dist/spark/spark-${{ test.version }}/${sparkBinaryName}.tgz
diff --git a/azure-pipelines-pr.yml b/azure-pipelines-pr.yml
index d213cc574..f22b54cc1 100644
--- a/azure-pipelines-pr.yml
+++ b/azure-pipelines-pr.yml
@@ -31,7 +31,7 @@ variables:
backwardCompatibleTestOptions_Linux_3_1: ""
forwardCompatibleTestOptions_Linux_3_1: ""
- # Skip all forward/backward compatibility tests since Spark 3.2 is not supported before this release.
+ # Skip all forward/backward compatibility tests since Spark 3.2 and 3.5 are not supported before this release.
backwardCompatibleTestOptions_Windows_3_2: "--filter FullyQualifiedName=NONE"
forwardCompatibleTestOptions_Windows_3_2: $(backwardCompatibleTestOptions_Windows_3_2)
backwardCompatibleTestOptions_Linux_3_2: $(backwardCompatibleTestOptions_Windows_3_2)
@@ -41,6 +41,11 @@ variables:
forwardCompatibleTestOptions_Windows_3_3: $(backwardCompatibleTestOptions_Windows_3_3)
backwardCompatibleTestOptions_Linux_3_3: $(backwardCompatibleTestOptions_Windows_3_3)
forwardCompatibleTestOptions_Linux_3_3: $(backwardCompatibleTestOptions_Windows_3_3)
+
+ backwardCompatibleTestOptions_Windows_3_5: "--filter FullyQualifiedName=NONE"
+ forwardCompatibleTestOptions_Windows_3_5: $(backwardCompatibleTestOptions_Windows_3_5)
+ backwardCompatibleTestOptions_Linux_3_5: $(backwardCompatibleTestOptions_Windows_3_5)
+ forwardCompatibleTestOptions_Linux_3_5: $(backwardCompatibleTestOptions_Windows_3_5)
# Azure DevOps variables are transformed into environment variables, with these variables we
# avoid the first time experience and telemetry to speed up the build.
@@ -73,6 +78,11 @@ parameters:
- '3.3.2'
- '3.3.3'
- '3.3.4'
+ - '3.5.0'
+ - '3.5.1'
+ - '3.5.2'
+ - '3.5.3'
+
# List of OS types to run E2E tests, run each test in both 'Windows' and 'Linux' environments
- name: listOfE2ETestsPoolTypes
type: object
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 384ec0779..a6ae0b15b 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -31,12 +31,17 @@ variables:
backwardCompatibleTestOptions_Linux_3_1: ""
forwardCompatibleTestOptions_Linux_3_1: ""
- # Skip all forward/backward compatibility tests since Spark 3.2 is not supported before this release.
+ # Skip all forward/backward compatibility tests since Spark 3.2 and 3.5 are not supported before this release.
backwardCompatibleTestOptions_Windows_3_2: "--filter FullyQualifiedName=NONE"
forwardCompatibleTestOptions_Windows_3_2: $(backwardCompatibleTestOptions_Windows_3_2)
backwardCompatibleTestOptions_Linux_3_2: $(backwardCompatibleTestOptions_Windows_3_2)
forwardCompatibleTestOptions_Linux_3_2: $(backwardCompatibleTestOptions_Windows_3_2)
+ backwardCompatibleTestOptions_Windows_3_5: "--filter FullyQualifiedName=NONE"
+ forwardCompatibleTestOptions_Windows_3_5: $(backwardCompatibleTestOptions_Windows_3_5)
+ backwardCompatibleTestOptions_Linux_3_5: $(backwardCompatibleTestOptions_Windows_3_5)
+ forwardCompatibleTestOptions_Linux_3_5: $(backwardCompatibleTestOptions_Windows_3_5)
+
# Azure DevOps variables are transformed into environment variables, with these variables we
# avoid the first time experience and telemetry to speed up the build.
DOTNET_CLI_TELEMETRY_OPTOUT: 1
@@ -413,3 +418,51 @@ stages:
testOptions: ""
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_2)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_2)
+ - version: '3.5.0'
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
+ jobOptions:
+ - pool: 'Windows'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_5)
+ forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_5)
+ - pool: 'Linux'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_5)
+ forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_5)
+ - version: '3.5.1'
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
+ jobOptions:
+ - pool: 'Windows'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_5)
+ forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_5)
+ - pool: 'Linux'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_5)
+ forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_5)
+ - version: '3.5.2'
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
+ jobOptions:
+ - pool: 'Windows'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_5)
+ forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_5)
+ - pool: 'Linux'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_5)
+ forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_5)
+ - version: '3.5.3'
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
+ jobOptions:
+ - pool: 'Windows'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_5)
+ forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_5)
+ - pool: 'Linux'
+ testOptions: ""
+ backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_5)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_5)
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaFixture.cs b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaFixture.cs
index c893336f3..54a3e886b 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaFixture.cs
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaFixture.cs
@@ -27,6 +27,7 @@ public DeltaFixture()
(3, 3, 2) => "delta-core_2.12:2.3.0",
(3, 3, 3) => "delta-core_2.12:2.3.0",
(3, 3, 4) => "delta-core_2.12:2.3.0",
+ (3, 5, _) => "delta-spark_2.12:3.2.0",
_ => throw new NotSupportedException($"Spark {sparkVersion} not supported.")
};
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs
index 9b87c39d0..0044c3ec4 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs
@@ -57,16 +57,22 @@ public void TestSignaturesV2_4_X()
///
/// Test signatures for APIs introduced in Spark 3.1.*.
+ /// In Spark 3.5, Spark throws an exception when trying to delete
+ /// archive.zip from the temp folder, which causes other tests to fail.
///
- [SkipIfSparkVersionIsLessThan(Versions.V3_1_0)]
+ [SkipIfSparkVersionIsNotInRange(Versions.V3_1_0, Versions.V3_3_0)]
public void TestSignaturesV3_1_X()
{
SparkContext sc = SparkContext.GetOrCreate(new SparkConf());
string archivePath = $"{TestEnvironment.ResourceDirectory}archive.zip";
+
sc.AddArchive(archivePath);
- Assert.IsType(sc.ListArchives().ToArray());
+ var archives = sc.ListArchives().ToArray();
+
+ Assert.IsType(archives);
+ Assert.NotEmpty(archives.Where(a => a.EndsWith("archive.zip")));
}
}
}
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs
index f5f37dd91..630fb3c54 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs
@@ -59,7 +59,6 @@ public void TestSignaturesV2_4_X()
Assert.IsType(catalog.FunctionExists("functionname"));
Assert.IsType(catalog.GetDatabase("default"));
Assert.IsType(catalog.GetFunction("abs"));
- Assert.IsType(catalog.GetFunction(null, "abs"));
Assert.IsType(catalog.GetTable("users"));
Assert.IsType(catalog.GetTable("default", "users"));
Assert.IsType(catalog.IsCached("users"));
diff --git a/src/csharp/Microsoft.Spark.UnitTest/TypeConverterTests.cs b/src/csharp/Microsoft.Spark.UnitTest/TypeConverterTests.cs
index 332e0d29b..34fec9f96 100644
--- a/src/csharp/Microsoft.Spark.UnitTest/TypeConverterTests.cs
+++ b/src/csharp/Microsoft.Spark.UnitTest/TypeConverterTests.cs
@@ -20,6 +20,7 @@ public void TestBaseCase()
Assert.Equal((short)1, TypeConverter.ConvertTo((short)1));
Assert.Equal((ushort)1, TypeConverter.ConvertTo((ushort)1));
Assert.Equal(1, TypeConverter.ConvertTo(1));
+ Assert.Equal(1L, TypeConverter.ConvertTo(1));
Assert.Equal(1u, TypeConverter.ConvertTo(1u));
Assert.Equal(1L, TypeConverter.ConvertTo(1L));
Assert.Equal(1ul, TypeConverter.ConvertTo(1ul));
diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/PayloadWriter.cs b/src/csharp/Microsoft.Spark.Worker.UnitTest/PayloadWriter.cs
index 4798950c4..a96d6130b 100644
--- a/src/csharp/Microsoft.Spark.Worker.UnitTest/PayloadWriter.cs
+++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/PayloadWriter.cs
@@ -351,6 +351,7 @@ internal PayloadWriter Create(Version version = null)
new BroadcastVariableWriterV2_4_X(),
new CommandWriterV2_4_X());
case Versions.V3_3_0:
+ case Versions.V3_5_1:
return new PayloadWriter(
version,
new TaskContextWriterV3_3_X(),
diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/TestData.cs b/src/csharp/Microsoft.Spark.Worker.UnitTest/TestData.cs
index b7a751317..a4e6f49d0 100644
--- a/src/csharp/Microsoft.Spark.Worker.UnitTest/TestData.cs
+++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/TestData.cs
@@ -20,6 +20,7 @@ public static IEnumerable