1
1
parameters :
2
- - name : 'versions'
2
+ - name : tests
3
3
type : object
4
4
default : {}
5
- - name : 'testOptions'
5
+ - name : backwardCompatibleRelease
6
6
type : string
7
7
default : ''
8
+ - name : forwardCompatibleRelease
9
+ type : string
10
+ default : ''
11
+
8
12
9
13
stages :
10
- - ${{ each version in parameters.versions }} :
11
- - stage : E2E_Tests_${{ replace(version, '.', '_') }}
12
- displayName : E2E tests for Spark ${{ version }}
14
+ - ${{ each test in parameters.tests }} :
15
+ - stage : E2E_Tests_${{ replace(test.version, '.', '_') }}
16
+ displayName : E2E tests for Spark ${{ test.version }}
13
17
dependsOn : Build
14
18
jobs :
15
- - job : Run
16
- pool : Hosted VS2017
17
-
18
- variables :
19
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }} :
20
- _OfficialBuildIdArgs : /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
21
- HADOOP_HOME : $(Build.BinariesDirectory)\hadoop
22
- DOTNET_WORKER_DIR : $(CurrentDotnetWorkerDir)
23
-
24
- steps :
25
- - task : DownloadBuildArtifacts@0
26
- displayName : Download Build Artifacts
27
- inputs :
28
- artifactName : Microsoft.Spark.Binaries
29
- downloadPath : $(Build.ArtifactStagingDirectory)
30
-
31
- - task : CopyFiles@2
32
- displayName : Copy jars
33
- inputs :
34
- sourceFolder : $(ArtifactPath)/Jars
35
- contents : '**/*.jar'
36
- targetFolder : $(Build.SourcesDirectory)/src/scala
37
-
38
- - task : BatchScript@1
39
- displayName : Download Winutils.exe
40
- inputs :
41
- filename : script\download-hadoop-utils.cmd
42
- arguments : $(Build.BinariesDirectory)
43
-
44
- - task : BatchScript@1
45
- displayName : ' Download Spark Distro ${{ version }}'
46
- inputs :
47
- filename : script\download-spark-distros.cmd
48
- arguments : $(Build.BinariesDirectory) ${{ version }}
49
-
50
- - task : DotNetCoreCLI@2
51
- displayName : ' E2E tests'
52
- inputs :
53
- command : test
54
- projects : ' **/Microsoft.Spark*.E2ETest/*.csproj'
55
- arguments : ' --configuration $(buildConfiguration) ${{ parameters.testOptions }}'
56
- env :
57
- SPARK_HOME : $(Build.BinariesDirectory)\spark-${{ version }}-bin-hadoop2.7
19
+ - ${{ each option in test.jobOptions }} :
20
+ - job : Run_${{ replace(option.pool, ' ', '_') }}
21
+ pool : ${{ option.pool }}
22
+
23
+ variables :
24
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }} :
25
+ _OfficialBuildIdArgs : /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
26
+
27
+ steps :
28
+ - task : DownloadBuildArtifacts@0
29
+ displayName : Download Build Artifacts
30
+ inputs :
31
+ artifactName : Microsoft.Spark.Binaries
32
+ downloadPath : $(Build.ArtifactStagingDirectory)
33
+
34
+ - pwsh : |
35
+ $framework = "netcoreapp3.1"
36
+
37
+ if ($env:AGENT_OS -eq 'Windows_NT') {
38
+ $runtimeIdentifier = "win-x64"
39
+ } else {
40
+ $runtimeIdentifier = "linux-x64"
41
+ }
42
+
43
+ $pathSeparator = [IO.Path]::DirectorySeparatorChar
44
+ $artifactPath = "$(Build.ArtifactStagingDirectory)${pathSeparator}Microsoft.Spark.Binaries"
45
+ echo "##vso[task.setvariable variable=PATH_SEPARATOR]$pathSeparator"
46
+ echo "##vso[task.setvariable variable=ArtifactPath]$artifactPath"
47
+
48
+ $backwardCompatibleRelease = "${{ parameters.backwardCompatibleRelease }}"
49
+ echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR]$(Build.BinariesDirectory)${pathSeparator}Microsoft.Spark.Worker-${backwardCompatibleRelease}"
50
+ echo "##vso[task.setvariable variable=BACKWARD_COMPATIBLE_WORKER_URL]https://github.com/dotnet/spark/releases/download/v${backwardCompatibleRelease}/Microsoft.Spark.Worker.${framework}.${runtimeIdentifier}-${backwardCompatibleRelease}.zip"
51
+
52
+ $dotnetWorkerDir = "${artifactPath}${pathSeparator}Microsoft.Spark.Worker${pathSeparator}${framework}${pathSeparator}${runtimeIdentifier}"
53
+ echo "##vso[task.setvariable variable=CURRENT_DOTNET_WORKER_DIR]$dotnetWorkerDir"
54
+ if ($env:AGENT_OS -eq 'Linux') {
55
+ chmod +x "${dotnetWorkerDir}${pathSeparator}Microsoft.Spark.Worker"
56
+ }
57
+ displayName: 'Setup Variables and Permissions'
58
+
59
+ - checkout : self
60
+ path : s$(PATH_SEPARATOR)dotnet-spark
61
+
62
+ - task : CopyFiles@2
63
+ displayName : Copy jars
64
+ inputs :
65
+ sourceFolder : $(ArtifactPath)$(PATH_SEPARATOR)Jars
66
+ contents : '**$(PATH_SEPARATOR)*.jar'
67
+ targetFolder : $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala
68
+
69
+ - task : PowerShell@2
70
+ condition : eq( variables['Agent.OS'], 'Windows_NT' )
71
+ displayName : Download Winutils.exe
72
+ inputs :
73
+ workingDirectory : $(Build.BinariesDirectory)
74
+ pwsh : true
75
+ targetType : inline
76
+ script : |
77
+ echo "Download Hadoop utils for Windows."
78
+ curl -k -L -o hadoop.zip https://github.com/steveloughran/winutils/releases/download/tag_2017-08-29-hadoop-2.8.1-native/hadoop-2.8.1.zip
79
+ unzip hadoop.zip
80
+ New-Item -ItemType Directory -Force -Path hadoop\bin
81
+ cp hadoop-2.8.1\winutils.exe hadoop\bin
82
+
83
+ - pwsh : |
84
+ echo "Downloading Spark ${{ test.version }}"
85
+ curl -k -L -o spark-${{ test.version }}.tgz https://archive.apache.org/dist/spark/spark-${{ test.version }}/spark-${{ test.version }}-bin-hadoop2.7.tgz
86
+ tar xzvf spark-${{ test.version }}.tgz
87
+ displayName: 'Download Spark Distro ${{ test.version }}'
88
+ workingDirectory: $(Build.BinariesDirectory)
89
+
90
+ - task : DotNetCoreCLI@2
91
+ displayName : ' E2E tests'
92
+ inputs :
93
+ command : test
94
+ projects : ' **/Microsoft.Spark*.E2ETest/*.csproj'
95
+ arguments : ' --configuration $(buildConfiguration) ${{ option.testOptions }}'
96
+ workingDirectory : $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
97
+ env :
98
+ HADOOP_HOME : $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
99
+ SPARK_HOME : $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
100
+ DOTNET_WORKER_DIR : $(CURRENT_DOTNET_WORKER_DIR)
101
+
102
+ - pwsh : |
103
+ echo "Downloading ${env:BACKWARD_COMPATIBLE_WORKER_URL}"
104
+ curl -k -L -o Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip ${env:BACKWARD_COMPATIBLE_WORKER_URL}
105
+ unzip Microsoft.Spark.Worker-${{ parameters.backwardCompatibleRelease }}.zip -d $([System.IO.Directory]::GetParent($env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR).FullName)
106
+
107
+ if ($env:AGENT_OS -eq 'Linux') {
108
+ chmod +x "${env:BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR}${env:PATH_SEPARATOR}Microsoft.Spark.Worker"
109
+ }
110
+ displayName: 'Setup Backward Compatible Microsoft Spark Worker ${{ parameters.backwardCompatibleRelease }}'
111
+ workingDirectory: $(Build.BinariesDirectory)
112
+ env:
113
+ SPARK_VERSION: ${{ test.version }}
114
+
115
+ - task : DotNetCoreCLI@2
116
+ displayName : ' E2E Backward Compatibility Tests'
117
+ inputs :
118
+ command : test
119
+ projects : ' **/Microsoft.Spark*.E2ETest/*.csproj'
120
+ arguments : ' --configuration $(buildConfiguration) ${{ option.backwardCompatibleTestOptions }}'
121
+ workingDirectory : $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark
122
+ env :
123
+ HADOOP_HOME : $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
124
+ SPARK_HOME : $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
125
+ DOTNET_WORKER_DIR : $(BACKWARD_COMPATIBLE_DOTNET_WORKER_DIR)
126
+
127
+ - checkout : forwardCompatibleRelease
128
+ path : s$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}
129
+
130
+ - task : Maven@3
131
+ displayName : ' Maven build src for forward compatible release v${{ parameters.forwardCompatibleRelease }}'
132
+ inputs :
133
+ mavenPomFile : $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}$(PATH_SEPARATOR)src$(PATH_SEPARATOR)scala$(PATH_SEPARATOR)pom.xml
134
+
135
+ - task : DotNetCoreCLI@2
136
+ displayName : ' E2E Forward Compatibility Tests'
137
+ inputs :
138
+ command : test
139
+ projects : ' **/Microsoft.Spark*.E2ETest/*.csproj'
140
+ arguments : ' --configuration $(buildConfiguration) ${{ option.forwardCompatibleTestOptions }}'
141
+ workingDirectory : $(Build.SourcesDirectory)$(PATH_SEPARATOR)dotnet-spark-${{ parameters.forwardCompatibleRelease }}
142
+ env :
143
+ HADOOP_HOME : $(Build.BinariesDirectory)$(PATH_SEPARATOR)hadoop
144
+ SPARK_HOME : $(Build.BinariesDirectory)$(PATH_SEPARATOR)spark-${{ test.version }}-bin-hadoop2.7
145
+ DOTNET_WORKER_DIR : $(CURRENT_DOTNET_WORKER_DIR)
0 commit comments