#!/usr/bin/env groovy
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

import groovy.json.JsonOutput
import java.util.regex.Pattern

SCRIPTS_PATH = 'scripts/build' // Root path for all CI scripts
PIPELINE_CONFIG_FILE = "${SCRIPTS_PATH}/Jenkins/o3de.json"
INCREMENTAL_BUILD_SCRIPT_PATH = "${SCRIPTS_PATH}/bootstrap/incremental_build_util.py"
EBS_SNAPSHOT_SCRIPT_PATH = "${SCRIPTS_PATH}/tools/ebs_snapshot.py"
PIPELINE_RETRY_ATTEMPTS = 3
// Number of minutes of inactivity in all stages of the pipeline to reach the timeout
PIPELINE_TIMEOUT = 60
EMPTY_JSON = readJSON text: '{}'

ENGINE_REPOSITORY_NAME = 'o3de'
ENGINE_DEVELOPMENT_BRANCH = 'development'
ENGINE_URL = "https://github.com/o3de/o3de.git"

COMMIT_DOMAIN_NAME = ''
COMMIT_ORGANIZATION_NAME = ''
COMMIT_REPOSITORY_NAME = ''
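// Parses the commit repository URL from the SCM configuration into its domain,
// organization, and repository name. Marked @NonCPS because java.util.regex.Matcher
// is not serializable, so this must run outside the CPS-transformed pipeline code.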
@NonCPS
def doit()
{
    def httpsPattern = ~'https://(.+)/(.+)/(.+).git'
    def commitUrl = scm.userRemoteConfigs.url[0]
    def matcher = commitUrl =~ httpsPattern
    echo "matcher ${commitUrl} =~ ${httpsPattern}"
    matcher.find()
    COMMIT_DOMAIN_NAME = matcher.group(1)
    echo "matcher.group(1) = COMMIT_DOMAIN_NAME = ${COMMIT_DOMAIN_NAME}"
    COMMIT_ORGANIZATION_NAME = matcher.group(2)
    echo "matcher.group(2) = COMMIT_ORGANIZATION_NAME = ${COMMIT_ORGANIZATION_NAME}"
    COMMIT_REPOSITORY_NAME = matcher.group(3)
    echo "matcher.group(3) = COMMIT_REPOSITORY_NAME = ${COMMIT_REPOSITORY_NAME}"
}
doit()
// Branches with build snapshots
BUILD_SNAPSHOTS = ['development', 'stabilization/2305']
// Build snapshots plus an empty entry (for use with the 'SNAPSHOT' pipeline parameter)
BUILD_SNAPSHOTS_WITH_EMPTY = BUILD_SNAPSHOTS + ''
// The default build snapshot to be selected in the 'SNAPSHOT' pipeline parameter
DEFAULT_BUILD_SNAPSHOT = BUILD_SNAPSHOTS_WITH_EMPTY.get(0)
// Branches with build snapshots as a comma-separated string
env.BUILD_SNAPSHOTS = BUILD_SNAPSHOTS.join(",")

def pipelineProperties = []

def pipelineParameters = [
    // Build/clean parameters
    // CLEAN_OUTPUT_DIRECTORY is used by the ci_build scripts. Creating the parameter here passes it to jobs as an environment variable, which is how it is consumed.
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a "clean" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
    booleanParam(defaultValue: false, description: 'Cancels the AR immediately on any failure in the pipeline and marks it as failed.', name: 'FAIL_FAST'),
    booleanParam(defaultValue: false, description: 'Deletes the volume used for the workspace after the pipeline steps are completed.', name: 'DISCARD_VOLUME')
]
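// Runs a shell command on the current node: sh on Unix-like agents, bat on Windows.
// On Windows, forward slashes are optionally converted to backslashes and '%' is
// escaped as '%%' so the batch interpreter does not expand it.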
def PlatformSh(cmd, lbl = '', winSlashReplacement = true, winCharReplacement = true) {
    if (env.IS_UNIX) {
        sh label: lbl,
           script: cmd
    } else {
        if (winSlashReplacement) {
            cmd = cmd.replace('/', '\\')
        }
        if (winCharReplacement) {
            cmd = cmd.replace('%', '%%')
        }
        bat label: lbl,
            script: cmd
    }
}
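// Creates a directory with the platform-appropriate command (mkdir -p on Unix).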
def PlatformMkdir(path) {
    if (env.IS_UNIX) {
        sh label: "Making directories ${path}",
           script: "mkdir -p ${path}"
    } else {
        def win_path = path.replace('/', '\\')
        bat label: "Making directories ${win_path}",
            script: "mkdir ${win_path}."
    }
}
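// Removes a file with the platform-appropriate command, optionally via sudo on Unix.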
def PlatformRm(path, sudo = false) {
    if (env.IS_UNIX) {
        if (sudo) {
            sh label: "Removing ${path}",
               script: "sudo rm ${path}"
        } else {
            sh label: "Removing ${path}",
               script: "rm ${path}"
        }
    } else {
        def win_path = path.replace('/', '\\')
        bat label: "Removing ${win_path}",
            script: "del /Q ${win_path}"
    }
}
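// Removes a directory tree if it exists, optionally via sudo on Unix.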
def PlatformRmDir(path, sudo = false) {
    if (env.IS_UNIX) {
        if (sudo) {
            sh label: "Removing ${path}",
               script: "if [ -d ${path} ]; then sudo rm -rf ${path}; fi"
        } else {
            sh label: "Removing ${path}",
               script: "if [ -d ${path} ]; then rm -rf ${path}; fi"
        }
    } else {
        def win_path = path.replace('/', '\\')
        bat label: "Removing ${win_path}",
            script: "IF exist ${win_path} rd /s /q ${win_path}"
    }
}
def IsPullRequest(branchName) {
    // Temporarily using the branch name to detect whether we are in a PR.
    // In the future we will check with GitHub.
    return branchName.startsWith('PR-')
}

def IsPeriodicPipeline(pipelineName) {
    return pipelineName.startsWith('periodic-')
}
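// Determines whether a build job runs in this pipeline: PR builds run jobs tagged
// with the pipeline name; otherwise the JOB_LIST_OVERRIDE parameter (if set) or the
// job's TAGS decide, gated on the platform's enable parameter.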
def IsJobEnabled(branchName, buildTypeMap, pipelineName, platformName) {
    if (IsPullRequest(branchName) && !IsPeriodicPipeline(pipelineName)) {
        return buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
    }
    def job_list_override = params.JOB_LIST_OVERRIDE ? params.JOB_LIST_OVERRIDE.tokenize(',') : ''
    if (!job_list_override.isEmpty()) {
        return params[platformName] && job_list_override.contains(buildTypeMap.key)
    } else {
        return params[platformName] && buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
    }
}
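// Splits the Jenkins job name into [project, pipeline]. A job named "o3de_periodic"
// yields ['o3de', 'periodic']; a name without an underscore yields [name, 'default'].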
def GetRunningPipelineName(JENKINS_JOB_NAME) {
    // If the job name has an underscore, the suffix after the last underscore is the pipeline name
    def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
    if (job_parts.size() > 1) {
        return [job_parts.take(job_parts.size() - 1).join('_'), job_parts[job_parts.size() - 1]]
    }
    return [job_parts[0], 'default']
}

@NonCPS
def RegexMatcher(str, regex) {
    def matcher = (str =~ regex)
    return matcher ? matcher.group(1) : null
}
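// Loads the root pipeline config (o3de.json), then discovers per-platform pipeline
// and build configs via the glob patterns it declares, keyed by the platform name
// captured from each matched path. Config files are removed after being read
// (presumably to avoid stale copies lingering in the workspace).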
def LoadPipelineConfig(String pipelineName, String branchName) {
    echo 'Loading pipeline config'
    def pipelineConfig = {}
    pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
    PlatformRm(PIPELINE_CONFIG_FILE)
    pipelineConfig.platforms = EMPTY_JSON
    // Load the pipeline configs per platform
    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
        def platform_regex = pipeline_config.replace('.', '\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/', '\\\\')
        }
        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
            echo "\tFound platform pipeline config ${pipeline_config_path}"
            def platform = RegexMatcher(pipeline_config_path, platform_regex)
            if (platform) {
                pipelineConfig.platforms[platform] = EMPTY_JSON
                pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
            }
            PlatformRm(pipeline_config_path.toString())
        }
    }
    // Load the build configs
    pipelineConfig.BUILD_CONFIGS.each { build_config ->
        def platform_regex = build_config.replace('.', '\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/', '\\\\')
        }
        echo "Searching configs in ${build_config} using ${platform_regex}"
        for (build_config_path in findFiles(glob: build_config)) {
            echo "\tFound config ${build_config_path}"
            def platform = RegexMatcher(build_config_path, platform_regex)
            if (platform) {
                pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
            }
        }
    }
    return pipelineConfig
}
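// Builds the environment variable map for a job by layering, in order: the platform's
// ENV, the platform's per-pipeline PIPELINE_ENV_OVERRIDE, and the build type's own
// environment. Later layers override earlier ones.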
def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
    def envVarMap = [:]
    platformPipelineEnv = platformEnv['ENV'] ?: [:]
    platformPipelineEnv.each { var ->
        envVarMap[var.key] = var.value
    }
    platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
    platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
    platformPipelineEnvOverride.each { var ->
        envVarMap[var.key] = var.value
    }
    buildTypeEnv.each { var ->
        // This may override the entries above if the job defines the same key
        envVarMap[var.key] = var.value
    }
    // Environment that only applies to Jenkins tweaks.
    // 3rdParty downloads are stored on the EBS volume so they can be reused across node
    // instances. This allows us to scale up and down without re-downloading 3rdParty.
    envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages"
    envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages"
    return envVarMap
}

def GetEnvStringList(Map envVarMap) {
    def strList = []
    envVarMap.each { var ->
        strList.add("${var.key}=${var.value}")
    }
    return strList
}
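// Returns the platform-specific directory where crash dumps are written, used to
// detect and archive crash artifacts after a build.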
def GetCrashArtifactDir() {
    if (env.IS_UNIX) {
        def uname = sh(script: 'uname', returnStdout: true).trim()
        if (uname.startsWith('Darwin')) {
            return '~/Library/Logs/DiagnosticReports/' // macOS
        }
        else {
            return '/var/lib/apport/coredump/'
        }
    } else {
        return "$LOCALAPPDATA\\CrashDumps"
    }
}

def GetRemoteConfig(repositoryName, repositoryUrl) {
    def theRemoteConfig = [
        name: repositoryName,
        url: repositoryUrl,
        credentialsId: scm.userRemoteConfigs.credentialsId[0]
    ]
    return theRemoteConfig
}

def GetEngineRemoteConfig() {
    return GetRemoteConfig(ENGINE_REPOSITORY_NAME, ENGINE_URL)
}
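// Sparse-checks-out only the CI bootstrap directories (Jenkins configs, bootstrap,
// Platform and tools scripts) from the given branch, so the pipeline can be set up
// without pulling the full repository.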
def CheckoutEngineBootstrapScripts(branchName, userRemoteConfig = [GetEngineRemoteConfig()]) {
    checkout([$class: 'GitSCM',
        branches: [[name: "*/${branchName}"]],
        doGenerateSubmoduleConfigurations: false,
        extensions: [
            [$class: 'PruneStaleBranch'],
            [$class: 'AuthorInChangelog'],
            [$class: 'SparseCheckoutPaths', sparseCheckoutPaths: [
                [$class: 'SparseCheckoutPath', path: "${SCRIPTS_PATH}/Jenkins/"],
                [$class: 'SparseCheckoutPath', path: "${SCRIPTS_PATH}/bootstrap/"],
                [$class: 'SparseCheckoutPath', path: "${SCRIPTS_PATH}/Platform"],
                [$class: 'SparseCheckoutPath', path: "${SCRIPTS_PATH}/tools/"]
            ]],
            // Shallow checkouts break changelog computation. Do not enable.
            [$class: 'CloneOption', noTags: false, reference: '', shallow: false]
        ],
        submoduleCfg: [],
        userRemoteConfigs: userRemoteConfig
    ])
}
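// Checks out a repository at the given branch with retries and staggered sleeps to
// avoid request throttling. Optionally records the commit id and date into
// env.CHANGE_ID / env.CHANGE_DATE for downstream scripts.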
def CheckoutRepo(String branchName, userRemoteConfigs, boolean disableSubmodules = false, boolean setEnvCommit = false) {
    echo "Checkout Repo:"
    if (fileExists('.git')) {
        // If the repository is locked after checkout, we likely took a snapshot while git was
        // running; garbage collect to leave the repo in a usable state.
        def indexLockFile = '.git/index.lock'
        if (fileExists(indexLockFile)) {
            PlatformSh('git gc', 'Git GarbageCollect')
        }
        if (fileExists(indexLockFile)) { // if it is still there, remove it
            PlatformRm(indexLockFile)
        }
    }
    def random = new Random()
    def retryAttempt = 0
    retry(5) {
        if (retryAttempt > 0) {
            sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) responses from CodeCommit
        }
        retryAttempt = retryAttempt + 1
        checkout scm: [
            $class: 'GitSCM',
            branches: [[name: branchName]],
            extensions: [
                [$class: 'PruneStaleBranch'],
                [$class: 'AuthorInChangelog'],
                [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                [$class: 'GitLFSPull'],
                [$class: 'CheckoutOption', timeout: 60]
            ],
            userRemoteConfigs: userRemoteConfigs
        ]
    }
    if (setEnvCommit) {
        // CHANGE_ID is used by some scripts (usually metric jobs) to uniquely identify the current change
        PlatformSh('git rev-parse HEAD > commitid', 'Getting commit id')
        env.CHANGE_ID = readFile file: 'commitid'
        env.CHANGE_ID = env.CHANGE_ID.trim()
        PlatformRm('commitid')
        // CHANGE_DATE is used by the installer to provide some ability to sort tagged builds in addition to BRANCH_NAME and CHANGE_ID
        commitDateFmt = '%%cI'
        if (env.IS_UNIX) commitDateFmt = '%cI'
        PlatformSh("git show -s --format=${commitDateFmt} ${env.CHANGE_ID} > commitdate", 'Getting commit date', winSlashReplacement=true, winCharReplacement=false)
        env.CHANGE_DATE = readFile file: 'commitdate'
        env.CHANGE_DATE = env.CHANGE_DATE.trim()
        PlatformRm('commitdate')
    }
}
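// Mounts (or recreates) the per-job EBS volume that backs the workspace via
// incremental_build_util.py, then ensures the mount point is owned by the build
// user on Unix nodes.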
def HandleDriveMount(String snapshot, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean recreateVolume = false) {
    unstash name: 'incremental_build_script'
    def pythonCmd = ''
    if (env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
    else pythonCmd = 'python3 -u '
    if (recreateVolume) {
        PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume', winSlashReplacement=false)
    }
    PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --snapshot-hint ${snapshot} --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume', winSlashReplacement=false)
    if (env.IS_UNIX) {
        sh label: 'Setting volume\'s ownership',
           script: """
                if sudo test ! -d "${workspace}"; then
                    sudo mkdir -p ${workspace}
                    cd ${workspace}/..
                    sudo chown -R lybuilder:root .
                fi
            """
    }
}
// This function assumes it is always executed from the workspace root.
def NoControlFile(repoName, processed_repos, o3de_project, defer_enable_gems) {
    echo "NoControlFile(reponame:${repoName}, processed_repos:${processed_repos})"
    processed_repos.add(repoName)
    def ext = ''
    if (env.IS_UNIX) {
        ext = '.sh'
    }
    // There is no control file, so we must determine if this is a composite repo or a singular repo
    dir(repoName) {
        // A singular object repo will have one of these o3de json files in the root
        if (fileExists("engine.json")) {
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --engine-path ${workspace}/${repoName} || echo>nul", "Registering engine in ${workspace}/${repoName}")
        }
        else if (fileExists("gem.json")) {
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --gem-path ${workspace}/${repoName} || echo>nul", "Registering gem in ${workspace}/${repoName}")
            if (o3de_project) {
                defer_enable_gems.add([COMMAND: "${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} enable-gem --gem-path ${workspace}/${repoName} --project-name ${o3de_project}", ECHO: "Enabling ${repoName} in ${o3de_project}"])
            }
        }
        else if (fileExists("project.json")) {
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --project-path ${workspace}/${repoName} || echo>nul", "Registering project in ${workspace}/${repoName}")
        }
        else if (fileExists("template.json")) {
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --template-path ${workspace}/${repoName} || echo>nul", "Registering template in ${workspace}/${repoName}")
        }
        else if (fileExists("restricted.json")) {
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --restricted-path ${workspace}/${repoName} || echo>nul", "Registering restricted in ${workspace}/${repoName}")
        }
        else {
            // A standard composite repo has the form:
            // root
            //   Engines
            //   Gems
            //   Projects
            //   Templates
            //   Restricted
            def found = false
            if (fileExists("Engines")) {
                PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --all-engines-path ${workspace}/${repoName}/Engines || echo>nul", "Registering all engines in ${workspace}/${repoName}/Engines")
                found = true
            }
            if (fileExists("Gems")) {
                PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --all-gems-path ${workspace}/${repoName}/Gems || echo>nul", "Registering all gems in ${workspace}/${repoName}/Gems")
                if (o3de_project) {
                    defer_enable_gems.add([COMMAND: "${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} enable-gem --all-gem-paths ${workspace}/${repoName}/Gems --project-name ${o3de_project}", ECHO: "Enabling all gems in ${o3de_project}"])
                }
                found = true
            }
            if (fileExists("Projects")) {
                PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --all-projects-path ${workspace}/${repoName}/Projects || echo>nul", "Registering all projects in ${workspace}/${repoName}/Projects")
                found = true
            }
            if (fileExists("Templates")) {
                PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --all-templates-path ${workspace}/${repoName}/Templates || echo>nul", "Registering all templates in ${workspace}/${repoName}/Templates")
                found = true
            }
            if (fileExists("Restricted")) {
                PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --all-restricted-path ${workspace}/${repoName}/Restricted || echo>nul", "Registering all restricted in ${workspace}/${repoName}/Restricted")
                found = true
            }
            if (!found) {
                echo "WARNING: No objects found in repo ${repoName}!"
            }
        }
    }
    return processed_repos
}
// This function assumes it is always executed from the workspace root.
def ExecuteControlFile(controlFile, repoName, processed_repos, o3de_project, defer_enable_gems) {
    echo "ExecuteControlFile(controlFile:${controlFile}, reponame:${repoName}, processed_repos:${processed_repos})"
    processed_repos.add(repoName)
    def ext = ''
    if (env.IS_UNIX) {
        ext = '.sh'
    }
    // Register all the objects specified in this control file
    if (controlFile.REGISTER_ENGINES) {
        echo 'REGISTER_ENGINES'
        controlFile.REGISTER_ENGINES.each { engineName ->
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --engine-path ${workspace}/${repoName}/${engineName} || echo>nul", "Registering engine in ${workspace}/${repoName}/${engineName}")
        }
    }
    if (controlFile.REGISTER_GEMS) {
        echo 'REGISTER_GEMS'
        controlFile.REGISTER_GEMS.each { gemName ->
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --gem-path ${workspace}/${repoName}/${gemName} || echo>nul", "Registering gem in ${workspace}/${repoName}/${gemName}")
        }
    }
    if (controlFile.REGISTER_PROJECTS) {
        echo 'REGISTER_PROJECTS'
        controlFile.REGISTER_PROJECTS.each { projectName ->
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --project-path ${workspace}/${repoName}/${projectName} || echo>nul", "Registering project in ${workspace}/${repoName}/${projectName}")
        }
    }
    if (controlFile.REGISTER_TEMPLATES) {
        echo 'REGISTER_TEMPLATES'
        controlFile.REGISTER_TEMPLATES.each { templateName ->
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --template-path ${workspace}/${repoName}/${templateName} || echo>nul", "Registering template in ${workspace}/${repoName}/${templateName}")
        }
    }
    if (controlFile.REGISTER_RESTRICTED) {
        echo 'REGISTER_RESTRICTED'
        controlFile.REGISTER_RESTRICTED.each { restrictedName ->
            PlatformSh("${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} register --restricted-path ${workspace}/${repoName}/${restrictedName} || echo>nul", "Registering restricted in ${workspace}/${repoName}/${restrictedName}")
        }
    }
    // Queue enabling of all gems specified in the control file
    if (controlFile.ENABLE_GEMS && o3de_project) {
        controlFile.ENABLE_GEMS.each { gemName ->
            if (o3de_project) {
                defer_enable_gems.add([COMMAND: "${workspace}/${ENGINE_REPOSITORY_NAME}/scripts/o3de${ext} enable-gem --gem-path ${workspace}/${repoName}/${gemName} --project-name ${o3de_project}", ECHO: "Enabling ${workspace}/${repoName}/${gemName} for ${o3de_project}"])
            }
        }
    }
    // Check out all dependent repos
    if (controlFile.REPOS) {
        echo 'REPOS'
        controlFile.REPOS.each { repo ->
            // Stop cyclical repo references
            if (processed_repos.contains(repo.NAME)) {
                echo "Already processed ${repo.NAME}"
            }
            else {
                def repoControlFile = {}
                def foundRepoControlFile = false
                dir(repo.NAME) {
                    CheckoutRepo(repo.BRANCH, [GetRemoteConfig(repo.NAME, repo.URL)])
                    if (fileExists('.automatedtesting.json')) {
                        echo "Loading ${repo.URL} .automatedtesting.json"
                        repoControlFile = readJSON file: '.automatedtesting.json'
                        foundRepoControlFile = true
                    }
                }
                if (foundRepoControlFile) {
                    // There is a control file, execute it
                    processed_repos = ExecuteControlFile(repoControlFile, repo.NAME, processed_repos, o3de_project, defer_enable_gems)
                } else {
                    processed_repos = NoControlFile(repo.NAME, processed_repos, o3de_project, defer_enable_gems)
                }
            }
        }
    }
    return processed_repos
}
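// Pre-build setup for every node: mounts (or recreates) the workspace volume, cleans
// the workspace if requested, checks out the engine (and the commit repo when it is
// not the engine), bootstraps python, registers engines/gems/projects, and finally
// runs any deferred enable-gem commands.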
def PreBuildCommonSteps(Map pipelineConfig, Map params, String snapshot, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
    echo 'Starting pre-build common steps...'
    if (mount) {
        if (env.RECREATE_VOLUME?.toBoolean()) {
            echo 'Recreating volume...'
            HandleDriveMount(snapshot, repositoryName, projectName, pipeline, branchName, platform, buildType, workspace, true)
        } else {
            echo 'Mounting volume...'
            HandleDriveMount(snapshot, repositoryName, projectName, pipeline, branchName, platform, buildType, workspace, false)
        }
    }
    // Clean up the previous repo location. We are currently at the root of the workspace; if
    // there is a .git folder we need to clean up. Once all branches take this relocation, we can remove this.
    if (env.CLEAN_WORKSPACE?.toBoolean() || fileExists("${workspace}/.git")) {
        if (fileExists(workspace)) {
            echo 'Cleaning workspace...'
            PlatformRmDir(workspace, true)
        }
    }
    def engine_project = ''
    if (pipelineConfig.DEFAULT_PROJECT) {
        engine_project = pipelineConfig.DEFAULT_PROJECT
    }
    else {
        engine_project = 'AutomatedTesting'
    }
    dir(workspace) {
        // Add the folder where we will store the 3rdParty downloads and packages
        dir('3rdParty') {
        }
        def processed_repos = []
        def defer_enable_gems = []
        // We always check out the engine repo first and get python; setEnvCommit is set only if the commit repo is the engine repo
        def engineAutomatedTestingControlFile = {}
        def engineAutomatedTestingControlFileFound = false
        dir(ENGINE_REPOSITORY_NAME) {
            echo "Checkout Engine: ${ENGINE_URL}"
            def setEnvCommit = false
            def the_branch = branchName
            def remoteConfig = scm.userRemoteConfigs
            if (COMMIT_REPOSITORY_NAME == ENGINE_REPOSITORY_NAME) {
                // The PR is for the engine repo, so use the passed-in branch name and record the change id and change date
                setEnvCommit = true
            } else {
                // This PR is for another repo, so use the development branch for the engine and do not set the commit id or change date
                the_branch = ENGINE_DEVELOPMENT_BRANCH
                remoteConfig = [GetEngineRemoteConfig()]
            }
            // Checkout the engine branch
            CheckoutRepo(the_branch, remoteConfig, disableSubmodules, setEnvCommit)
            // Get python and clean.
            // Always run the clean step: the scripts detect which variables were set, and it
            // also cleans if the NODE_LABEL has changed.
            if (env.IS_UNIX) {
                sh label: 'Getting python',
                   script: "${pipelineConfig.PYTHON_DIR}/get_python.sh"
                sh label: "Running ${platform} clean",
                   script: "${pipelineConfig.PYTHON_DIR}/python.sh -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
            } else {
                bat label: 'Getting python',
                    script: "${pipelineConfig.PYTHON_DIR}/get_python.bat"
                bat label: "Running ${platform} clean",
                    script: "${pipelineConfig.PYTHON_DIR}/python.cmd -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
            }
            // Register the engine
            def ext = ''
            if (env.IS_UNIX) {
                ext = '.sh'
            }
            PlatformSh("scripts/o3de${ext} register --this-engine", "Registering this engine, will generate the o3de_manifest")
            // If the engine has an .automatedtesting.json, execute it
            if (fileExists('.automatedtesting.json')) {
                echo 'Loading Engine .automatedtesting.json'
                engineAutomatedTestingControlFile = readJSON file: '.automatedtesting.json'
                engineAutomatedTestingControlFileFound = true
            }
        }
        if (engineAutomatedTestingControlFileFound) {
            processed_repos = ExecuteControlFile(engineAutomatedTestingControlFile, ENGINE_REPOSITORY_NAME, processed_repos, engine_project, defer_enable_gems)
        }
        // If we are not committing to the engine repo, pull the commit repo in here
        if (COMMIT_REPOSITORY_NAME != ENGINE_REPOSITORY_NAME) {
            echo "Commit repo: ${scm.userRemoteConfigs.url[0]}"
            def automatedTestingControlFile = {}
            def automatedTestingControlFileFound = false
            dir(COMMIT_REPOSITORY_NAME) {
                CheckoutRepo(branchName, scm.userRemoteConfigs, disableSubmodules, true)
                // If the repo has an .automatedtesting.json, execute it
                if (fileExists('.automatedtesting.json')) {
                    echo "Loading ${COMMIT_REPOSITORY_NAME} .automatedtesting.json"
                    automatedTestingControlFile = readJSON file: '.automatedtesting.json'
                    automatedTestingControlFileFound = true
                }
            }
            def external_project = ''
            if (env.PROJECT_OVERRIDE) {
                external_project = env.PROJECT_OVERRIDE
            }
            else if (params && params.CMAKE_LY_PROJECTS) {
                external_project = params.CMAKE_LY_PROJECTS
            }
            else {
                echo "No PROJECT_OVERRIDE or CMAKE_LY_PROJECTS defined"
            }
            echo "external_project = ${external_project}"
            if (automatedTestingControlFileFound) {
                processed_repos = ExecuteControlFile(automatedTestingControlFile, COMMIT_REPOSITORY_NAME, processed_repos, external_project, defer_enable_gems)
            }
            else {
                processed_repos = NoControlFile(COMMIT_REPOSITORY_NAME, processed_repos, external_project, defer_enable_gems)
            }
            // For external projects only, set CMAKE_LY_PROJECTS to the relative project path so it works on all platforms.
            dir(ENGINE_REPOSITORY_NAME) {
                // Try/catch in the event that this function is called before the scripts are stashed
                try {
                    unstash "${COMMIT_REPOSITORY_NAME}-scripts"
                } catch (e) {
                    print "Unstash failed, ignoring"
                }
                def ext = ''
                if (env.IS_UNIX) {
                    ext = '.sh'
                }
                if (external_project) {
                    PlatformSh("scripts/o3de${ext} get-registered --project-name ${external_project} > projectpath", "Get the project path")
                    def project_path = readFile file: 'projectpath'
                    PlatformRm('projectpath')
                    project_path = project_path.trim()
                    project_path = project_path.replace("\\", "/")
                    workspace_path = "${workspace}"
                    workspace_path = workspace_path.replace("\\", "/")
                    project_path = project_path.replace(workspace_path, "..") // Set the relative path to the project from the engine path
                    project_path = (project_path =~ /\.\.\/[\/a-zA-Z0-9-_]+/)[0] // Ensure only the path is assigned to the variable
                    env.CMAKE_LY_PROJECTS = project_path
                    echo "env.CMAKE_LY_PROJECTS = ${env.CMAKE_LY_PROJECTS}"
                }
            }
        }
        // Defer enabling gems until after the repos are done registering all objects
        if (defer_enable_gems) {
            echo 'ENABLE_GEMS'
            defer_enable_gems.each { enableGemCommands ->
                PlatformSh(enableGemCommands.COMMAND, enableGemCommands.ECHO)
            }
        }
    }
}
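// Runs a single build type for a platform by invoking the python build entry point
// from the engine root, bounded by the inactivity timeout.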
def Build(Map pipelineConfig, String platform, String type, String workspace) {
    timeout(time: env.TIMEOUT, unit: 'MINUTES', activity: true) {
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        def ext = ''
        if (env.IS_UNIX) {
            command += '.sh'
            ext = '.sh'
        }
        else command += '.cmd'
        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
        dir(workspace) {
            dir(ENGINE_REPOSITORY_NAME) {
                PlatformSh(command, "Running ${platform} ${type}")
            }
        }
    }
}
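// Publishes CTest test metrics to MARS: checks out the metrics scraper from the MARS
// repo and runs it against the CMake build directory with Jenkins credentials.
// Windows-only as written (it uses bat and %var% expansion).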
def TestMetrics(Map pipelineConfig, String workspace, String branchName, String repoName, String buildJobName, String outputDirectory, String configuration) {
    catchError(buildResult: null, stageResult: null) {
        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        if (env.IS_UNIX) command += '.sh'
        else command += '.cmd'
        dir(workspace) {
            dir(ENGINE_REPOSITORY_NAME) {
                checkout scm: [
                    $class: 'GitSCM',
                    branches: [[name: '*/main']],
                    extensions: [
                        [$class: 'AuthorInChangelog'],
                        [$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']
                    ],
                    userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
                ]
                withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) {
                    command += " -u mars/scripts/python/ctest_test_metric_scraper.py " +
                        '-e jenkins.creds.user %username% -e jenkins.creds.pass %apitoken% ' +
                        "-e jenkins.base_url ${env.JENKINS_URL} " +
                        "${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} --url ${env.BUILD_URL.replace('%','%%')}"
                    bat label: "Publishing ${buildJobName} Test Metrics",
                        script: command
                }
            }
        }
    }
}

def BenchmarkMetrics(Map pipelineConfig, String workspace, String branchName, String outputDirectory) {
    catchError(buildResult: null, stageResult: null) {
        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        if (env.IS_UNIX) command += '.sh'
        else command += '.cmd'
        dir(workspace) {
            dir(ENGINE_REPOSITORY_NAME) {
                checkout scm: [
                    $class: 'GitSCM',
                    branches: [[name: '*/main']],
                    extensions: [
                        [$class: 'AuthorInChangelog'],
                        [$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']
                    ],
                    userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
                ]
                command += " -u mars/scripts/python/benchmark_scraper.py ${cmakeBuildDir} ${branchName}"
                PlatformSh(command, "Publishing Benchmark Metrics")
            }
        }
    }
}
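// Publishes JUnit-format test results from the build output's Testing directory.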
def ExportTestResults(Map options, String platform, String type, String workspace, Map params) {
    catchError(message: "Error exporting tests results (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        dir(workspace) {
            dir(ENGINE_REPOSITORY_NAME) {
                dir(params.OUTPUT_DIRECTORY) {
                    junit testResults: "Testing/**/*.xml"
                }
            }
        }
    }
}

def ExportTestScreenshots(Map options, String branchName, String platformName, String jobName, String workspace, Map params) {
    catchError(message: "Error exporting test screenshots (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        dir(workspace) {
            dir(ENGINE_REPOSITORY_NAME) {
                def screenshotsFolder = "AutomatedTesting/user/PythonTests/Automated/Screenshots"
                def s3Uploader = "${SCRIPTS_PATH}/tools/upload_to_s3.py"
                def command = "${options.PYTHON_DIR}/python.cmd -u ${s3Uploader} --base_dir ${screenshotsFolder} " +
                    '--file_regex \\"(.*zip\$)\\" ' +
                    "--bucket ${env.TEST_SCREENSHOT_BUCKET} " +
                    "--search_subdirectories True --key_prefix ${branchName}_${env.BUILD_NUMBER} " +
                    '--extra_args {\\"ACL\\":\\"bucket-owner-full-control\\"}'
                PlatformSh(command, "Uploading test screenshots for ${jobName}")
            }
        }
    }
}
// All files are included by default.
// --include will only re-include files that have been excluded by an --exclude filter.
// See more details at https://docs.aws.amazon.com/cli/latest/reference/s3/#use-of-exclude-and-include-filters
def ArchiveArtifactsOnS3(String artifactsSource, String s3Prefix = "", boolean recursive = false, List<String> includes = [], List<String> excludes = []) {
    if (!fileExists(s3Prefix)) {
        PlatformMkdir(s3Prefix)
    }
    PlatformSh("echo ${env.BUILD_URL} > ${s3Prefix}/build_url.txt")
    // archiveArtifacts is very slow, so we only archive one file and upload the remaining artifacts to the same bucket using the S3 CLI.
    archiveArtifacts artifacts: "${s3Prefix}/build_url.txt"
    def command = "aws s3 cp ${artifactsSource} s3://${env.JENKINS_ARTIFACTS_S3_BUCKET}/${env.JENKINS_JOB_NAME}/${env.BUILD_NUMBER}/artifacts/${s3Prefix} --only-show-errors "
    excludes.each { exclude ->
        command += "--exclude \"${exclude}\" "
    }
    includes.each { include ->
        command += "--include \"${include}\" "
    }
    if (recursive) command += "--recursive "
    PlatformSh(command, "Archiving artifacts to ${env.JENKINS_JOB_NAME}/${env.BUILD_NUMBER}/artifacts/${s3Prefix}", false)
}
def UploadAPLogs(String platformName, String jobName, String workspace, Map params) {
    catchError(message: "Error archiving AssetProcessor logs (this won't fail the build)", buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
        dir(workspace) {
            dir(ENGINE_REPOSITORY_NAME) {
                def projects = params.CMAKE_LY_PROJECTS.split(",")
                projects.each { project ->
                    ArchiveArtifactsOnS3("${project}/user/log", "ap_logs/${platformName}/${jobName}/${project}", true)
                }
            }
        }
    }
}

def UploadTestArtifacts(String workspace, String outputDirectory) {
    catchError(message: "Error archiving test artifacts (this won't fail the build)", buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
        echo "Uploading Test Artifacts: ${cmakeBuildDir}/Testing"
        ArchiveArtifactsOnS3("${cmakeBuildDir}/Testing", "test_artifacts", true)
    }
}

def UploadCrashArtifacts(String platform) {
    catchError(message: "Error archiving crash artifacts (this won't fail the build)", buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
        def crashArtifactDir = GetCrashArtifactDir()
        echo "Uploading Crash Artifacts: ${crashArtifactDir}"
        ArchiveArtifactsOnS3("${crashArtifactDir}", "${platform}_crash_artifacts", true)
    }
}

def UploadBuildArtifacts(String workspace, String platform, String jobName, String s3sisUploadParams = "") {
    catchError(message: "Error uploading build artifacts (this won't fail the build)", buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
        dir("${workspace}/s3sis") {
            PlatformSh("aws s3 cp ${env.S3SIS_SOURCE} . --recursive", "Downloading s3sis", winSlashReplacement=false)
            if (env.IS_UNIX) {
                PlatformSh("sudo python3 setup.py install", "Installing S3SIS")
            } else {
                PlatformSh("python3 setup.py install", "Installing S3SIS")
            }
            PlatformSh("s3siscli configure", "Configuring S3SIS")
        }
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            try {
                def cmd = "s3siscli upload"
                if (s3sisUploadParams?.trim()) {
                    cmd += " ${s3sisUploadParams}"
                }
                def current_cmd = cmd + " --label ${env.JENKINS_JOB_NAME}/${platform}/${jobName}/${env.BUILD_NUMBER}"
                PlatformSh(current_cmd, "Uploading build artifacts", winSlashReplacement=false)
                def latest_cmd = cmd + " --label ${env.JENKINS_JOB_NAME}/${platform}/${jobName}/latest"
                PlatformSh(latest_cmd, "Uploading build artifacts", winSlashReplacement=false)
            } catch (Exception e) {
                echo "WARN: Failed to upload build artifacts. \n${e}"
            }
        }
    }
}
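// Post-build cleanup for every node: resets the Testing output directories (recreating
// the Pytest/Gtest folders that must pre-exist for incremental runs), then unmounts or
// deletes the workspace volume.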
def PostBuildCommonSteps(String workspace, Map params, String projectName, String pipelineName, String branchName, String platform, String buildType, boolean mount = true) {
    echo 'Starting post-build common steps...'
    if (params && params.containsKey('OUTPUT_DIRECTORY')) {
        dir(workspace) {
            dir(ENGINE_REPOSITORY_NAME) {
                dir(params.OUTPUT_DIRECTORY) {
                    // Clean up the Testing directory
                    PlatformRmDir("Testing")
                    // Recreate the test runner xml directories, which must be pre-generated to prevent a race condition on incremental runs
                    PlatformMkdir("Testing/Pytest")
                    PlatformMkdir("Testing/Gtest")
                }
            }
        }
    }
    if (mount) {
        def pythonCmd = ''
        if (env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
        else pythonCmd = 'python3 -u '
        try {
            timeout(5) {
                if (env.DISCARD_VOLUME?.toBoolean()) {
                    PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --repository_name ${env.REPOSITORY_NAME} --project ${projectName} --pipeline ${pipelineName} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume', winSlashReplacement=false)
                } else {
                    PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
                }
            }
        } catch (Exception e) {
            echo "Unmount script error ${e}"
        }
    }
}
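// Creates a tagged EBS snapshot of the build volume and prunes snapshots older than
// the configured retention, presumably so future volumes can be seeded from a warm
// incremental-build cache (see the --snapshot-hint mount above).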
def HandleDriveSnapshots(String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType) {
    unstash name: 'ebs_snapshot_script'
    catchError(message: "Error snapshotting volume (this won't fail the build)", buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
        def pythonCmd = 'python3 -u '
        mountName = "Name:${repositoryName}_${projectName}_${pipeline}_${branchName}_${platform}_${buildType}"
        mountName = mountName.replace('/', '_').replace('\\', '_')
        PlatformSh("${pythonCmd} ${EBS_SNAPSHOT_SCRIPT_PATH} --action create --tags ${mountName} --execute", "Starting volume snapshots", true)
        PlatformSh("${pythonCmd} ${EBS_SNAPSHOT_SCRIPT_PATH} --action delete --tags ${mountName} --retention ${env.SNAP_RETENTION} --execute", "Cleaning up old snapshots", true)
    }
}
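// The Create*Stage helpers below each return a closure that runs one named stage,
// so the node blocks can compose their stage sequence programmatically.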
def CreateSetupStage(Map pipelineConfig, Map params, String snapshot, String repositoryName, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars, boolean onlyMountEBSVolume = false) {
    return {
        stage('Setup') {
            if (onlyMountEBSVolume) {
                HandleDriveMount(snapshot, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], false)
            } else {
                PreBuildCommonSteps(pipelineConfig, params, snapshot, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'], false)
            }
        }
    }
}

def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
    return {
        stage("${jobName}") {
            Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
        }
    }
}

def CreateTestMetricsStage(Map pipelineConfig, String branchName, Map environmentVars, String buildJobName, String outputDirectory, String configuration) {
    return {
        stage("${buildJobName}_metrics") {
            TestMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, env.DEFAULT_REPOSITORY_NAME, buildJobName, outputDirectory, configuration)
            BenchmarkMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, outputDirectory)
        }
    }
}

def CreateExportTestResultsStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars, Map params) {
    return {
        stage("${jobName}_results") {
            ExportTestResults(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'], params)
        }
    }
}

def CreateExportTestScreenshotsStage(Map pipelineConfig, String branchName, String platformName, String jobName, Map environmentVars, Map params) {
    return {
        stage("${jobName}_screenshots") {
            ExportTestScreenshots(pipelineConfig, branchName, platformName, jobName, environmentVars['WORKSPACE'], params)
        }
    }
}

def CreateUploadAPLogsStage(String platformName, String jobName, String workspace, Map params) {
    return {
        stage("${jobName}_upload_ap_logs") {
            UploadAPLogs(platformName, jobName, workspace, params)
        }
    }
}

def CreateUploadTestArtifactStage(String jobName, String workspace, String outputDirectory) {
    return {
        stage("${jobName}_upload_test_artifacts") {
            UploadTestArtifacts(workspace, outputDirectory)
        }
    }
}

def CreateUploadCrashArtifactStage(String jobName, String platform) {
    return {
        stage("${jobName}_upload_crash_artifacts") {
            UploadCrashArtifacts(platform)
        }
    }
}

def CreateUploadBuildArtifactStage(String workspace, String platform, String jobName, String s3sisUploadParams = "") {
    if (env.S3SIS_UPLOAD_PARAMS?.trim()) {
        s3sisUploadParams = env.S3SIS_UPLOAD_PARAMS
    }
    return {
        stage("${jobName}_upload_build_artifacts") {
            UploadBuildArtifacts(workspace, platform, jobName, s3sisUploadParams)
        }
    }
}

def CreateTeardownStage(Map environmentVars, Map params, String projectName, String pipelineName, String branchName, String platformName, String buildType) {
    return {
        stage('Teardown') {
            PostBuildCommonSteps(environmentVars['WORKSPACE'], params, projectName, pipelineName, branchName, platformName, buildType, environmentVars['MOUNT_VOLUME'])
        }
    }
}

def CreateSnapshotStage(String repositoryName, String projectName, String pipelineName, String branchName, String platformName, String buildType, String jobName) {
    return {
        stage("${jobName}_snapshot_ebs_volume") {
            HandleDriveSnapshots(repositoryName, projectName, pipelineName, branchName, platformName, buildType)
        }
    }
}
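// Runs one build job on a single node: sets up the workspace, runs the build stage(s)
// (honoring NONBLOCKING_STEP for multi-step pipes), and in all cases publishes metrics,
// test results, and crash/screenshot/build artifacts as configured, tears down, and
// optionally snapshots the volume. Retries on node disconnects; other failures are
// re-raised after the retry block using the job's ON_FAILURE_MARK result.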
def CreateSingleNode(Map pipelineConfig, def platform, def build_job, Map envVars, String branchName, String pipelineName, String repositoryName, String projectName, boolean onlyMountEBSVolume = false) {
    def nodeLabel = envVars['NODE_LABEL']
    return {
        def currentResult = ''
        def currentException = ''
        retry(PIPELINE_RETRY_ATTEMPTS) {
            node("${nodeLabel}") {
                if (isUnix()) { // Has to happen inside a node
                    envVars['IS_UNIX'] = 1
                }
                withEnv(GetEnvStringList(envVars)) {
                    def build_job_name = build_job.key
                    def params = platform.value.build_types[build_job_name].PARAMETERS
                    try {
                        CreateSetupStage(pipelineConfig, params, snapshot, repositoryName, projectName, pipelineName, branchName, platform.key, build_job.key, envVars, onlyMountEBSVolume).call()
                        if (build_job.value.steps) { // This is a pipe with many steps, so create all the build stages
                            pipelineEnvVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                            build_job.value.steps.each { build_step ->
                                build_job_name = build_step
                                params = platform.value.build_types[build_job_name].PARAMETERS
                                // With map addition, entries from the right operand override overlapping entries from the left operand
                                envVars = pipelineEnvVars + GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, platform.value.build_types[build_step].PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                                try {
                                    CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call()
                                }
                                catch (Exception e) {
                                    if (envVars['NONBLOCKING_STEP']?.toBoolean()) {
                                        unstable(message: "Build step ${build_step} failed but it's a non-blocking step in build job ${build_job.key}")
                                    } else {
                                        throw e
                                    }
                                }
                            }
                        } else {
                            CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
                        }
                    }
                    catch (Exception e) {
                        if (e instanceof org.jenkinsci.plugins.workflow.steps.FlowInterruptedException) {
                            def causes = e.getCauses().toString()
                            if (causes.contains('RemovedNodeCause')) {
                                error "Node disconnected during build: ${e}" // Error raised to retry the stage on a new node
                            }
                        }
                        if (build_job_name.toLowerCase().contains('asset') && env.IS_UPLOAD_AP_LOGS?.toBoolean()) {
                            CreateUploadAPLogsStage(platform.key, build_job_name, envVars['WORKSPACE'], platform.value.build_types[build_job_name].PARAMETERS).call()
                        }
                        // Upload test artifacts only on builds that failed and ran test suites
                        if (env.IS_UPLOAD_TEST_ARTIFACTS?.toBoolean() && params.containsKey('CTEST_OPTIONS')) {
                            CreateUploadTestArtifactStage(build_job_name, envVars['WORKSPACE'], params.OUTPUT_DIRECTORY).call()
                        }
                        // All other errors will be raised outside the retry block
                        currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
                        currentException = e.toString()
                    }
                    finally {
                        if (env.MARS_REPO && params && params.containsKey('TEST_METRICS') && params.TEST_METRICS == 'True') {
                            CreateTestMetricsStage(pipelineConfig, branchName, envVars, build_job_name, params.OUTPUT_DIRECTORY, params.CONFIGURATION).call()
                        }
                        if (params && params.containsKey('TEST_RESULTS') && params.TEST_RESULTS == 'True') {
                            CreateExportTestResultsStage(pipelineConfig, platform.key, build_job_name, envVars, params).call()
                        }
                        if (fileExists(GetCrashArtifactDir())) {
                            CreateUploadCrashArtifactStage(build_job_name, platform.key).call()
                        }
                        if (params && params.containsKey('TEST_SCREENSHOTS') && params.TEST_SCREENSHOTS == 'True' && currentResult == 'FAILURE') {
                            CreateExportTestScreenshotsStage(pipelineConfig, branchName, platform.key, build_job_name, envVars, params).call()
                        }
                        if (env.UPLOAD_BUILD_ARTIFACTS?.toBoolean()) {
                            CreateUploadBuildArtifactStage(envVars['WORKSPACE'], platform.key, build_job_name, envVars['S3SIS_UPLOAD_PARAMS']).call()
                        }
                        CreateTeardownStage(envVars, params, projectName, pipelineName, branchName, platform.key, build_job.key).call()
                        if (envVars['CREATE_SNAPSHOT']?.toBoolean()) {
                            CreateSnapshotStage(repositoryName, projectName, pipelineName, branchName, platform.key, build_job.key, build_job_name).call()
                        }
                    }
                }
            }
        }
        // https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
        // {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
        if (currentResult == 'FAILURE') {
            currentBuild.result = 'FAILURE'
            error "FAILURE: ${currentException}"
        } else if (currentResult == 'UNSTABLE') {
            currentBuild.result = 'UNSTABLE'
            unstable(message: "UNSTABLE: ${currentException}")
        }
    }
}
// Used in CreateBuildJobs() to preprocess the build_job steps to programmatically create
// node sections, each with the set of steps that can run on that node.
class PipeStepJobData {
    String m_nodeLabel = ""
    def m_steps = []
    PipeStepJobData(String label) {
        this.m_nodeLabel = label
    }
    def addStep(def step) {
        this.m_steps.add(step)
    }
}
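// For a multi-step build job, groups consecutive steps by their NODE_LABEL (falling
// back to the job's default label) and returns a closure that runs each group on its
// own node, in order. Single-step jobs go straight to CreateSingleNode.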
  1018. def CreateBuildJobs(Map pipelineConfig, def platform, def build_job, Map envVars, String branchName, String pipelineName, String repositoryName, String projectName) {
  1019. // if this is a pipeline, split jobs based on the NODE_LABEL
  1020. if(build_job.value.steps) {
  1021. def defaultLabel = envVars['NODE_LABEL']
  1022. def lastNodeLabel = ""
  1023. def jobList = []
  1024. def currentIdx = -1;
  1025. // iterate the steps to build the order of node label + steps sets.
  1026. // Order matters, as it is executed from first to last.
  1027. // example layout.
  1028. // node A
  1029. // step 1
  1030. // step 2
  1031. // node B
  1032. // step 3
  1033. // node C
  1034. // step 4
  1035. build_job.value.steps.each { build_step ->
  1036. //if node label defined
  1037. if(platform.value.build_types[build_step] && platform.value.build_types[build_step].PIPELINE_ENV &&
  1038. platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL']) {
  1039. //if the last node label doesn't match the new one, append it.
  1040. if(platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL'] != lastNodeLabel) {
  1041. lastNodeLabel = platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL']
  1042. jobList.add(new PipeStepJobData(lastNodeLabel))
  1043. currentIdx++
  1044. }
  1045. }
  1046. //no label define, so it needs to run on the default node label
  1047. else if(lastNodeLabel != defaultLabel) { //if the last node is not the default, append default
  1048. lastNodeLabel = defaultLabel
  1049. jobList.add(new PipeStepJobData(lastNodeLabel))
  1050. currentIdx++
  1051. }
  1052. //add the build_step to the current node
  1053. jobList[currentIdx].addStep(build_step)
  1054. }
        return {
            jobList.eachWithIndex { element, idx ->
                // Update the node label + steps to the discovered data
                envVars['NODE_LABEL'] = element.m_nodeLabel
                build_job.value.steps = element.m_steps
                // Any node after the first only mounts the drive; it does not handle the clean parameters, as that is done by the first node.
                boolean onlyMountEBSVolume = idx != 0
                // Add this node
                CreateSingleNode(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName, onlyMountEBSVolume).call()
            }
        }
    } else {
        return CreateSingleNode(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName)
    }
}
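
// Illustrative walkthrough of the grouping above (hypothetical config; assume the default
// NODE_LABEL is 'default' and only 'asset_bundle' overrides it):
//   steps = ['profile', 'test_profile', 'asset_bundle']
//   build_types.asset_bundle.PIPELINE_ENV.NODE_LABEL = 'mac-arm'
// would yield:
//   jobList[0]: nodeLabel 'default', steps ['profile', 'test_profile']
//   jobList[1]: nodeLabel 'mac-arm', steps ['asset_bundle']
// i.e. consecutive steps sharing a label collapse into one node section, executed in order.
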
def projectName = ''
def pipelineName = ''
def branchName = ''
def pipelineConfig = {}

// Start Pipeline
try {
    timeout(time: PIPELINE_TIMEOUT, unit: 'MINUTES', activity: true) {
        stage('Setup Pipeline') {
            node('controller') {
                def envVarList = []
                if (isUnix()) {
                    envVarList.add('IS_UNIX=1')
                }
                withEnv(envVarList) {
                    timestamps {
                        repositoryUrl = scm.getUserRemoteConfigs()[0].getUrl()
                        // repositoryName is the full repository name
                        repositoryName = (repositoryUrl =~ /https:\/\/(.+)\/(.+)\.git/)[0][1]
                        env.REPOSITORY_NAME = repositoryName
                        (projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
                        env.PIPELINE_NAME = pipelineName
                        if (env.BRANCH_NAME) {
                            branchName = env.BRANCH_NAME
                        } else {
                            branchName = scm.branches[0].name // for non-multibranch pipelines
                            env.BRANCH_NAME = branchName // so scripts that read this environment variable have it (e.g. incremental_build_util.py)
                        }
                        if (env.CHANGE_TARGET) {
                            // PR builds
                            if (BUILD_SNAPSHOTS.contains(env.CHANGE_TARGET)) {
                                snapshot = env.CHANGE_TARGET
                                echo "Snapshot for destination branch \"${env.CHANGE_TARGET}\" found."
                            } else {
                                snapshot = DEFAULT_BUILD_SNAPSHOT
                                echo "Snapshot for destination branch \"${env.CHANGE_TARGET}\" does not exist, defaulting to snapshot \"${snapshot}\""
                            }
                        } else {
                            // Non-PR builds
                            pipelineParameters.add(choice(name: 'SNAPSHOT', choices: BUILD_SNAPSHOTS_WITH_EMPTY, description: 'Selects the build snapshot to use. A snapshot that has diverged further from the branch will cause longer build times, but will not cause build failures.'))
                            snapshot = env.SNAPSHOT
                            echo "Snapshot \"${snapshot}\" selected."
                        }
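                        // For example (hypothetical branch/snapshot names): a PR targeting 'development' with
                        // BUILD_SNAPSHOTS = ['development', 'stabilization'] picks snapshot 'development', while a PR
                        // targeting 'feature/foo' falls back to DEFAULT_BUILD_SNAPSHOT.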
                        pipelineProperties.add(disableConcurrentBuilds())
                        echo "Running repository: \"${repositoryName}\", pipeline: \"${pipelineName}\", branch: \"${branchName}\", CHANGE_ID: \"${env.CHANGE_ID}\", GIT_COMMIT: \"${scm.GIT_COMMIT}\"..."
                        CheckoutEngineBootstrapScripts(branchName, scm.userRemoteConfigs)
                        // Stash any project-based build configs
                        stash name: "${COMMIT_REPOSITORY_NAME}-scripts", includes: "scripts/build/**", allowEmpty: true
                        // Project or external repos may not have the bootstrapping scripts. For these cases, pull the bootstrap scripts from the engine repo at the default branch
                        if (!fileExists(SCRIPTS_PATH) || !fileExists(PIPELINE_CONFIG_FILE) || !(COMMIT_REPOSITORY_NAME == ENGINE_REPOSITORY_NAME)) {
                            echo "No bootstrap scripts found in ${SCRIPTS_PATH} or the working repo is not the engine repo, downloading them from the engine repo at ${ENGINE_DEVELOPMENT_BRANCH}"
                            CheckoutEngineBootstrapScripts(ENGINE_DEVELOPMENT_BRANCH)
                        }
                        unstash "${COMMIT_REPOSITORY_NAME}-scripts"
                        // Load configs
                        pipelineConfig = LoadPipelineConfig(pipelineName, branchName)
                        // Add each platform as a parameter that the user can disable if needed
                        if (!IsPullRequest(branchName) || IsPeriodicPipeline(pipelineName)) {
                            pipelineParameters.add(stringParam(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'))
                            pipelineParameters.add(stringParam(defaultValue: '', description: 'Name of the project to build. Overrides the default project.', name: 'PROJECT_OVERRIDE'))
                            pipelineParameters.add(booleanParam(defaultValue: false, description: 'Upload build artifacts to S3.', name: 'UPLOAD_BUILD_ARTIFACTS'))
                            pipelineParameters.add(stringParam(defaultValue: '', description: 'Additional S3SIS upload parameters, for example, use --include or --exclude to specify the files to be uploaded.', name: 'S3SIS_UPLOAD_PARAMS'))
                            pipelineConfig.platforms.each { platform ->
                                pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
                            }
                        }
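                        // For instance (hypothetical values), JOB_LIST_OVERRIDE='profile,test_profile' would restrict
                        // every enabled platform to those two jobs, and S3SIS_UPLOAD_PARAMS='--include *.pdb' would
                        // limit the artifact upload to matching files.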
                        // Add additional Jenkins parameters
                        pipelineConfig.platforms.each { platform ->
                            platformEnv = platform.value.PIPELINE_ENV
                            pipelineJenkinsParameters = platformEnv['PIPELINE_JENKINS_PARAMETERS'] ?: [:]
                            jenkinsParametersToAdd = pipelineJenkinsParameters[pipelineName] ?: [:]
                            jenkinsParametersToAdd.each { jenkinsParameter ->
                                defaultValue = jenkinsParameter['default_value']
                                // Use the last run's value as the default value so values carry over between runs in different Jenkins environments
                                if (jenkinsParameter['use_last_run_value']?.toBoolean()) {
                                    defaultValue = params."${jenkinsParameter['parameter_name']}" ?: jenkinsParameter['default_value']
                                }
                                switch (jenkinsParameter['parameter_type']) {
                                    case 'string':
                                        pipelineParameters.add(stringParam(defaultValue: defaultValue,
                                            description: jenkinsParameter['description'],
                                            name: jenkinsParameter['parameter_name']
                                        ))
                                        break
                                    case 'boolean':
                                        pipelineParameters.add(booleanParam(defaultValue: defaultValue,
                                            description: jenkinsParameter['description'],
                                            name: jenkinsParameter['parameter_name']
                                        ))
                                        break
                                    case 'password':
                                        pipelineParameters.add(password(defaultValue: defaultValue,
                                            description: jenkinsParameter['description'],
                                            name: jenkinsParameter['parameter_name']
                                        ))
                                        break
                                }
                            }
                        }
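                        // A sketch of the PIPELINE_JENKINS_PARAMETERS shape consumed above (hypothetical names; the
                        // real schema comes from the pipeline config file), keyed by pipeline name:
                        //   PIPELINE_JENKINS_PARAMETERS: {
                        //     "default": [
                        //       { "parameter_name": "EXTRA_CMAKE_ARGS", "parameter_type": "string",
                        //         "default_value": "", "description": "...", "use_last_run_value": "true" }
                        //     ]
                        //   }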
                        pipelineProperties.add(parameters(pipelineParameters.unique()))
                        properties(pipelineProperties)
                        // Stash the INCREMENTAL_BUILD_SCRIPT_PATH and EBS_SNAPSHOT_SCRIPT_PATH since all nodes will use them
                        stash name: 'incremental_build_script',
                            includes: INCREMENTAL_BUILD_SCRIPT_PATH
                        if (fileExists(EBS_SNAPSHOT_SCRIPT_PATH)) {
                            stash name: 'ebs_snapshot_script',
                                includes: EBS_SNAPSHOT_SCRIPT_PATH
                        }
                    }
                }
            }
        }
        if (env.BUILD_NUMBER == '1') {
            // Exit the pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
            // to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
            if (!IsPullRequest(branchName)) {
                currentBuild.result = 'SUCCESS'
                return
            }
            else if (IsPeriodicPipeline(pipelineName)) {
                currentBuild.result = 'NOT_BUILT'
                return
            }
        }
        def someBuildHappened = false
        // Build and Post-Build Testing Stage
        def buildConfigs = [:]
        // Platform builds run on EC2
        pipelineConfig.platforms.each { platform ->
            platform.value.build_types.each { build_job ->
                if (IsJobEnabled(branchName, build_job, pipelineName, platform.key)) { // User can filter jobs; jobs are tagged by pipeline
                    def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                    envVars['JENKINS_JOB_NAME'] = env.JOB_NAME // Save the original Jenkins job name to JENKINS_JOB_NAME
                    envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
                    someBuildHappened = true
                    buildConfigs["${platform.key} [${build_job.key}]"] = CreateBuildJobs(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName)
                }
            }
        }
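        // The resulting map (hypothetical platform/job names) looks like
        //   ['Windows [profile]': <closure>, 'Linux [debug]': <closure>]
        // so each parallel branch below is labeled "<platform> [<build job>]".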
        timestamps {
            stage('Build') {
                if (params.FAIL_FAST) {
                    echo "Fail fast option enabled"
                    buildConfigs.failFast = true
                }
                parallel buildConfigs // Run the builds in parallel
            }
            echo 'All builds successful'
        }
        if (!someBuildHappened) {
            currentBuild.result = 'NOT_BUILT'
        }
        if (currentBuild.currentResult == 'SUCCESS' && IsPeriodicPipeline(pipelineName) && IsPullRequest(branchName)) {
            currentBuild.result = 'UNSTABLE'
            currentBuild.description = 'Builds succeeded, but a regular PR run is required to merge the PR.'
        }
    }
}
catch (Exception e) {
    error "Exception: ${e}"
}
finally {
    try {
        node('controller') {
            if (currentBuild.currentResult == 'SUCCESS') {
                buildFailure = ""
                emailBody = "${BUILD_URL}\nSuccess!"
            } else {
                buildFailure = tm('${BUILD_FAILURE_ANALYZER}')
                emailBody = "${BUILD_URL}\n${buildFailure}"
            }
            if (env.POST_AR_BUILD_SNS_TOPIC) {
                message_json = [
                    "build_url": env.BUILD_URL,
                    "build_number": env.BUILD_NUMBER,
                    "repository_name": env.REPOSITORY_NAME,
                    "branch_name": env.BRANCH_NAME,
                    "pipeline_name": GetRunningPipelineName(env.JOB_NAME)[1],
                    "full_pipeline_name": env.JOB_NAME,
                    "build_result": "${currentBuild.currentResult}",
                    "build_failure": buildFailure,
                    "recreate_volume": env.RECREATE_VOLUME,
                    "clean_output_directory": env.CLEAN_OUTPUT_DIRECTORY,
                    "clean_assets": env.CLEAN_ASSETS,
                    "fail_fast": env.FAIL_FAST,
                    "project_name": env.CMAKE_LY_PROJECTS
                ]
                snsPublish(
                    topicArn: env.POST_AR_BUILD_SNS_TOPIC,
                    subject: 'Build Result',
                    message: JsonOutput.toJson(message_json)
                )
            }
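            // The published message (illustrative values only) serializes to JSON along the lines of
            //   {"build_url": "https://jenkins.example.com/job/.../42/", "build_number": "42",
            //    "build_result": "SUCCESS", "build_failure": "", ...}
            // for downstream consumers subscribed to the SNS topic.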
            emailext(
                body: "${emailBody}",
                subject: "${currentBuild.currentResult}: ${JOB_NAME} - Build # ${BUILD_NUMBER}",
                recipientProviders: [
                    [$class: 'RequesterRecipientProvider']
                ]
            )
        }
    } catch (Exception e) {
        // Swallow notification failures so they cannot mask the build result.
    }
}