ci/cd env: removed all jenkins files and src-rev scripts
The scripts and the pipelines are now located in the build directory.

BugzID: 78260
Signed-off-by: Marc Mattmueller <marc.mattmueller@netmodule.com>
parent 7a078ecee0
commit 696db7ca10
@@ -1,191 +0,0 @@
// common parts for yocto builds

// BUILD_DIR_POSTFIX needs to be defined in the job before
// loading this module
env.SHARED_BUILD = "${WORKSPACE}/build${env.BUILD_DIR_POSTFIX}"
env.BUILD_DEPLOY_DIR = "${env.SHARED_BUILD}/tmp/deploy/images"
env.BUILD_LICENSE_DIR = "${env.SHARED_BUILD}/tmp/deploy/licenses"
env.BUILD_HISTORY_DIR = "${env.SHARED_BUILD}/buildhistory"
env.DISTRO_VERSION_PATHNAME = "${env.SHARED_BUILD}/conf/distro_version.inc"
env.DOWNLOAD_DIR = "${WORKSPACE}/build-common/downloads"
env.BINARY_STORAGE_URL = "http://nmrepo.netmodule.intranet/src/yocto-downloads"
env.SUBMODULE_VERION_FILE = "submodule_revisions"
env.DISTRO_VERSION_FILE = "distro_version.inc"
env.AUTOREV_VERSION_FILE = "autorev_revisions.inc"
env.PACKAGE_NAME = "nm-os"


echo "loading common yocto build module..."


// Methods declared in external code are accessible
// directly from other code in the external file, or
// indirectly via the object created by the load operation,
// e.g. extcode.build(...)


def cleanWorkspace() {
    println "cleaning workspace..."
    sh "[ -d \"${WORKSPACE}/build-common/sstate-cache\" ] && find \"${WORKSPACE}/build-common/sstate-cache/\" -name \"sstate*\" -atime +3 -delete || true"
    sh "[ -d \"${env.SHARED_BUILD}/tmp/work\" ] && find \"${env.SHARED_BUILD}/tmp/work/\" -name temp -type d -exec rm -rf {} + || true"
}


def isRelease(versionParam) {
    if((versionParam == "") || (versionParam == "latest")) {
        return false
    }
    return true
}


def handleSubmodules(versionParam) {
    sshagent (credentials: [env.SSH_ID]) {
        println "init submodules..."
        sh 'git submodule update --init'
        if(isRelease(versionParam)) {
            println "set submodules to frozen state..."
            sh 'git submodule update' // set all submodules to the frozen commit
        }
        else {
            println "setting netmodule submodule hashes to head..."
            sh 'git submodule update --remote --rebase meta-netmodule-*' // update our own submodules to HEAD
        }
    }
    submoduleStatus = sh(returnStdout: true, script: "git submodule status").trim() // print submodule hashes to jenkins log
    println "${submoduleStatus}"
    writeFile(file: "${env.SUBMODULE_VERION_FILE}", text: "${submoduleStatus}")
}

def handleAutoRevIncludeFile(versionParam) {
    if(isRelease(versionParam)) {
        println "suppressing autorev include file..."
        sh(script: "sed '/autorev-packages/d' -i ${env.SHARED_BUILD}/conf/local.conf")
    }
}

def cleanupAutoRevIncludeFile(versionParam) {
    if(isRelease(versionParam)) {
        println "clean-up suppressed autorev include file..."
        sh(script: "git checkout ${env.SHARED_BUILD}/conf/local.conf")
    }
}

def updateSourceRevisions() {
    println "update source revisions to head..."
    sh(returnStdout: true, script: "bash -c '. ./env/distro/ostree-image > /dev/null && cd ../ && ./src-rev.sh -v -d -r -l ./srcrev.log -a ./autorev-packages.inc'")
}


def getTopUpstreamBuildNumber() {
    // Iterating through all upstream jobs:
    // currentBuild.upstreamBuilds.each { item ->
    //     echo "upstream build: ${item}"
    //     def nbr = item.getNumber()
    //     echo "nbr=${nbr}"
    // }
    def upstreamJobList = currentBuild.upstreamBuilds
    def nbrOfUpstreamJobs = upstreamJobList.size()
    if (nbrOfUpstreamJobs == 0)
        return 0
    def topJob = upstreamJobList[nbrOfUpstreamJobs-1]
    println "Top upstream project: " + topJob.getFullDisplayName()
    def topJobNbr = topJob.getNumber()
    println "Top upstream job build number = ${topJobNbr}"
    return topJobNbr
}

def buildVersionString(imageType, actualBaseVersionString, versionParameter) {
    // official release version
    if(isRelease(versionParameter)) {
        String newVersionStr = versionParameter
        return newVersionStr
    }

    // SDK or FCT release
    if((imageType == 'fct') || (imageType == 'sdk')) {
        return actualBaseVersionString
    }

    // nightly/incremental release
    def buildnbr = getTopUpstreamBuildNumber()
    String nightlyPart = actualBaseVersionString + ".Test${buildnbr}"
    return nightlyPart
}
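// Illustrative outcomes of buildVersionString() (hypothetical values):
//   release build:        the RLS_VERSION parameter as given, e.g. "1.2.0"
//   sdk/fct build:        the tag-derived base version unchanged, e.g. "1.2.0"
//   nightly/incremental:  base version plus the top upstream build number, e.g. "1.2.0.Test42"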

def getVersionString(versionParam, imageType) {
    sshagent (credentials: [env.SSH_ID]) {
        sh 'git fetch -ap'
        sh 'git fetch -t'
    }
    def gitCmd = "git describe --tags"

    if(!isRelease(versionParam)) {
        gitCmd = "${gitCmd} --dirty"
    }
    gitversion = sh(returnStdout: true, script: "${gitCmd}").trim()
    String[] versionArr = "${gitversion}".split("-")

    versionArr[0] = buildVersionString(imageType, versionArr[0], versionParam)
    rlsVersion = versionArr.join("-")
    return rlsVersion
}
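// Note: "git describe --tags" typically yields something like "1.2.0-14-g1a2b3c4"
// (tag, commit count, abbreviated hash); only the leading tag part is rewritten by
// buildVersionString(), the remaining parts are joined back unchanged.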


def changeDistroVersion(versionString) {
    println "Set the distro version to ${versionString}..."
    def versionTag = "DISTRO_VERSION = \"${versionString}\""
    writeFile(file: "${env.DISTRO_VERSION_PATHNAME}", text: "${versionTag}")
    sh(script: "cp ${env.DISTRO_VERSION_PATHNAME} ${env.DISTRO_VERSION_FILE}")
}

def cleanupDistroVersion() {
    println "cleaning repository regarding distro version..."
    sh(script: "git clean -f ${env.DISTRO_VERSION_PATHNAME}")
}


def archiveImages(imageDir, imgType) {
    dir ('tmp/artifacts') {
        zip archive: true, dir: "${WORKSPACE}/${imageDir}", glob: "*", zipFile: "${env.PACKAGE_NAME}-${env.BUILD_VERSION}-${params.MACHINE}-${imgType}.zip"
    }
    sh "rm -rf ${WORKSPACE}/tmp/artifacts"
}


def syncSources(src, dst) {
    def hasSrcUrl = (src.contains("http"))
    def from = src
    def to = dst

    // convert the URL into ssh syntax:
    def url = (hasSrcUrl) ? src : dst
    String[] repoParts = url.split("//")[1].split("/")
    repoParts[0] = "build_user@" + repoParts[0] + ":/repo/repo"
    sshSrc = repoParts.join("/")

    if(hasSrcUrl) {
        println "getting data from server..."
        from = sshSrc
        sh(script: "bash -c \"mkdir -p ${to}\"")
    }
    else {
        println "putting data to server..."
        to = sshSrc
    }

    sshagent (credentials: ['7767e711-08a4-4c71-b080-197253dd7392']) {
        sh "set +x && rsync -q -auvz --ignore-existing -e \"ssh\" ${from}/* ${to}"
    }
}
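// Example (hypothetical values): syncSources("http://nmrepo.netmodule.intranet/src/yocto-downloads",
// "${WORKSPACE}/build-common/downloads") rewrites the URL to
// "build_user@nmrepo.netmodule.intranet:/repo/repo/src/yocto-downloads" and rsyncs from there
// into the local download directory; with the arguments swapped the transfer direction is reversed.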


def getAutoRevHashes(envType) {
    def env = "${envType}" == "" ? "" : "${envType}-"
    def revs = sh(returnStdout: true, script: "bash -c \". ./env/distro/${env}image > /dev/null && buildhistory-collect-srcrevs\"").trim()
    return revs
}


// !!Important Boilerplate!!
// The external code must return its contents as an object
return this;
@@ -1,182 +0,0 @@
// Loading code requires a NODE context,
// but we want the code accessible outside the node context,
// so declare yoctocommon (the object created by the LOAD operation) outside the node block.
def yoctocommon


// declarative pipeline
pipeline {
    agent {
        node {
            label "${params.NODE_NAME}"
        }
    }

    parameters {
        choice(name: 'MACHINE', choices: ['select...', 'armada-385-nrhw18', 'am335x-nrhw20', 'am335x-nmhw21', 'imx8-nmhw23', 'am335x-nmhw24', 'am335x-hw25', 'am335x-hw26'], description: 'choose target platform')
        choice(name: 'IMAGE_TYPE', choices: ['lava', 'fct', 'minimal'], description: 'choose image type')
        string(name: 'RLS_VERSION', defaultValue: '', description: 'Set the version to build and use committed submodules')
        string(name: 'NODE_NAME', defaultValue: 'lxbuild3', description: 'Overwrite default node name to build on')
        booleanParam(name: 'CLEAN_BUILD', defaultValue: false, description: 'clean all temp directories before build starts')
    }

    environment {
        IMG_OUTPUT_DIR = "tmp/build-output"
    }

    options {
        timeout(time: 5, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '40',
                       daysToKeepStr: '7',
                       artifactNumToKeepStr: '40',
                       artifactDaysToKeepStr: '7'
            )
        )
        disableConcurrentBuilds()
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if("${params.MACHINE}" == "select...") {
                        currentBuild.result = 'ABORTED'
                        error("Missing machine type --> select parameter MACHINE for a proper build")
                    }

                    // this definition is needed for selecting the
                    // correct build directory
                    env.BUILD_DIR_POSTFIX = "${params.IMAGE_TYPE}" == "lava" ? "-fct" : "-${params.IMAGE_TYPE}"
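                    // e.g. IMAGE_TYPE "lava" or "fct" -> build directory "build-fct",
                    //      IMAGE_TYPE "minimal"       -> "build-minimal" (see SHARED_BUILD in Jenkinsfile_Common)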

                    // take the correct user ID for the ssh connection of
                    // the corresponding build server
                    if("${params.NODE_NAME}" == "lxbuild4") {
                        env.SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
                    }
                    else if("${params.NODE_NAME}" == "lxbuild3") {
                        env.SSH_ID = '70c27394-cb7d-4304-aed1-89e15a3a78d0'
                    }
                    else {
                        env.SSH_ID = ''
                    }
                    println "SSH_ID used: ${env.SSH_ID}"

                    // load yocto common file
                    env.ROOTDIR = pwd()
                    yoctocommon = load "${env.ROOTDIR}/Jenkinsfile_Common"

                    // clean-up no longer needed packages
                    yoctocommon.cleanWorkspace()

                    // Prepare Build Environment
                    env.YOCTO_DEPLOYS = "${env.BUILD_DEPLOY_DIR}/${params.MACHINE}"
                    yoctocommon.handleSubmodules("${params.RLS_VERSION}")
                    yoctocommon.handleAutoRevIncludeFile("${params.RLS_VERSION}")
                    version = yoctocommon.getVersionString("${params.RLS_VERSION}", "${params.IMAGE_TYPE}")
                    env.BUILD_VERSION = "${version}"
                    currentBuild.displayName = "${version}-${params.MACHINE}-${params.IMAGE_TYPE}" // replace Bitbake timestamp after building
                    printJobParameters()
                    yoctocommon.changeDistroVersion("${version}")
                    yoctocommon.syncSources("${env.BINARY_STORAGE_URL}", "${env.DOWNLOAD_DIR}")
                }
                writeFile file: 'VERSION', text: "${env.PACKAGE_NAME}: ${env.BUILD_VERSION}"
            }
        }

        stage('clean') {
            when { expression { return params.CLEAN_BUILD } }
            steps {
                script {
                    cleaning(params.IMAGE_TYPE)
                }
            }
        }

        stage('build') {
            steps {
                script {
                    build(params.IMAGE_TYPE, params.IMAGE_TYPE)
                    createArchive(params.IMAGE_TYPE, env.IMG_OUTPUT_DIR)
                    yoctocommon.archiveImages(env.IMG_OUTPUT_DIR, params.IMAGE_TYPE)
                    sh "rm -rf ${IMG_OUTPUT_DIR}"
                }
            }
            post {
                always {
                    script {
                        yoctocommon.cleanupAutoRevIncludeFile("${params.RLS_VERSION}")
                        yoctocommon.syncSources("${env.DOWNLOAD_DIR}", "${env.BINARY_STORAGE_URL}")
                    }
                }
            }
        }

        stage('collect versions') {
            steps {
                script {
                    revisions = yoctocommon.getAutoRevHashes(params.IMAGE_TYPE)
                    writeFile(file: "${env.AUTOREV_VERSION_FILE}", text: "${revisions}")
                }
            }
            post {
                success {
                    archiveArtifacts(artifacts: "${env.SUBMODULE_VERION_FILE}, ${env.AUTOREV_VERSION_FILE}, ${env.DISTRO_VERSION_FILE}", onlyIfSuccessful: false)
                }
            }
        }

    } // stages
}


def printJobParameters() {
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
MACHINE     = ${params.MACHINE}\n\
IMAGE_TYPE  = ${params.IMAGE_TYPE}\n\
CLEAN_BUILD = ${params.CLEAN_BUILD}\n\
RLS_VERSION = ${params.RLS_VERSION}\n\
--> version = ${env.BUILD_VERSION}\n\
----------------------------------\n"
}


def cleaning(envType) {
    sshagent (credentials: [env.SSH_ID]) {
        def envPostFix = "${envType}" == "" ? "" : "${envType}-"
        sh "bash -c '. ./env/distro/${envPostFix}image > /dev/null && bitbake -q -fc cleanall virtual/netmodule-image'"
    }
    dir ("${env.SHARED_BUILD}/tmp") { deleteDir() }
    dir ("${env.SHARED_BUILD}/tmp-glibc") { deleteDir() }
}


def build(envType, imgType) {
    sshagent (credentials: [env.SSH_ID]) {
        def envPostFix = "${envType}" == "" ? "" : "${envType}-"
        sh "bash -c '. ./env/distro/${envPostFix}image > /dev/null && bitbake -q -k virtual/netmodule-image'"
    }
}


def createArchive(imgType, outputDir) {
    def imgTypePostfix = "${imgType}" == "" ? "" : "-${imgType}"
    dir (outputDir) {
        def image_basename = "netmodule-linux-image${imgTypePostfix}-${params.MACHINE}"
        def basename_built = "${env.YOCTO_DEPLOYS}/${image_basename}"
        def basename_archive = "./image${imgTypePostfix}-${params.MACHINE}"

        sh "cp ${basename_built}.manifest ${basename_archive}.manifest"
        sh "bash -c '${WORKSPACE}/openembedded-core/scripts/buildhistory-collect-srcrevs -p ${env.BUILD_HISTORY_DIR} > srcrev-${params.MACHINE}${imgTypePostfix}.inc'"

        sh label: 'Copy License Manifest', returnStatus: true, script: """
            LATEST_LICENSE_DIR=\$(ls -Artd ${env.BUILD_LICENSE_DIR}/netmodule-linux-image${imgTypePostfix}* | tail -n 1)
            cp \$LATEST_LICENSE_DIR/license.manifest ${basename_archive}_license.manifest
        """

        sh "cp ${env.YOCTO_DEPLOYS}/fitImage-${image_basename}-${params.MACHINE} fitImage-${image_basename}"
        sh "cp ${basename_built}.cpio.gz ${basename_archive}.cpio.gz"
    }
}
@@ -1,264 +0,0 @@
// Loading code requires a NODE context,
// but we want the code accessible outside the node context,
// so declare yoctocommon (the object created by the LOAD operation) outside the node block.
def yoctocommon


// declarative pipeline
pipeline {
    agent {
        node {
            label "${params.NODE_NAME}"
        }
    }

    parameters {
        choice(name: 'MACHINE', choices: ['select...', 'armada-385-nrhw18', 'am335x-nrhw20', 'am335x-nmhw21', 'imx8-nmhw23', 'am335x-nmhw24', 'am335x-hw25', 'am335x-hw26'], description: 'choose target platform')
        choice(name: 'IMAGE_TYPE', choices: ['dev', 'bootloader', 'release', 'vcu'], description: 'choose image type')
        string(name: 'RLS_VERSION', defaultValue: '', description: 'Set the version to build and use committed submodules')
        string(name: 'NODE_NAME', defaultValue: 'lxbuild4', description: 'Overwrite default node name to build on')
        booleanParam(name: 'CLEAN_BUILD', defaultValue: false, description: 'clean all temp directories before build starts')
    }

    environment {
        WORK_DIR = "tmp"
        IMG_OUTPUT_DIR = "${WORK_DIR}/build-output"
    }

    options {
        timeout(time: 5, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '80',
                       daysToKeepStr: '7',
                       artifactNumToKeepStr: '80',
                       artifactDaysToKeepStr: '7'
            )
        )
        disableConcurrentBuilds()
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if("${params.MACHINE}" == "select...") {
                        currentBuild.result = 'ABORTED'
                        error("Missing machine type --> select parameter MACHINE for a proper build")
                    }

                    cleanLeftOvers(env.IMG_OUTPUT_DIR)

                    // this definition is needed for selecting the
                    // correct build directory
                    env.BUILD_DIR_POSTFIX = ""

                    // take the correct user ID for the ssh connection of
                    // the corresponding build server
                    if("${params.NODE_NAME}" == "lxbuild4") {
                        env.SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
                    }
                    else if("${params.NODE_NAME}" == "lxbuild3") {
                        env.SSH_ID = '70c27394-cb7d-4304-aed1-89e15a3a78d0'
                    }
                    else {
                        env.SSH_ID = ''
                    }
                    println "SSH_ID used: ${env.SSH_ID}"

                    // load yocto common file
                    env.ROOTDIR = pwd()
                    yoctocommon = load "${env.ROOTDIR}/Jenkinsfile_Common"

                    // clean-up no longer needed packages
                    yoctocommon.cleanWorkspace()

                    // Prepare Build Environment
                    env.YOCTO_DEPLOYS = "${env.BUILD_DEPLOY_DIR}/${params.MACHINE}"
                    yoctocommon.handleSubmodules("${params.RLS_VERSION}")
                    yoctocommon.handleAutoRevIncludeFile("${params.RLS_VERSION}")
                    version = yoctocommon.getVersionString("${params.RLS_VERSION}", "${params.IMAGE_TYPE}")
                    env.BUILD_VERSION = "${version}"
                    currentBuild.displayName = "${version}-${params.MACHINE}-${params.IMAGE_TYPE}" // replace Bitbake timestamp after building
                    printJobParameters()
                    yoctocommon.changeDistroVersion("${version}")
                    yoctocommon.syncSources("${env.BINARY_STORAGE_URL}", "${env.DOWNLOAD_DIR}")
                }
                writeFile file: 'VERSION', text: "${env.PACKAGE_NAME}: ${env.BUILD_VERSION}"
            }
        }

        stage('clean') {
            when { expression { return params.CLEAN_BUILD } }
            steps {
                script {
                    cleaning(params.IMAGE_TYPE)
                }
            }
        }

        stage('build') {
            steps {
                script {
                    build(params.IMAGE_TYPE)

                    // archive a bootloader package:
                    // the bootloader is also built with the dev image, hence we combine it
                    def isBootLoaderOnly = (params.IMAGE_TYPE == 'bootloader').toBoolean()
                    if(isBootLoaderOnly || (params.IMAGE_TYPE == 'dev')) {
                        createBslPackage(env.IMG_OUTPUT_DIR, params.MACHINE)
                        archivePackage(yoctocommon, env.IMG_OUTPUT_DIR, 'bootloader', !isBootLoaderOnly)
                    }

                    // archive an image package:
                    // skip for bootloader-only builds
                    if(!isBootLoaderOnly) {
                        createImagePackage(params.IMAGE_TYPE, env.IMG_OUTPUT_DIR)
                        archiveOSTreeArtifact(env.IMG_OUTPUT_DIR)
                        archivePackage(yoctocommon, env.IMG_OUTPUT_DIR, params.IMAGE_TYPE, false)
                    }
                }
            }
            post {
                always {
                    script {
                        cleanLeftOvers(env.WORK_DIR)
                        yoctocommon.cleanupAutoRevIncludeFile("${params.RLS_VERSION}")
                        yoctocommon.syncSources("${env.DOWNLOAD_DIR}", "${env.BINARY_STORAGE_URL}")
                    }
                }
            }
        }

        stage('collect versions') {
            steps {
                script {
                    revisions = yoctocommon.getAutoRevHashes('ostree')
                    writeFile(file: "${env.AUTOREV_VERSION_FILE}", text: "${revisions}")
                }
            }
            post {
                success {
                    archiveArtifacts(artifacts: "${env.SUBMODULE_VERION_FILE}, ${env.AUTOREV_VERSION_FILE}, ${env.DISTRO_VERSION_FILE}", onlyIfSuccessful: false)
                }
            }
        }

    } // stages
}

//-----------------------------------------------------------------------------
def printJobParameters() {
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
MACHINE     = ${params.MACHINE}\n\
IMAGE_TYPE  = ${params.IMAGE_TYPE}\n\
CLEAN_BUILD = ${params.CLEAN_BUILD}\n\
RLS_VERSION = ${params.RLS_VERSION}\n\
--> version = ${env.BUILD_VERSION}\n\
----------------------------------\n"
}

//-----------------------------------------------------------------------------
def cleanLeftOvers(cleaningDir) {
    sh "rm -rf ${cleaningDir}"
}

//-----------------------------------------------------------------------------
def cleaning(imgType) {
    sshagent (credentials: [env.SSH_ID]) {
        def tgtImgType = "${imgType}" == "bootloader" ? "dev" : "${imgType}"
        tgtImgType = "${tgtImgType}" == "release" ? "" : "${tgtImgType}"
        def imgTypePostfix = "${tgtImgType}" == "" ? "" : "-${tgtImgType}"

        def distroPostfix = "ostree"
        if (tgtImgType == "vcu")
            distroPostfix = "vcu"

        sh "bash -c '. ./env/distro/${distroPostfix}-image > /dev/null && bitbake -q -fc cleanall netmodule-linux-image${imgTypePostfix}'"
    }
    dir ("${env.SHARED_BUILD}/tmp") { deleteDir() }
    dir ("${env.SHARED_BUILD}/tmp-glibc") { deleteDir() }
}

//-----------------------------------------------------------------------------
def build(imgType) {
    sshagent (credentials: [env.SSH_ID]) {
        def tgtImgType = "${imgType}" == "bootloader" ? "dev" : "${imgType}"
        tgtImgType = "${tgtImgType}" == "release" ? "" : "${tgtImgType}"
        def imgTypePostfix = "${tgtImgType}" == "" ? "" : "-${tgtImgType}"

        def distroPostfix = "ostree"
        if (tgtImgType == "vcu")
            distroPostfix = "vcu"

        sh "bash -c '. ./env/distro/${distroPostfix}-image > /dev/null && bitbake -q -k netmodule-linux-image${imgTypePostfix}'"
    }
}

//-----------------------------------------------------------------------------
def createBslPackage(outputDir, machine) {
    dir(outputDir) {
        String machineStr = machine
        String[] machineArr = machineStr.split('-')

        println "get bootloader artifacts for package..."
        if(machineArr[0] == 'imx8') {
            sh "cp ${env.YOCTO_DEPLOYS}/imx-boot ."
            sh "cp ${env.YOCTO_DEPLOYS}/imx-boot.sd ."
        }
        else if(machineArr[0] == 'armada') {
            sh "cp ${env.YOCTO_DEPLOYS}/*u-boot-spl.kwb ."
            sh "cp ${env.YOCTO_DEPLOYS}/*u-boot-${params.MACHINE}*.kwb ."
            sh "cp ${env.YOCTO_DEPLOYS}/*u-boot-${params.MACHINE}*.xmodem.bin ."
        }
        else {
            sh "cp ${env.YOCTO_DEPLOYS}/*u-boot-${params.MACHINE}*.img ."
            sh "cp ${env.YOCTO_DEPLOYS}/*u-boot-${params.MACHINE}*.xmodem.bin ."
        }
    }
}

//-----------------------------------------------------------------------------
def createImagePackage(imgType, outputDir) {
    dir (outputDir) {
        println "get image artifacts for package..."
        def imgTypePostfix = "${imgType}" == "" ? "" : "-${imgType}"
        imgTypePostfix = "${imgTypePostfix}" == "-release" ? "" : "${imgTypePostfix}"
        def image_basename = "netmodule-linux-image${imgTypePostfix}-${params.MACHINE}"
        def basename_built = "${env.YOCTO_DEPLOYS}/${image_basename}"
        def basename_archive = "./image${imgTypePostfix}-${params.MACHINE}"

        sh "cp ${basename_built}.manifest ${basename_archive}.manifest"
        sh "bash -c '${WORKSPACE}/openembedded-core/scripts/buildhistory-collect-srcrevs -p ${env.BUILD_HISTORY_DIR} > srcrev-${params.MACHINE}${imgTypePostfix}.inc'"

        sh label: 'Copy License Manifest', returnStatus: true, script: """
            LATEST_LICENSE_DIR=\$(ls -Artd ${env.BUILD_LICENSE_DIR}/netmodule-linux-image${imgTypePostfix}* | tail -n 1)
            cp \$LATEST_LICENSE_DIR/license.manifest ${basename_archive}_license.manifest
        """

        sh label: 'Copy initramfs License Manifest', returnStatus: true, script: """
            LATEST_LICENSE_DIR=\$(ls -Artd ${env.BUILD_LICENSE_DIR}/initramfs-ostree-image-${params.MACHINE}-* | tail -n 1)
            cp \$LATEST_LICENSE_DIR/license.manifest initramfs-ostree-image_license.manifest
        """

        sh "cp ${basename_built}.ota-ext4 ${basename_archive}.ota-ext4"
        sh "cp ${basename_built}.wic ${basename_archive}.wic"
        sh "tar czf ./ostree_repo${imgTypePostfix}.tar.gz -C ${env.YOCTO_DEPLOYS}/ostree_repo ."
    }
}

//-----------------------------------------------------------------------------
def archivePackage(yoctocommon, pkgDir, imgType, doCleanPkgDir) {
    yoctocommon.archiveImages(pkgDir, imgType)
    if(doCleanPkgDir) {
        println "cleaning pkgDir..."
        sh "rm -rf ./${pkgDir}/*"
    }
}

//-----------------------------------------------------------------------------
def archiveOSTreeArtifact(outputDir) {
    archiveArtifacts artifacts: "${outputDir}/ostree_repo*.tar.gz", onlyIfSuccessful: true
    sh "rm -f ./${outputDir}/ostree_repo*.tar.gz"
}
Jenkinsfile_sdk
@@ -1,188 +0,0 @@
// Loading code requires a NODE context,
// but we want the code accessible outside the node context,
// so declare yoctocommon (the object created by the LOAD operation) outside the node block.
def yoctocommon


// declarative pipeline
pipeline {
    agent {
        node {
            label "${params.NODE_NAME}"
        }
    }

    parameters {
        choice(name: 'MACHINE', choices: ['select...', 'armada-385-nrhw18', 'am335x-nrhw20', 'am335x-nmhw21', 'imx8-nmhw23', 'am335x-nmhw24', 'am335x-hw25', 'am335x-hw26'], description: 'choose target platform')
        string(name: 'RLS_VERSION', defaultValue: '', description: 'Set the version to build and use committed submodules')
        string(name: 'NODE_NAME', defaultValue: 'oem-ci', description: 'Enter a specific node name')
        booleanParam(name: 'CLEAN_BUILD', defaultValue: false, description: 'clean all temp directories before build starts')
        booleanParam(name: 'BUILD_FROM_DEV_IMAGE', defaultValue: false, description: 'build SDK from dev image')
    }

    environment {
        // SDK Build Parameter (default is recipe netmodule-linux-sdk)
        IS_NM_LINUX_SDK = "${!params.BUILD_FROM_DEV_IMAGE}"
    }

    options {
        timeout(time: 8, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '6')
        )
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if("${params.MACHINE}" == "select...") {
                        currentBuild.result = 'ABORTED'
                        error("Missing machine type --> select parameter MACHINE for a proper build")
                    }

                    // this definition is needed for selecting the
                    // correct build directory
                    env.BUILD_DIR_POSTFIX = ""

                    // take the correct user ID for the ssh connection of
                    // the corresponding build server
                    if("${params.NODE_NAME}" == "lxbuild4") {
                        env.SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
                    }
                    else if("${params.NODE_NAME}" == "lxbuild3") {
                        env.SSH_ID = '70c27394-cb7d-4304-aed1-89e15a3a78d0'
                    }
                    else {
                        error("Declare the NODE_NAME specific to the build agent")
                    }
                    println "SSH_ID used: ${env.SSH_ID}"

                    // load yocto common file
                    env.ROOTDIR = pwd()
                    yoctocommon = load "${env.ROOTDIR}/Jenkinsfile_Common"

                    // clean-up no longer needed packages
                    yoctocommon.cleanWorkspace()

                    // Prepare Build Environment
                    env.YOCTO_DEPLOYS = "${env.SHARED_BUILD}/tmp/deploy/sdk"
                    yoctocommon.handleSubmodules("${params.RLS_VERSION}")
                    yoctocommon.handleAutoRevIncludeFile("${params.RLS_VERSION}")
                    cleanLastBuildArtifacts()
                    version = yoctocommon.getVersionString("${params.RLS_VERSION}", "sdk")
                    env.BUILD_VERSION = "${version}"
                    currentBuild.displayName = "${version}-${params.MACHINE}-sdk" // replace Bitbake timestamp after building
                    printJobParameters()
                    yoctocommon.changeDistroVersion("${version}")
                    yoctocommon.syncSources("${env.BINARY_STORAGE_URL}", "${env.DOWNLOAD_DIR}")
                }
                writeFile file: 'VERSION', text: "${env.PACKAGE_NAME}: ${env.BUILD_VERSION}"
            }
        }

        stage('clean') {
            when { expression { return params.CLEAN_BUILD } }
            steps {
                script {
                    cleaning(env.IS_NM_LINUX_SDK.toBoolean())
                }
            }
        }

        stage('build') {
            steps {
                script {
                    sshagent (credentials: [env.SSH_ID]) {
                        if(env.IS_NM_LINUX_SDK.toBoolean()) {
                            sh "bash -c '. ./env/distro/ostree-image && bitbake -q netmodule-linux-sdk'"
                        }
                        else {
                            sh "bash -c '. ./env/distro/ostree-image && bitbake -q -fc populate_sdk netmodule-linux-image-dev'"
                        }
                    }
                }
            }
            post {
                success {
                    script {
                        deploySdkToArtifactory(params.BUILD_FROM_DEV_IMAGE, "${params.MACHINE}")
                    }
                } // success
                always {
                    script {
                        yoctocommon.cleanupAutoRevIncludeFile("${params.RLS_VERSION}")
                        yoctocommon.syncSources("${env.DOWNLOAD_DIR}", "${env.BINARY_STORAGE_URL}")
                    }
                } // always
            }
        }

    } // stages
}


def printJobParameters() {
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
MACHINE         = ${params.MACHINE}\n\
CLEAN_BUILD     = ${params.CLEAN_BUILD}\n\
IS_NM_LINUX_SDK = ${env.IS_NM_LINUX_SDK}\n\
RLS_VERSION     = ${params.RLS_VERSION}\n\
--> version     = ${env.BUILD_VERSION}\n\
----------------------------------\n"
}


def cleanLastBuildArtifacts() {
    println "cleaning artifacts from last build..."
    sh "rm -f ${env.YOCTO_DEPLOYS}/*"
    sshagent (credentials: [env.SSH_ID]) {
        sh "bash -c '. ./env/distro/ostree-image > /dev/null && git fetch -ap && bitbake -q -c cleanall netmodule-linux-sdk'"
    }
}

def cleaning(isNmSdk) {
    sshagent (credentials: [env.SSH_ID]) {
        if(env.IS_NM_LINUX_SDK.toBoolean()) {
            sh "bash -c '. ./env/distro/ostree-image && bitbake -q -fc cleanall netmodule-linux-sdk'"
        }
        else {
            sh "bash -c '. ./env/distro/ostree-image && bitbake -q -fc cleanall netmodule-linux-image-dev'"
        }
    }
    dir ("${env.SHARED_BUILD}/tmp") { deleteDir() }
    dir ("${env.SHARED_BUILD}/tmp-glibc") { deleteDir() }
}



def deploySdkToArtifactory(isBuildFromDev, machine) {
    def cpuType = machine.split("-")[0]
    def sdkArtifact = sh(returnStdout: true, script: "ls ${env.YOCTO_DEPLOYS}/netmodule-linux-ostree*-netmodule-linux-*.sh").trim()
    if(isBuildFromDev) {
        def jenkinsBaseUrl = "https://jenkins.netmodule.intranet"
        def workspaceUrl = "${jenkinsBaseUrl}/job/NMOS/job/build-sdk/job/${env.BRANCH_NAME}/${currentBuild.number}/execution/node/3/ws/build/tmp/deploy/sdk"
        def artifactFile = sh(returnStdout: true, script: "basename ${sdkArtifact}").trim()
        println "Download link of image-sdk (cpuType=${cpuType}): ${workspaceUrl}/${artifactFile}"
        return
    }

    println "Deploying ${sdkArtifact} (cpuType=${cpuType}) to NEXUS..."
    nexusArtifactUploader(
        nexusVersion: "nexus3",
        protocol: "https",
        nexusUrl: "repo.netmodule.intranet:443",
        groupId: "nm.sdk",
        version: "latest",
        repository: "nm-os",
        credentialsId: "0099cd5a-81d4-4698-9b55-1206895d19fb",
        artifacts: [
            [artifactId: "${cpuType}",
             classifier: "",
             file: "${sdkArtifact}",
             type: "sh"]
        ]
    )
}
@@ -1,161 +0,0 @@
// declarative pipeline
pipeline {
    agent {
        node {
            label 'lxbuild4'
        }
    }

    parameters {
        booleanParam(name: 'UPDATE_NM_PARTS', defaultValue: true, description: 'update the netmodule submodules (e.g. before releasing)')
        booleanParam(name: 'UPDATE_COMMUNITY_PARTS', defaultValue: false, description: 'update the community submodules (maintenance/security updates)')
    }

    environment {
        SUBMODULE_VERSION_FILE = "submodule_revisions"
        SOURCE_REVISION_UPDATE_LOG = "src_rev_update.log"
        SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
    }

    options {
        timeout(time: 1, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '5')
        )
        disableConcurrentBuilds()
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if(params.UPDATE_NM_PARTS) {
                        currentBuild.displayName += "-nm"
                    }
                    if(params.UPDATE_COMMUNITY_PARTS) {
                        currentBuild.displayName += "-community"
                    }
                    printJobParameters()
                    prepareUpdate()
                }
            }
        }

        stage('update to head') {
            steps {
                script {
                    updateSubmodules(params.UPDATE_NM_PARTS, params.UPDATE_COMMUNITY_PARTS)
                    updateSourceRevisions(params.UPDATE_NM_PARTS)
                }
            }
            post {
                success {
                    archiveArtifacts(artifacts: "${env.SUBMODULE_VERSION_FILE}", onlyIfSuccessful: false)
                    sh "rm -f ${env.SUBMODULE_VERSION_FILE}"
                }
            }
        }

        stage('commit') {
            steps {
                commitChanges(params.UPDATE_NM_PARTS, params.UPDATE_COMMUNITY_PARTS)
            }
        }

    } // stages
}


def printJobParameters() {
    def node_name = "${NODE_NAME}"
    println "Running on agent: ${node_name}\n\n"
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
UPDATE_NM_PARTS        = ${params.UPDATE_NM_PARTS}\n\
UPDATE_COMMUNITY_PARTS = ${params.UPDATE_COMMUNITY_PARTS}\n\
----------------------------------\n"
}

def prepareUpdate() {
    sshagent (credentials: [env.SSH_ID]) {
        sh 'git submodule update --init' // initialise submodules on the first checkout
    }
    def userId = "${currentBuild.getBuildCauses()[0].userId}"
    def userName = "${currentBuild.getBuildCauses()[0].userName}"
    if("${userId}" == "null") {
        userId = "downstream"
        userName = "Jenkins"
    }
    env.TRIGGERED_USER = "${userName} (userId=${userId})"

    def notNmUpdate = "${!params.UPDATE_NM_PARTS}"
    def notCommunityUpdate = "${!params.UPDATE_COMMUNITY_PARTS}"
    if(notNmUpdate.toBoolean() && notCommunityUpdate.toBoolean()) {
        error("Nothing to update selected - both parameters are false")
    }
}

def updateSubmodules(isNmUpdate, isCommunityUpdate) {
    sshagent (credentials: [env.SSH_ID]) {
        if(isNmUpdate) {
            sh(script: "git submodule update --remote --rebase meta-netmodule-*")
        }
        if(isCommunityUpdate) {
            sh(script: "git submodule update --remote --rebase \$(git submodule status | grep -v \"meta-netmodule-*\" | sed 's/^ *//g' | cut -d' ' -f2)")
        }
    }
    submoduleStatus = sh(returnStdout: true, script: "git submodule status").trim() // print submodule hashes to jenkins log
    println "${submoduleStatus}"
    writeFile(file: "${env.SUBMODULE_VERSION_FILE}", text: "${submoduleStatus}")
}


def updateMachineSrcRevs(machine) {
    // set the reference machine to be able to load the yocto environment (used for devtool)
    env.MACHINE = "${machine}"
    println "update source revisions for ${env.MACHINE} to head..."
    sh("echo '==> ${env.MACHINE} =======================' >> ./${env.SOURCE_REVISION_UPDATE_LOG}")
    sh(returnStdout: true, script: "bash -c '. ./env/distro/ostree-image > /dev/null && cd ../ && ./src-rev.sh -v -d -r -l ./srcrev.log -a ./autorev-packages.inc'")
    sh("cat ./srcrev.log >> ./${env.SOURCE_REVISION_UPDATE_LOG}")
}


def updateSourceRevisions(isNmUpdate) {
    if(!isNmUpdate) {
        // netmodule layers are not selected for update --> nothing to do
        return
    }
    sh("echo '================== UPDATE SOURCE REVISIONS ==================' > ./${env.SOURCE_REVISION_UPDATE_LOG}")
    updateMachineSrcRevs("am335x-hw26")
    updateMachineSrcRevs("imx8-nmhw23")
    sh(returnStdout: true, script: "git checkout build/conf/bblayers.conf")
}

def commitSourceRevisionChanges() {
    println "commit source revision changes..."
    sshagent (credentials: [env.SSH_ID]) {
        sh(returnStdout: true, script: "./src-rev-commit.sh -b develop -v -l ./srcrev-commit.log")
    }
    sh "cat ./srcrev-commit.log >> ./${env.SOURCE_REVISION_UPDATE_LOG}"
}

def commitChanges(isNmUpdate, isCommunityUpdate) {
    String updatedLayers = ""
    if(isNmUpdate) {
        commitSourceRevisionChanges()
        updatedLayers += "netmodule"
    }
    if(isNmUpdate && isCommunityUpdate) { updatedLayers += " and " }
    if(isCommunityUpdate) { updatedLayers += "community" }
    sh(script: "git checkout ${env.BRANCH_NAME}")
    sh(script: "git add -u")
    stagedData = sh(returnStdout: true, script: "git diff --cached").trim()
    if("${stagedData}" == "") {
        println "everything up to date, nothing to commit"
        return
    }
    sshagent (credentials: [env.SSH_ID]) {
        sh(script: "git commit -m \"submodules: updated ${updatedLayers} hashes, triggered by ${env.TRIGGERED_USER}\" && git push")
    }
}
@@ -1,130 +0,0 @@
#!/bin/bash

SCRIPT_PARAMS="$*"
SCRIPT_PATHNAME=$(realpath ${0})
SCRIPT_NAME=$(basename ${SCRIPT_PATHNAME})
SCRIPT_PATH=$(dirname ${SCRIPT_PATHNAME})

export LOGFILE=/dev/null
export BRANCH_NAME=develop
export IS_DUMMY=false
export IS_VERBOSE=false

#**********************************************************************************************
# local helper functions
#**********************************************************************************************
#----------------------------------------------------------------------------------------------
function printUsage()
{
    echo -e "\nUsage: ${SCRIPT_NAME} [OPTIONS]\n"
    echo -e ""
    echo -e "  OPTIONS:"
    echo -e "  -b|--branch=BRANCH_NAME  commit to branch BRANCH_NAME (default = ${BRANCH_NAME})"
    echo -e "  -d|--dummy-commit        dry run: show what would be committed, then revert instead of committing"
    echo -e "  -l|--log=LOGFILE         write some output to LOGFILE (default = $LOGFILE)"
    echo -e "  -h|--help                show this help"
    echo -e "  -v|--verbose             set script to verbose"
}
#----------------------------------------------------------------------------------------------
function logMessage()
{
    local msg="${1}"
    echo "${msg}" >> $LOGFILE
}
#----------------------------------------------------------------------------------------------
function printMessage()
{
    local msg="${1}"
    logMessage "${msg}"
    if [[ "${IS_VERBOSE}" == "false" ]]; then
        return
    fi
    echo "${msg}"
}
#----------------------------------------------------------------------------------------------
function updateLayerToHead() {
    local layer="${1}"
    local branch="${2}"

    logMessage "--> checking out branch ${BRANCH_NAME}"
    git checkout ${BRANCH_NAME} >> $LOGFILE 2>&1
    logMessage "--> pull changes ${BRANCH_NAME}"
    git pull >> $LOGFILE 2>&1
}

#**********************************************************************************************
# main
#**********************************************************************************************
O=$(getopt -o hb:l:vd --long help,branch:,log:,verbose,dummy-commit -- "$@")
if [ $? != 0 ]; then
    echo "ERROR: Could not parse command line options"
    exit 1
fi

eval set -- "$O"
while true; do
    case "${1}" in
        -b|--branch)
            export BRANCH_NAME="${2}"
            shift 2
            ;;
        -v|--verbose)
            export IS_VERBOSE=true
            shift
            ;;
        -d|--dummy-commit)
            export IS_DUMMY=true
            shift
            ;;
        -l|--log)
            export LOGFILE="${2}"
            export LOGFILE=$(realpath "${LOGFILE}")
            shift 2
            ;;
        -h|--help)
            export IS_VERBOSE=false
            printUsage
            exit 0
            ;;
        --)
            shift
            break
            ;;
        *)
            printUsage; exit 0 ;;
    esac
done

echo "${SCRIPT_NAME} called with ${SCRIPT_PARAMS}" > $LOGFILE


for nmLayer in $(git status | grep "meta-netmodule" | cut -d':' -f2 | sed -e 's/^[ ]*//' | cut -d' ' -f1); do
    cd $nmLayer
    nbrChanges=$(git status | grep "modified:" | wc -l)
    printMessage "${nbrChanges} changes in layer ${nmLayer}"
    if [[ "${nbrChanges}" != "0" ]]; then
        logMessage "--> stash the changes first to checkout branch head..."
        git stash save >> $LOGFILE 2>&1
        updateLayerToHead "${nmLayer}" "${BRANCH_NAME}"
        logMessage "--> get changes back from stash..."
        git stash pop >> $LOGFILE 2>&1
        logMessage "--> adding tracked and changed files"
        git add -u >> $LOGFILE 2>&1
        printMessage "--> committing and pushing..."
        commitMsg="${nmLayer}: updated source revisions"
        if [[ "${IS_DUMMY}" == "true" ]]; then
            printMessage "    dummy commit: msg='${commitMsg}', content:"
            git status | grep "modified" >> $LOGFILE 2>&1
            # revert changes to simulate a commit :-P
            git reset HEAD * > /dev/null
            git checkout * > /dev/null
        else
            git commit -m "${commitMsg}" >> $LOGFILE 2>&1
            git push >> $LOGFILE 2>&1
        fi
        printMessage "----------"
    fi
    cd ..
done

exit 0
src-rev.sh
@@ -1,190 +0,0 @@
#!/bin/bash

SCRIPT_PARAMS="$*"
SCRIPT_PATHNAME=$(realpath ${0})
SCRIPT_NAME=$(basename ${SCRIPT_PATHNAME})
SCRIPT_PATH=$(dirname ${SCRIPT_PATHNAME})
YOCTO_DIR="${YOCTO_DIR:-$SCRIPT_PATH}"

LOGFILE=/dev/null
IS_REPLACE_SRCREV=false
IS_VERBOSE=false
IS_DISPLAY=false
LIST=""

#**********************************************************************************************
# local helper functions
#**********************************************************************************************
#----------------------------------------------------------------------------------------------
function printUsage()
{
    echo -e "\nUsage: ${SCRIPT_NAME} [OPTIONS]\n"
    echo -e "find the source revision of the packages within the netmodule meta layers and"
    echo -e "display/replace them with the latest hashes."
    echo -e "NOTE: there is a list containing packages set to AUTOREV. Use option -a to handle"
    echo -e "only revisions of this list. Otherwise we might get incompatible versions of 3rd"
    echo -e "party packages.\n"
    echo -e ""
    echo -e "  OPTIONS:"
    echo -e "  -r|--replace             replace SRCREVs"
    echo -e "  -d|--display             display found SRCREVs"
    echo -e "  -a|--autorev-list=LIST   handle only revisions of LIST (e.g. autorev-packages.inc)"
    echo -e "  -l|--log=LOGFILE         write command output to LOGFILE (default = $LOGFILE)"
    echo -e "  -h|--help                show this help"
    echo -e "  -v|--verbose             set script to verbose"
}
#----------------------------------------------------------------------------------------------
function logMessage()
{
    local msg="${1}"
    echo "${msg}" >> $LOGFILE
}
#----------------------------------------------------------------------------------------------
function printMessage()
{
    local msg="${1}"
    logMessage "${msg}"
    if [[ "${IS_VERBOSE}" == "false" ]]; then
        return
    fi
    echo "${msg}"
}
#----------------------------------------------------------------------------------------------
function checkingEnvironment()
{
    printMessage "> checking environment for devtool..."
    isEnvLoaded=$(which devtool | wc -l)
    logMessage "> isEnvLoaded=${isEnvLoaded}"
    if [[ "${isEnvLoaded}" == "0" ]]; then
        logMessage "Yocto environment not loaded (devtool not found) --> exiting"
        echo "Yocto environment not loaded (devtool not found) --> exiting"
        exit 1
    fi
}
#----------------------------------------------------------------------------------------------
function getBBFiles()
{
    if [[ "${LIST}" == "" ]]; then
        files=$(find ${YOCTO_DIR}/meta-netmodule* -name "*.bb" | xargs -i sh -c "grep -q SRCREV {} && echo {}")
    else
        files=$(cat $LIST | grep "#" | cut -d'#' -f2)
    fi
    echo "${files}"
}
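# Note: when a LIST file is given, each relevant line is expected to carry the corresponding
# .bb file behind a '#' (e.g. "some-package # meta-netmodule-foo/recipes-bar/some-package_git.bb"),
# since only the part after '#' is used above. The exact list format is an assumption derived
# from this parsing; the example path is hypothetical.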
#----------------------------------------------------------------------------------------------
function displayItem()
{
    local bbfile="${1}"
    local recipeName="${2}"
    local revision="${3}"

    printMessage "Recipe: $recipeName"
    printMessage "New Revision: $revision"
    printMessage "BB File: $bbfile"
    if [[ "${IS_DISPLAY}" == "true" && "${IS_VERBOSE}" == "false" ]]; then
        echo "Recipe: $recipeName"
        echo "New Revision: $revision"
        echo "BB File: $bbfile"
    fi
}


#**********************************************************************************************
# main
#**********************************************************************************************
O=$(getopt -o hl:a:vrd --long help,log:,autorev-list:,verbose,replace,display -- "$@")
if [ $? != 0 ]; then
    echo "ERROR: Could not parse command line options"
    exit 1
fi

eval set -- "$O"
while true; do
    case "${1}" in
        -v|--verbose)
            export IS_VERBOSE=true
            shift
            ;;
        -d|--display)
            export IS_DISPLAY=true
            shift
            ;;
        -r|--replace)
            export IS_REPLACE_SRCREV=true
            shift
            ;;
        -a|--autorev-list)
            export LIST="${2}"
            export LIST=$(realpath "${LIST}")
            shift 2
            ;;
        -l|--log)
            export LOGFILE="${2}"
            export LOGFILE=$(realpath "${LOGFILE}")
            shift 2
            ;;
        -h|--help)
            export IS_VERBOSE=false
            printUsage
            exit 0
            ;;
        --)
            shift
            break
            ;;
        *)
            printUsage; exit 0 ;;
    esac
done

echo "${SCRIPT_NAME} called with ${SCRIPT_PARAMS}" > $LOGFILE
checkingEnvironment

printMessage "> get bbfiles (LIST='${LIST}')..."
bbfiles=$(getBBFiles)
logMessage "${bbfiles}"

printMessage "> getting recipes residing in bbfiles..."
recipes=$(echo "$bbfiles" | xargs -i basename {} | sed 's/_.*//' | sed 's/\.bb//')
logMessage "${recipes}"

printMessage "> getting check-upgrade-status..."
newcommits=$(devtool check-upgrade-status $recipes 2>&1 | grep "new commits")
logMessage "${newcommits}"
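# Each matching line of "devtool check-upgrade-status" is assumed to look roughly like
#   INFO: <recipe> <current-version> <new-version> <maintainer> ... new commits ... <new-srcrev>
# i.e. the second field carries the recipe name and the last word the new revision, which is
# exactly what the parsing below relies on.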

IFS=$'\n'
for newcommit in $newcommits; do
    # We need to restore IFS for sed
    IFS=$' \t\n'
    # Get recipe name
    recipe_name=$(echo $newcommit | cut -d " " -f 2)
    # Get the last string in line
    newrev=$(echo $newcommit | sed 's/.* //')
    logMessage "> newCommit=${newcommit}; recipeName=${recipe_name}; newRev=${newrev}"

    # i acts as bbfile index like bbfile[i]
    i=1
    found=0
    for recipe in $recipes; do
        if [ "$recipe" == "$recipe_name" ]; then
            bbfile=$(echo $bbfiles | cut -d " " -f$i)
            displayItem "${bbfile}" "${recipe}" "${newrev}"
            found=1
            if [ "${IS_REPLACE_SRCREV}" == "true" ]; then
                printMessage " --> updating ${recipe} in ${bbfile} to ${newrev}"
                sed -i "s/SRCREV.*/SRCREV ?= \"$newrev\"/g" $bbfile
            fi
            break
        fi
        i=$((i+1))
    done
    if [ "$found" == "0" ]; then
        logMessage "Recipe ${recipe_name} not found --> exiting"
        echo "Recipe ${recipe_name} not found"
        exit 1
    fi
    logMessage "--------------------"
done

logMessage "> all recipes handled"
exit 0