Compare commits
No commits in common. "main" and "1.4.2" have entirely different histories.
@ -0,0 +1,191 @@
// common parts for yocto builds

// BUILD_DIR_POSTFIX needs to be defined in the job before
// loading this module
env.SHARED_BUILD = "${WORKSPACE}/build${env.BUILD_DIR_POSTFIX}"
env.BUILD_DEPLOY_DIR = "${env.SHARED_BUILD}/tmp/deploy/images"
env.BUILD_LICENSE_DIR = "${env.SHARED_BUILD}/tmp/deploy/licenses"
env.BUILD_HISTORY_DIR = "${env.SHARED_BUILD}/buildhistory"
env.DISTRO_VERSION_PATHNAME = "${env.SHARED_BUILD}/conf/distro_version.inc"
env.DOWNLOAD_DIR = "${WORKSPACE}/build-common/downloads"
env.BINARY_STORAGE_URL = "http://nmrepo.netmodule.intranet/src/yocto-downloads"
env.SUBMODULE_VERION_FILE = "submodule_revisions"
env.DISTRO_VERSION_FILE = "distro_version.inc"
env.AUTOREV_VERSION_FILE = "autorev_revisions.inc"
env.PACKAGE_NAME = "nm-os"

echo "loading common yocto build module..."

// Methods declared in external code are accessible
//  - directly from other code in the external file
//  - indirectly via the object created by the load operation,
//    e.g. extcode.build(...)

def cleanWorkspace() {
    println "cleaning workspace..."
    sh "[ -d \"${WORKSPACE}/build-common/sstate-cache\" ] && find \"${WORKSPACE}/build-common/sstate-cache/\" -name \"sstate*\" -atime +3 -delete || true"
    sh "[ -d \"${env.SHARED_BUILD}/tmp/work\" ] && find \"${env.SHARED_BUILD}/tmp/work/\" -name temp -type d -exec rm -rf {} + || true"
}

def isRelease(versionParam) {
    if((versionParam == "") || (versionParam == "latest")) {
        return false
    }
    return true
}

def handleSubmodules(versionParam) {
    sshagent (credentials: [env.SSH_ID]) {
        println "init submodules..."
        sh 'git submodule update --init'
        if(isRelease(versionParam)) {
            println "set submodules to frozen state..."
            sh 'git submodule update' // set all submodules to their frozen commits
        }
        else {
            println "setting netmodule submodule hashes to head..."
            sh 'git submodule update --remote --rebase meta-netmodule-*' // update our own submodules to HEAD
        }
    }
    submoduleStatus = sh(returnStdout: true, script: "git submodule status").trim() // print submodule hashes to jenkins log
    println "${submoduleStatus}"
    writeFile(file: "${env.SUBMODULE_VERION_FILE}", text: "${submoduleStatus}")
}

def handleAutoRevIncludeFile(versionParam) {
    if(isRelease(versionParam)) {
        println "suppressing autorev include file..."
        sh(script: "sed '/autorev-packages/d' -i ${env.SHARED_BUILD}/conf/local.conf")
    }
}

def cleanupAutoRevIncludeFile(versionParam) {
    if(isRelease(versionParam)) {
        println "cleaning up suppressed autorev include file..."
        sh(script: "git checkout ${env.SHARED_BUILD}/conf/local.conf")
    }
}

def updateSourceRevisions() {
    println "update source revisions to head..."
    sh(returnStdout: true, script: "bash -c '. ./env.image-ostree > /dev/null && cd ../ && ./src-rev.sh -v -d -r -l ./srcrev.log -a ./autorev-packages.inc'")
}

def getTopUpstreamBuildNumber() {
    // Iterating through all upstream jobs would look like this:
    // currentBuild.upstreamBuilds.each { item ->
    //     echo "upstream build: ${item}"
    //     def nbr = item.getNumber()
    //     echo "nbr=${nbr}"
    // }
    def upstreamJobList = currentBuild.upstreamBuilds
    def nbrOfUpstreamJobs = upstreamJobList.size()
    if (nbrOfUpstreamJobs == 0)
        return 0;
    def topJob = upstreamJobList[nbrOfUpstreamJobs-1]
    println "Top upstream project: " + topJob.getFullDisplayName()
    def topJobNbr = topJob.getNumber()
    println "Top upstream job build Number = ${topJobNbr}"
    return topJobNbr
}

def buildVersionString(imageType, actualBaseVersionString, versionParameter) {
    // official release version
    if(isRelease(versionParameter)) {
        String newVersionStr = versionParameter
        return newVersionStr
    }

    // SDK or FCT release
    if((imageType == 'fct') || (imageType == 'sdk')) {
        return actualBaseVersionString
    }

    // nightly/incremental release
    def buildnbr = getTopUpstreamBuildNumber()
    String nightlyPart = actualBaseVersionString + ".Test${buildnbr}"
    return nightlyPart
}

def getVersionString(versionParam, imageType) {
    sshagent (credentials: [env.SSH_ID]) {
        sh 'git fetch -ap'
        sh 'git fetch -t'
    }
    def gitCmd = "git describe --tags"

    if(!isRelease(versionParam)) {
        gitCmd = "${gitCmd} --dirty"
    }
    gitversion = sh(returnStdout: true, script: "${gitCmd}").trim()
    String[] versionArr = "${gitversion}".split("-")

    versionArr[0] = buildVersionString(imageType, versionArr[0], versionParam)
    rlsVersion = versionArr.join("-")
    return rlsVersion
}

def changeDistroVersion(versionString) {
    println "Set the distro version to ${versionString}..."
    def versionTag = "DISTRO_VERSION = \"${versionString}\""
    writeFile(file: "${env.DISTRO_VERSION_PATHNAME}", text: "${versionTag}")
    sh(script: "cp ${env.DISTRO_VERSION_PATHNAME} ${env.DISTRO_VERSION_FILE}")
}

def cleanupDistroVersion() {
    println "cleaning repository regarding distro version..."
    sh(script: "git clean -f ${env.DISTRO_VERSION_PATHNAME}")
}

def archiveImages(imageDir, imgType) {
    dir ('tmp/artifacts') {
        zip archive: true, dir: "${WORKSPACE}/${imageDir}", glob: "*", zipFile: "${env.PACKAGE_NAME}-${env.BUILD_VERSION}-${params.MACHINE}-${imgType}.zip"
    }
    sh "rm -rf ${WORKSPACE}/tmp/artifacts"
}

def syncSources(src, dst) {
    def hasSrcUrl = (src.contains("http"))
    def from = src
    def to = dst

    // convert the URL into ssh syntax:
    def url = (hasSrcUrl) ? src : dst
    String[] repoParts = url.split("//")[1].split("/")
    repoParts[0] = "build_user@" + repoParts[0] + ":/repo/repo"
    sshSrc = repoParts.join("/")

    if(hasSrcUrl) {
        println "getting data from server..."
        from = sshSrc
        sh(script: "bash -c \"mkdir -p ${to}\"")
    }
    else {
        println "putting data to server..."
        to = sshSrc
    }

    sshagent (credentials: ['7767e711-08a4-4c71-b080-197253dd7392']) {
        sh "set +x && rsync -q -auvz --ignore-existing -e \"ssh\" ${from}/* ${to}"
    }
}

def getAutoRevHashes(envType) {
    def env = "${envType}" == "" ? "" : "-${envType}"
    def revs = sh(returnStdout: true, script: "bash -c \". ./env.image${env} > /dev/null && buildhistory-collect-srcrevs\"").trim()
    return revs
}

// !!Important Boilerplate!!
// The external code must return its contents as an object
return this;
@ -0,0 +1,171 @@
// Loading code requires a NODE context,
// but we want the code accessible outside the node context,
// so declare yoctocommon (the object created by the LOAD operation) outside the node block.
def yoctocommon

// declarative pipeline
pipeline {
    agent {
        node {
            label "${params.NODE_NAME}"
        }
    }

    parameters {
        choice(name: 'MACHINE', choices: ['select...', 'am335x-nrhw20', 'am335x-nmhw21', 'imx8-nmhw23', 'am335x-nmhw24', 'am335x-hw25', 'am335x-hw26'], description: 'choose target platform')
        choice(name: 'IMAGE_TYPE', choices: ['lava', 'fct', 'minimal'], description: 'choose image type')
        string(name: 'RLS_VERSION', defaultValue: '', description: 'Set the version to build and use committed submodules')
        string(name: 'NODE_NAME', defaultValue: 'lxbuild3', description: 'Overwrite default node name to build on')
        booleanParam(name: 'CLEAN_BUILD', defaultValue: false, description: 'clean all temp directories before build starts')
    }

    environment {
        IMG_OUTPUT_DIR = "tmp/build-output"
    }

    options {
        timeout(time: 5, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '50',
                       daysToKeepStr: '7',
                       artifactNumToKeepStr: '50',
                       artifactDaysToKeepStr: '7'
            )
        )
        disableConcurrentBuilds()
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if("${params.MACHINE}" == "select...") {
                        currentBuild.result = 'ABORTED'
                        error("Missing machine type --> select parameter MACHINE for a proper build")
                    }

                    // this definition is needed for selecting the
                    // correct build directory
                    env.BUILD_DIR_POSTFIX = "${params.IMAGE_TYPE}" == "lava" ? "-fct" : "-${params.IMAGE_TYPE}"

                    // take the correct user ID for the ssh connection of
                    // the corresponding build server
                    if("${params.NODE_NAME}" == "lxbuild4") {
                        env.SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
                    }
                    else if("${params.NODE_NAME}" == "lxbuild3") {
                        env.SSH_ID = '70c27394-cb7d-4304-aed1-89e15a3a78d0'
                    }
                    else {
                        env.SSH_ID = ''
                    }
                    println "SSH_ID used: ${env.SSH_ID}"

                    // load yocto common file
                    env.ROOTDIR = pwd()
                    yoctocommon = load "${env.ROOTDIR}/Jenkinsfile_Common"

                    // clean up no longer needed packages
                    yoctocommon.cleanWorkspace()

                    // Prepare Build Environment
                    env.YOCTO_DEPLOYS = "${env.BUILD_DEPLOY_DIR}/${params.MACHINE}"
                    yoctocommon.handleSubmodules("${params.RLS_VERSION}")
                    yoctocommon.handleAutoRevIncludeFile("${params.RLS_VERSION}")
                    version = yoctocommon.getVersionString("${params.RLS_VERSION}", "${params.IMAGE_TYPE}")
                    env.BUILD_VERSION = "${version}"
                    currentBuild.displayName = "${version}-${params.MACHINE}-${params.IMAGE_TYPE}" // replace Bitbake timestamp after building
                    printJobParameters()
                    yoctocommon.changeDistroVersion("${version}")
                    yoctocommon.syncSources("${env.BINARY_STORAGE_URL}", "${env.DOWNLOAD_DIR}")
                }
                writeFile file: 'VERSION', text: "${env.PACKAGE_NAME}: ${env.BUILD_VERSION}"
            }
        }

        stage('clean') {
            when { expression { return params.CLEAN_BUILD } }
            steps {
                dir ("${env.SHARED_BUILD}/tmp") { deleteDir() }
                dir ("${env.SHARED_BUILD}/tmp-glibc") { deleteDir() }
            }
        }

        stage('build') {
            steps {
                script {
                    build(params.IMAGE_TYPE, params.IMAGE_TYPE)
                    createArchive(params.IMAGE_TYPE, env.IMG_OUTPUT_DIR)
                    yoctocommon.archiveImages(env.IMG_OUTPUT_DIR, params.IMAGE_TYPE)
                    sh "rm -rf ${IMG_OUTPUT_DIR}"
                }
            }
            post {
                always {
                    script {
                        yoctocommon.cleanupAutoRevIncludeFile("${params.RLS_VERSION}")
                        yoctocommon.syncSources("${env.DOWNLOAD_DIR}", "${env.BINARY_STORAGE_URL}")
                    }
                }
            }
        }

        stage('collect versions') {
            steps {
                script {
                    revisions = yoctocommon.getAutoRevHashes(params.IMAGE_TYPE)
                    writeFile(file: "${env.AUTOREV_VERSION_FILE}", text: "${revisions}")
                }
            }
            post {
                success {
                    archiveArtifacts(artifacts: "${env.SUBMODULE_VERION_FILE}, ${env.AUTOREV_VERSION_FILE}, ${env.DISTRO_VERSION_FILE}", onlyIfSuccessful: false)
                }
            }
        }

    } // stages
}

def printJobParameters() {
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
MACHINE = ${params.MACHINE}\n\
IMAGE_TYPE = ${params.IMAGE_TYPE}\n\
CLEAN_BUILD = ${params.CLEAN_BUILD}\n\
RLS_VERSION = ${params.RLS_VERSION}\n\
--> version = ${env.BUILD_VERSION}\n\
----------------------------------\n"
}

def build(envType, imgType) {
    sshagent (credentials: [env.SSH_ID]) {
        def envPostFix = "${envType}" == "" ? "" : "-${envType}"
        sh "bash -c '. ./env.image${envPostFix} > /dev/null && bitbake -q -k virtual/netmodule-image'"
    }
}

def createArchive(imgType, outputDir) {
    def imgTypePostfix = "${imgType}" == "" ? "" : "-${imgType}"
    dir (outputDir) {
        def image_basename = "netmodule-linux-image${imgTypePostfix}-${params.MACHINE}"
        def basename_built = "${env.YOCTO_DEPLOYS}/${image_basename}"
        def basename_archive = "./image${imgTypePostfix}-${params.MACHINE}"

        sh "cp ${basename_built}.manifest ${basename_archive}.manifest"
        sh "bash -c '${WORKSPACE}/openembedded-core/scripts/buildhistory-collect-srcrevs -p ${env.BUILD_HISTORY_DIR} > srcrev-${params.MACHINE}${imgTypePostfix}.inc'"

        sh label: 'Copy License Manifest', returnStatus: true, script: """
            LATEST_LICENSE_DIR=\$(ls -Artd ${env.BUILD_LICENSE_DIR}/netmodule-linux-image${imgTypePostfix}* | tail -n 1)
            cp \$LATEST_LICENSE_DIR/license.manifest ${basename_archive}_license.manifest
        """

        sh "cp ${env.YOCTO_DEPLOYS}/fitImage-${image_basename}-${params.MACHINE} fitImage-${image_basename}"
        sh "cp ${basename_built}.cpio.gz ${basename_archive}.cpio.gz"
    }
}
@ -0,0 +1,234 @@
// Loading code requires a NODE context,
// but we want the code accessible outside the node context,
// so declare yoctocommon (the object created by the LOAD operation) outside the node block.
def yoctocommon

// declarative pipeline
pipeline {
    agent {
        node {
            label "${params.NODE_NAME}"
        }
    }

    parameters {
        choice(name: 'MACHINE', choices: ['select...', 'am335x-nrhw20', 'am335x-nmhw21', 'imx8-nmhw23', 'am335x-nmhw24', 'am335x-hw25', 'am335x-hw26'], description: 'choose target platform')
        choice(name: 'IMAGE_TYPE', choices: ['dev', 'bootloader', 'release'], description: 'choose image type')
        string(name: 'RLS_VERSION', defaultValue: '', description: 'Set the version to build and use committed submodules')
        string(name: 'NODE_NAME', defaultValue: 'lxbuild4', description: 'Overwrite default node name to build on')
        booleanParam(name: 'CLEAN_BUILD', defaultValue: false, description: 'clean all temp directories before build starts')
    }

    environment {
        WORK_DIR = "tmp"
        IMG_OUTPUT_DIR = "${WORK_DIR}/build-output"
    }

    options {
        timeout(time: 5, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '50',
                       daysToKeepStr: '7',
                       artifactNumToKeepStr: '50',
                       artifactDaysToKeepStr: '7'
            )
        )
        disableConcurrentBuilds()
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if("${params.MACHINE}" == "select...") {
                        currentBuild.result = 'ABORTED'
                        error("Missing machine type --> select parameter MACHINE for a proper build")
                    }

                    cleanLeftOvers(env.IMG_OUTPUT_DIR)

                    // this definition is needed for selecting the
                    // correct build directory
                    env.BUILD_DIR_POSTFIX = ""

                    // take the correct user ID for the ssh connection of
                    // the corresponding build server
                    if("${params.NODE_NAME}" == "lxbuild4") {
                        env.SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
                    }
                    else if("${params.NODE_NAME}" == "lxbuild3") {
                        env.SSH_ID = '70c27394-cb7d-4304-aed1-89e15a3a78d0'
                    }
                    else {
                        env.SSH_ID = ''
                    }
                    println "SSH_ID used: ${env.SSH_ID}"

                    // load yocto common file
                    env.ROOTDIR = pwd()
                    yoctocommon = load "${env.ROOTDIR}/Jenkinsfile_Common"

                    // clean up no longer needed packages
                    yoctocommon.cleanWorkspace()

                    // Prepare Build Environment
                    env.YOCTO_DEPLOYS = "${env.BUILD_DEPLOY_DIR}/${params.MACHINE}"
                    yoctocommon.handleSubmodules("${params.RLS_VERSION}")
                    yoctocommon.handleAutoRevIncludeFile("${params.RLS_VERSION}")
                    version = yoctocommon.getVersionString("${params.RLS_VERSION}", "${params.IMAGE_TYPE}")
                    env.BUILD_VERSION = "${version}"
                    currentBuild.displayName = "${version}-${params.MACHINE}-${params.IMAGE_TYPE}" // replace Bitbake timestamp after building
                    printJobParameters()
                    yoctocommon.changeDistroVersion("${version}")
                    yoctocommon.syncSources("${env.BINARY_STORAGE_URL}", "${env.DOWNLOAD_DIR}")
                }
                writeFile file: 'VERSION', text: "${env.PACKAGE_NAME}: ${env.BUILD_VERSION}"
            }
        }

        stage('clean') {
            when { expression { return params.CLEAN_BUILD } }
            steps {
                dir ("${SHARED_BUILD}/tmp") { deleteDir() }
                dir ("${SHARED_BUILD}/tmp-glibc") { deleteDir() }
            }
        }

        stage('build') {
            steps {
                script {
                    build(params.IMAGE_TYPE)

                    // archive a bootloader package:
                    // the bootloader is also built with the dev image, hence we combine it
                    def isBootLoaderOnly = (params.IMAGE_TYPE == 'bootloader').toBoolean()
                    if(isBootLoaderOnly || (params.IMAGE_TYPE == 'dev')) {
                        createBslPackage(env.IMG_OUTPUT_DIR, params.MACHINE)
                        archivePackage(yoctocommon, env.IMG_OUTPUT_DIR, 'bootloader', !isBootLoaderOnly)
                    }

                    // archive an image package:
                    // skip for bootloader-only builds
                    if(!isBootLoaderOnly) {
                        createImagePackage(params.IMAGE_TYPE, env.IMG_OUTPUT_DIR)
                        archiveOSTreeArtifact(env.IMG_OUTPUT_DIR)
                        archivePackage(yoctocommon, env.IMG_OUTPUT_DIR, params.IMAGE_TYPE, false)
                    }
                }
            }
            post {
                always {
                    script {
                        cleanLeftOvers(env.WORK_DIR)
                        yoctocommon.cleanupAutoRevIncludeFile("${params.RLS_VERSION}")
                        yoctocommon.syncSources("${env.DOWNLOAD_DIR}", "${env.BINARY_STORAGE_URL}")
                    }
                }
            }
        }

        stage('collect versions') {
            steps {
                script {
                    revisions = yoctocommon.getAutoRevHashes('ostree')
                    writeFile(file: "${env.AUTOREV_VERSION_FILE}", text: "${revisions}")
                }
            }
            post {
                success {
                    archiveArtifacts(artifacts: "${env.SUBMODULE_VERION_FILE}, ${env.AUTOREV_VERSION_FILE}, ${env.DISTRO_VERSION_FILE}", onlyIfSuccessful: false)
                }
            }
        }

    } // stages
}

//-----------------------------------------------------------------------------
def printJobParameters() {
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
MACHINE = ${params.MACHINE}\n\
IMAGE_TYPE = ${params.IMAGE_TYPE}\n\
CLEAN_BUILD = ${params.CLEAN_BUILD}\n\
RLS_VERSION = ${params.RLS_VERSION}\n\
--> version = ${env.BUILD_VERSION}\n\
----------------------------------\n"
}

//-----------------------------------------------------------------------------
def cleanLeftOvers(cleaningDir) {
    sh "rm -rf ${cleaningDir}"
}

//-----------------------------------------------------------------------------
def build(imgType) {
    sshagent (credentials: [env.SSH_ID]) {
        def tgtImgType = "${imgType}" == "bootloader" ? "dev" : "${imgType}"
        def imgTypePostfix = "${tgtImgType}" == "" ? "" : "-${tgtImgType}"
        sh "bash -c '. ./env.image-ostree > /dev/null && bitbake -q -k netmodule-linux-image${imgTypePostfix}'"
    }
}

//-----------------------------------------------------------------------------
def createBslPackage(outputDir, machine) {
    dir(outputDir) {
        String machineStr = machine
        String[] machineArr = machineStr.split('-')

        println "get bootloader artifacts for package..."
        if(machineArr[0] == 'imx8') {
            sh "cp ${env.YOCTO_DEPLOYS}/imx-boot ."
            sh "cp ${env.YOCTO_DEPLOYS}/imx-boot.sd ."
        }
        else {
            sh "cp ${env.YOCTO_DEPLOYS}/*u-boot-${params.MACHINE}*.img ."
            sh "cp ${env.YOCTO_DEPLOYS}/*u-boot-${params.MACHINE}*.xmodem.bin ."
        }
    }
}

//-----------------------------------------------------------------------------
def createImagePackage(imgType, outputDir) {
    dir (outputDir) {
        println "get image artifacts for package..."
        def imgTypePostfix = "${imgType}" == "" ? "" : "-${imgType}"
        def image_basename = "netmodule-linux-image${imgTypePostfix}-${params.MACHINE}"
        def basename_built = "${env.YOCTO_DEPLOYS}/${image_basename}"
        def basename_archive = "./image${imgTypePostfix}-${params.MACHINE}"

        sh "cp ${basename_built}.manifest ${basename_archive}.manifest"
        sh "bash -c '${WORKSPACE}/openembedded-core/scripts/buildhistory-collect-srcrevs -p ${env.BUILD_HISTORY_DIR} > srcrev-${params.MACHINE}${imgTypePostfix}.inc'"

        sh label: 'Copy License Manifest', returnStatus: true, script: """
            LATEST_LICENSE_DIR=\$(ls -Artd ${env.BUILD_LICENSE_DIR}/netmodule-linux-image${imgTypePostfix}* | tail -n 1)
            cp \$LATEST_LICENSE_DIR/license.manifest ${basename_archive}_license.manifest
        """

        sh label: 'Copy initramfs License Manifest', returnStatus: true, script: """
            LATEST_LICENSE_DIR=\$(ls -Artd ${env.BUILD_LICENSE_DIR}/initramfs-ostree-image-${params.MACHINE}-* | tail -n 1)
            cp \$LATEST_LICENSE_DIR/license.manifest initramfs-ostree-image_license.manifest
        """

        sh "cp ${basename_built}.ota-ext4 ${basename_archive}.ota-ext4"
        sh "cp ${basename_built}.wic ${basename_archive}.wic"
        sh "tar czf ./ostree_repo${imgTypePostfix}.tar.gz -C ${env.YOCTO_DEPLOYS}/ostree_repo ."
    }
}

//-----------------------------------------------------------------------------
def archivePackage(yoctocommon, pkgDir, imgType, doCleanPkgDir) {
    yoctocommon.archiveImages(pkgDir, imgType)
    if(doCleanPkgDir) {
        println "cleaning pkgDir..."
        sh "rm -rf ./${pkgDir}/*"
    }
}

//-----------------------------------------------------------------------------
def archiveOSTreeArtifact(outputDir) {
    archiveArtifacts artifacts: "${outputDir}/ostree_repo*.tar.gz", onlyIfSuccessful: true
    sh "rm -f ./${outputDir}/ostree_repo*.tar.gz"
}
@ -0,0 +1,173 @@
// Loading code requires a NODE context,
// but we want the code accessible outside the node context,
// so declare yoctocommon (the object created by the LOAD operation) outside the node block.
def yoctocommon

// declarative pipeline
pipeline {
    agent {
        node {
            label "${params.NODE_NAME}"
        }
    }

    parameters {
        choice(name: 'MACHINE', choices: ['select...', 'am335x-nrhw20', 'am335x-nmhw21', 'imx8-nmhw23', 'am335x-nmhw24', 'am335x-hw25', 'am335x-hw26'], description: 'choose target platform')
        string(name: 'RLS_VERSION', defaultValue: '', description: 'Set the version to build and use committed submodules')
        string(name: 'NODE_NAME', defaultValue: 'oem-ci', description: 'Enter a specific node name')
        booleanParam(name: 'CLEAN_BUILD', defaultValue: false, description: 'clean all temp directories before build starts')
        booleanParam(name: 'BUILD_FROM_DEV_IMAGE', defaultValue: false, description: 'build SDK from dev image')
    }

    environment {
        // SDK Build Parameter (default is recipe netmodule-linux-sdk)
        IS_NM_LINUX_SDK = "${!params.BUILD_FROM_DEV_IMAGE}"
    }

    options {
        timeout(time: 8, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '5')
        )
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if("${params.MACHINE}" == "select...") {
                        currentBuild.result = 'ABORTED'
                        error("Missing machine type --> select parameter MACHINE for a proper build")
                    }

                    // this definition is needed for selecting the
                    // correct build directory
                    env.BUILD_DIR_POSTFIX = ""

                    // take the correct user ID for the ssh connection of
                    // the corresponding build server
                    if("${params.NODE_NAME}" == "lxbuild4") {
                        env.SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
                    }
                    else if("${params.NODE_NAME}" == "lxbuild3") {
                        env.SSH_ID = '70c27394-cb7d-4304-aed1-89e15a3a78d0'
                    }
                    else {
                        error("Declare the NODE_NAME specific to the build agent")
                    }
                    println "SSH_ID used: ${env.SSH_ID}"

                    // load yocto common file
                    env.ROOTDIR = pwd()
                    yoctocommon = load "${env.ROOTDIR}/Jenkinsfile_Common"

                    // clean up no longer needed packages
                    yoctocommon.cleanWorkspace()

                    // Prepare Build Environment
                    env.YOCTO_DEPLOYS = "${env.SHARED_BUILD}/tmp/deploy/sdk"
                    yoctocommon.handleSubmodules("${params.RLS_VERSION}")
                    yoctocommon.handleAutoRevIncludeFile("${params.RLS_VERSION}")
                    cleanLastBuildArtifacts()
                    version = yoctocommon.getVersionString("${params.RLS_VERSION}", "sdk")
                    env.BUILD_VERSION = "${version}"
                    currentBuild.displayName = "${version}-${params.MACHINE}-sdk" // replace Bitbake timestamp after building
                    printJobParameters()
                    yoctocommon.changeDistroVersion("${version}")
                    yoctocommon.syncSources("${env.BINARY_STORAGE_URL}", "${env.DOWNLOAD_DIR}")
                }
                writeFile file: 'VERSION', text: "${env.PACKAGE_NAME}: ${env.BUILD_VERSION}"
            }
        }

        stage('clean') {
            when { expression { return params.CLEAN_BUILD } }
            steps {
                dir ("${SHARED_BUILD}/tmp") { deleteDir() }
                dir ("${SHARED_BUILD}/tmp-glibc") { deleteDir() }
            }
        }

        stage('build') {
            steps {
                script {
                    sshagent (credentials: [env.SSH_ID]) {
                        if(env.IS_NM_LINUX_SDK.toBoolean()) {
                            sh "bash -c '. ./env.image-ostree && bitbake -q netmodule-linux-sdk'"
                        }
                        else {
                            sh "bash -c '. ./env.image-ostree && bitbake -q -fc populate_sdk netmodule-linux-image-dev'"
                        }
                    }
                }
            }
            post {
                success {
                    script {
                        deploySdkToArtifactory(params.BUILD_FROM_DEV_IMAGE, "${params.MACHINE}")
                    }
                } // success
                always {
                    script {
                        yoctocommon.cleanupAutoRevIncludeFile("${params.RLS_VERSION}")
                        yoctocommon.syncSources("${env.DOWNLOAD_DIR}", "${env.BINARY_STORAGE_URL}")
                    }
                } // always
            }
        }

    } // stages
}

def printJobParameters() {
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
MACHINE = ${params.MACHINE}\n\
CLEAN_BUILD = ${params.CLEAN_BUILD}\n\
IS_NM_LINUX_SDK = ${env.IS_NM_LINUX_SDK}\n\
RLS_VERSION = ${params.RLS_VERSION}\n\
--> version = ${env.BUILD_VERSION}\n\
----------------------------------\n"
}

def cleanLastBuildArtifacts() {
    println "cleaning artifacts from last build..."
    sh "rm -f ${env.YOCTO_DEPLOYS}/*"
    sshagent (credentials: [env.SSH_ID]) {
        sh "bash -c '. ./env.image-ostree > /dev/null && git fetch -ap && bitbake -q -c cleanall netmodule-linux-sdk'"
    }
}

def deploySdkToArtifactory(isBuildFromDev, machine) {
    def cpuType = machine.split("-")[0]
    def sdkArtifact = sh(returnStdout: true, script: "ls ${env.YOCTO_DEPLOYS}/netmodule-linux-ostree*-netmodule-linux-*.sh").trim()
    if(isBuildFromDev) {
        def jenkinsBaseUrl = "https://jenkins.netmodule.intranet"
        def workspaceUrl = "${jenkinsBaseUrl}/job/NMOS/job/build-sdk/job/${env.BRANCH_NAME}/${currentBuild.number}/execution/node/3/ws/build/tmp/deploy/sdk"
        def artifactFile = sh(returnStdout: true, script: "basename ${sdkArtifact}")
        println "Download link of image-sdk (cpuType=${cpuType}): ${workspaceUrl}/${artifactFile}"
        return
    }

    println "Deploying ${sdkArtifact} (cpuType=${cpuType}) to NEXUS..."
    nexusArtifactUploader(
        nexusVersion: "nexus3",
        protocol: "https",
        nexusUrl: "repo.netmodule.intranet:443",
        groupId: "nm.sdk",
        version: "latest",
        repository: "nm-os",
        credentialsId: "0099cd5a-81d4-4698-9b55-1206895d19fb",
        artifacts: [
            [artifactId: "${cpuType}",
             classifier: "",
             file: "${sdkArtifact}",
             type: "sh"]
        ]
    );
}
@ -0,0 +1,161 @@
// declarative pipeline
pipeline {
    agent {
        node {
            label 'lxbuild4'
        }
    }

    parameters {
        booleanParam(name: 'UPDATE_NM_PARTS', defaultValue: true, description: 'update the netmodule submodules (e.g. before releasing)')
        booleanParam(name: 'UPDATE_COMMUNITY_PARTS', defaultValue: false, description: 'update the community submodules (maintenance/security updates)')
    }

    environment {
        SUBMODULE_VERSION_FILE = "submodule_revisions"
        SOURCE_REVISION_UPDATE_LOG = "src_rev_update.log"
        SSH_ID = '6b90ac7f-9596-4e43-923b-6c9179a10d8a'
    }

    options {
        timeout(time: 1, unit: 'HOURS')
        buildDiscarder(
            logRotator(numToKeepStr: '5')
        )
        disableConcurrentBuilds()
    }

    stages {
        stage('prepare') {
            steps {
                script {
                    if(params.UPDATE_NM_PARTS) {
                        currentBuild.displayName += "-nm"
                    }
                    if(params.UPDATE_COMMUNITY_PARTS) {
                        currentBuild.displayName += "-community"
                    }
                    printJobParameters()
                    prepareUpdate()
                }
            }
        }

        stage('update to head') {
            steps {
                script {
                    updateSubmodules(params.UPDATE_NM_PARTS, params.UPDATE_COMMUNITY_PARTS)
                    updateSourceRevisions(params.UPDATE_NM_PARTS)
                }
            }
            post {
                success {
                    archiveArtifacts(artifacts: "${env.SUBMODULE_VERSION_FILE}", onlyIfSuccessful: false)
                    sh "rm -f ${env.SUBMODULE_VERSION_FILE}"
                }
            }
        }

        stage('commit') {
            steps {
                commitChanges(params.UPDATE_NM_PARTS, params.UPDATE_COMMUNITY_PARTS)
            }
        }

    } // stages
}

def printJobParameters() {
    def node_name = "${NODE_NAME}"
    println "Running on agent: ${node_name}\n\n"
    println "----------------------------------\n\
Job Parameters:\n\
----------------------------------\n\
UPDATE_NM_PARTS = ${params.UPDATE_NM_PARTS}\n\
UPDATE_COMMUNITY_PARTS = ${params.UPDATE_COMMUNITY_PARTS}\n\
----------------------------------\n"
}

def prepareUpdate() {
    sshagent (credentials: [env.SSH_ID]) {
        sh 'git submodule update --init' // init submodules, needed on a fresh checkout
    }
    def userId = "${currentBuild.getBuildCauses()[0].userId}"
    def userName = "${currentBuild.getBuildCauses()[0].userName}"
    if("${userId}" == "null") {
        userId = "downstream"
        userName = "Jenkins"
    }
    env.TRIGGERED_USER = "${userName} (userId=${userId})"

    def notNmUpdate = "${!params.UPDATE_NM_PARTS}"
    def notCommunityUpdate = "${!params.UPDATE_COMMUNITY_PARTS}"
    if(notNmUpdate.toBoolean() && notCommunityUpdate.toBoolean()) {
        error("Nothing to update selected - both parameters are false")
    }
}

def updateSubmodules(isNmUpdate, isCommunityUpdate) {
    sshagent (credentials: [env.SSH_ID]) {
        if(isNmUpdate) {
            sh(script: "git submodule update --remote --rebase meta-netmodule-*")
        }
        if(isCommunityUpdate) {
            sh(script: "git submodule update --remote --rebase \$(git submodule status | grep -v \"meta-netmodule-*\" | sed 's/^ *//g' | cut -d' ' -f2)")
        }
    }
    submoduleStatus = sh(returnStdout: true, script: "git submodule status").trim() // print submodule hashes to jenkins log
    println "${submoduleStatus}"
    writeFile(file: "${env.SUBMODULE_VERSION_FILE}", text: "${submoduleStatus}")
}

def updateMachineSrcRevs(machine) {
    // set the reference machine to be able to load the yocto environment (used for devtool)
    env.MACHINE = "${machine}"
    println "update source revisions for ${env.MACHINE} to head..."
    sh("echo '==> ${env.MACHINE} =======================' >> ./${env.SOURCE_REVISION_UPDATE_LOG}")
    sh(returnStdout: true, script: "bash -c '. ./env.image-ostree > /dev/null && cd ../ && ./src-rev.sh -v -d -r -l ./srcrev.log -a ./autorev-packages.inc'")
    sh("cat ./srcrev.log >> ./${env.SOURCE_REVISION_UPDATE_LOG}")
}

def updateSourceRevisions(isNmUpdate) {
    if(!isNmUpdate) {
        // netmodule layers are not selected for update --> nothing to do
        return
    }
    sh("echo '================== UPDATE SOURCE REVISIONS ==================' > ./${env.SOURCE_REVISION_UPDATE_LOG}")
    updateMachineSrcRevs("am335x-hw26")
    updateMachineSrcRevs("imx8-nmhw23")
    sh(returnStdout: true, script: "git checkout build/conf/bblayers.conf")
}

def commitSourceRevisionChanges() {
    println "commit source revision changes..."
    sshagent (credentials: [env.SSH_ID]) {
        sh(returnStdout: true, script: "./src-rev-commit.sh -b develop -v -l ./srcrev-commit.log")
    }
    sh "cat ./srcrev-commit.log >> ./${env.SOURCE_REVISION_UPDATE_LOG}"
}

def commitChanges(isNmUpdate, isCommunityUpdate) {
    String updatedLayers = ""
    if(isNmUpdate) {
        commitSourceRevisionChanges()
        updatedLayers += "netmodule"
    }
    if(isNmUpdate && isCommunityUpdate) { updatedLayers += " and " }
    if(isCommunityUpdate) { updatedLayers += "community" }
    sh(script: "git checkout ${env.BRANCH_NAME}")
    sh(script: "git add -u")
    stagedData = sh(returnStdout: true, script: "git diff --cached").trim()
    if("${stagedData}" == "") {
        println "everything up to date, nothing to commit"
        return
    }
    sshagent (credentials: [env.SSH_ID]) {
        sh(script: "git commit -m \"submodules: updated ${updatedLayers} hashes, triggered by ${env.TRIGGERED_USER}\" && git push")
    }
}
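Note on updateSubmodules() above: the community branch of the update feeds git submodule update with every submodule path that is not a meta-netmodule-* layer. A minimal sketch of the expansion, assuming (illustratively) that the only community submodules are bitbake and openembedded-core; the real list always comes from git submodule status:

    # what the embedded pipeline produces (illustrative output)
    git submodule status | grep -v "meta-netmodule-*" | sed 's/^ *//g' | cut -d' ' -f2
    #   bitbake
    #   openembedded-core
    # so the community update effectively runs:
    git submodule update --remote --rebase bitbake openembedded-core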
@ -12,6 +12,8 @@ SRCREV_linux-netmodule = "${AUTOREV}"
 SRCREV_nmubxlib = "${AUTOREV}"
 #./meta-netmodule-bsp/recipes-connectivity/gnss-mgr/gnss-mgr.bb
 SRCREV_gnss-mgr = "${AUTOREV}"
+#./meta-netmodule-bsp/recipes-connectivity/wlconf-bin/wlconf-bin_git.bb
+SRCREV_wlconf-bin = "${AUTOREV}"
 #./meta-netmodule-bsp/recipes-connectivity/wwan-config/wwan-config.bb
 SRCREV_wwan-config = "${AUTOREV}"
 #./meta-netmodule-bsp/recipes-connectivity/modemmanager/modemmanager_git.bb

@ -36,8 +38,10 @@ SRCREV_libnmapp = "${AUTOREV}"
 SRCREV_ssf-mgr = "${AUTOREV}"
 #./meta-netmodule-distro/recipes-tools/sys-mon/sys-mon_git.bb
 SRCREV_sys-mon = "${AUTOREV}"
-#./meta-netmodule-bsp/recipes-bsp/storage-info/storage-info.bb
-SRCREV_storage-info = "${AUTOREV}"
+#./meta-netmodule-distro/recipes-tools/scripts-nm/json2textlog_git.bb
+SRCREV_json2textlog = "${AUTOREV}"
+#./meta-netmodule-distro/recipes-tools/scripts-nm/scripts-nm_git.bb
+SRCREV_scripts-nm = "${AUTOREV}"
 #./meta-netmodule-distro/recipes-tools/battery-test/battery-test_git.bb
 SRCREV_battery-test = "${AUTOREV}"
 #./meta-netmodule-fct/recipes-testing/fct/fct.bb

@ -52,3 +56,5 @@ SRCREV_toast = "${AUTOREV}"
 SRCREV_gpsd_om = "${AUTOREV}"
 #./meta-netmodule-private/recipes-extended/hydra/hydra_1.0.0.bb
 SRCREV_hydra = "${AUTOREV}"
+#./meta-netmodule-bsp/recipes-bsp/fpga-image/fpga-image.bb
+SRCREV_fpga-image = "${AUTOREV}"
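The #-comment above each SRCREV entry is not just documentation: when this file is passed to src-rev.sh via -a (see the script added further down in this diff), getBBFiles() extracts exactly those paths to decide which .bb files to patch. A minimal sketch of that extraction:

    # the '#' lines become the list of recipes whose SRCREV gets bumped
    grep "#" autorev-packages.inc | cut -d'#' -f2
    #   ./meta-netmodule-bsp/recipes-connectivity/gnss-mgr/gnss-mgr.bb
    #   ./meta-netmodule-bsp/recipes-connectivity/wlconf-bin/wlconf-bin_git.bb
    #   ...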
bitbake
@ -1 +1 @@
-Subproject commit 0784db7dd0fef6f0621ad8d74372f44e87fef950
+Subproject commit b4117231bf070703b9375af4411bcd160e07fdae

@ -1 +1 @@
-../meta-netmodule-fct/conf
+meta/conf

@ -1 +0,0 @@
-../meta-netmodule-distro/conf
@ -0,0 +1,15 @@
if [ -z "$DISTRO" ]; then
    echo "No distro selected. Please source a helper script (env.*)"
fi

if [ -z "$MACHINE" ]; then
    . ./machine_select
fi

echo Building for machine $MACHINE, distro: $DISTRO

export DISTRO
export MACHINE
export OSTREE_OSNAME=nm-linux

. ./openembedded-core/oe-init-build-env build$BUILD_FOLDER_APPEND

@ -0,0 +1,3 @@
DISTRO="netmodule-linux"
BUILD_FOLDER_APPEND=""
. ./env.common

@ -0,0 +1,3 @@
DISTRO="netmodule-linux-fct"
BUILD_FOLDER_APPEND="-fct"
. ./env.common

@ -0,0 +1,3 @@
DISTRO="netmodule-linux-lava"
BUILD_FOLDER_APPEND="-fct"
. ./env.common
@ -1,4 +1,3 @@
 DISTRO="netmodule-linux-minimal"
 BUILD_FOLDER_APPEND="-minimal"
-env_path=${BASH_SOURCE:-$0}
-. $(dirname $env_path)/../common
+. ./env.common
@ -0,0 +1,3 @@
DISTRO="netmodule-linux-ostree"
BUILD_FOLDER_APPEND=""
. ./env.common

@ -0,0 +1,3 @@
DISTRO="netmodule-linux-ostree-vcu"
BUILD_FOLDER_APPEND=""
. ./env.common
@ -1,24 +0,0 @@
if [ -z "$DISTRO" ]; then
    echo "No distro selected. Please source a helper script (env/distro/*)"
    return
fi

if [ -z "$MACHINE" ]; then
    echo "No machine selected. Please source a helper script (env/machine/*)"
    return
fi

echo
echo Build parameters:
printf "\tMACHINE = $MACHINE\n"
printf "\tDISTRO = $DISTRO\n"
echo

export DISTRO
export MACHINE
export OSTREE_OSNAME=nm-linux

# Moving back to top yocto directory (env/..)
env_path=${BASH_SOURCE:-$0}
cd $(dirname $env_path)/..
source openembedded-core/oe-init-build-env build$BUILD_FOLDER_APPEND
@ -1,4 +0,0 @@
DISTRO="netmodule-linux"
BUILD_FOLDER_APPEND=""
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,4 +0,0 @@
DISTRO="netmodule-linux-fct"
BUILD_FOLDER_APPEND="-fct"
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,4 +0,0 @@
DISTRO="netmodule-linux-lava"
BUILD_FOLDER_APPEND="-fct"
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,4 +0,0 @@
DISTRO="netmodule-linux-ostree"
BUILD_FOLDER_APPEND=""
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,4 +0,0 @@
DISTRO="netmodule-linux-ostree-vcu"
BUILD_FOLDER_APPEND=""
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common
@ -1,3 +0,0 @@
MACHINE=am335x-nrhw16
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=armada-385-hw17
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=armada-385-nrhw18
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=am335x-nrhw20
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=am335x-nmhw21
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=imx8-nmhw23
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=am335x-nmhw24
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=am335x-hw25
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common

@ -1,3 +0,0 @@
MACHINE=am335x-hw26
env_path=${BASH_SOURCE:-$0}
. $(dirname $env_path)/../common
@ -4,7 +4,6 @@
 
 PS3='Please choose a machine: '
 HW16="nrhw16 - NB800"
-HW17="hw17 - NB2800"
 HW18="nrhw18 - NB1800"
 HW20="nrhw20 - NB1601"
 HW21="nmhw21 - VCU1"

@ -14,17 +13,13 @@ HW24="nrhw24 - NB800 facelift"
 HW25="hw25 - TC Router"
 HW26="hw26 - NG800"
 
-select opt in HW16 HW17 HW18 HW20 HW21 HW23 DLM HW24 HW25 HW26
+select opt in HW16 HW18 HW20 HW21 HW23 DLM HW24 HW25 HW26
 do
 case $opt in
 HW16)
 MACHINE=am335x-nrhw16
 break
 ;;
-HW17)
-MACHINE=armada-385-hw17
-break
-;;
 HW18)
 MACHINE=armada-385-nrhw18
 break
@ -1 +1 @@
-Subproject commit 3dbca0ef2ae1ea3862e540c59ad77adbcb419d57
+Subproject commit 727fd8df20c8ee58474ce15cd5e1459f14bee977

@ -1 +1 @@
-Subproject commit 901c12f911e4fa4ae351ca6da07e4b4dacd1d6f1
+Subproject commit 405c374f05efc48d39c009b0f7cf74281286b5d6

@ -1 +1 @@
-Subproject commit 30d95eb8c877bb655cfd11a5dd1f7ba0a533e2b7
+Subproject commit ab5d795f00cb08b275278bcccf8d9618e827214c

@ -1 +1 @@
-Subproject commit 7538f89cfdd7624df5c2a7ba9d3ceb01d8536671
+Subproject commit f2386004959ca7e311fccf083f301c0d53003a08

@ -1 +1 @@
-Subproject commit 8bd0b8ee7ade4aaec66d0a1dfe4d6aa65e8dd661
+Subproject commit b9300d6c34aa6a3b4b62d8edc9abdffc388415d2

@ -1 +1 @@
-Subproject commit afeb746c8e8d3b6daf5f05cdb1e692ea593db696
+Subproject commit b178890686dfe13ff9b98e6f33f27c8fd9adf110

@ -1 +1 @@
-Subproject commit 8ff12bfffcf0840d5518788a53d88d708ad3aae0
+Subproject commit 3cf22d15885cf0e3e16078705e9a20f3d21f12db

@ -1 +1 @@
-Subproject commit f2f5ca077baa1f08001cff9608ae59ed4dbeca3d
+Subproject commit 5d49b28570ed030924ed5d45fbced24d3cb6e588

@ -1 +1 @@
-Subproject commit 8e81d38048c953d0823abf04d5b2506cd988f0bb
+Subproject commit d12c44df66ca065a1526c7d6885e726cf50aab46
@ -0,0 +1,130 @@
#!/bin/bash

SCRIPT_PARAMS="$*"
SCRIPT_PATHNAME=$(realpath ${0})
SCRIPT_NAME=$(basename ${SCRIPT_PATHNAME})
SCRIPT_PATH=$(dirname ${SCRIPT_PATHNAME})

export LOGFILE=/dev/null
export BRANCH_NAME=develop
export IS_DUMMY=false
export IS_VERBOSE=false

#**********************************************************************************************
# local helper functions
#**********************************************************************************************
#----------------------------------------------------------------------------------------------
function printUsage()
{
    echo -e "\nUsage: ${SCRIPT_NAME} [OPTIONS]\n"
    echo -e ""
    echo -e " OPTIONS:"
    echo -e " -b|--branch=BRANCH_NAME commit to branch BRANCH_NAME (default = ${BRANCH_NAME})"
    echo -e " -d|--dummy-commit simulate the commit (log the changes, then revert instead of pushing)"
    echo -e " -l|--log=LOGFILE write some output to LOGFILE (default = $LOGFILE)"
    echo -e " -h|--help Show this help"
    echo -e " -v|--verbose Set script to verbose"
}
#----------------------------------------------------------------------------------------------
function logMessage()
{
    local msg="${1}"
    echo "${msg}" >> $LOGFILE
}
#----------------------------------------------------------------------------------------------
function printMessage()
{
    local msg="${1}"
    logMessage "${msg}"
    if [[ "${IS_VERBOSE}" == "false" ]]; then
        return
    fi
    echo "${msg}"
}
#----------------------------------------------------------------------------------------------
function updateLayerToHead() {
    local layer="${1}"
    local branch="${2}"

    logMessage "--> checking out branch ${BRANCH_NAME}"
    git checkout ${BRANCH_NAME} >> $LOGFILE 2>&1
    logMessage "--> pull changes ${BRANCH_NAME}"
    git pull >> $LOGFILE 2>&1
}

#**********************************************************************************************
# main
#**********************************************************************************************
O=$(getopt -o hb:l:vd --long help,branch:,log:,verbose,dummy-commit -- "$@") || exit 1
if [ $? != 0 ]; then
    echo "ERROR: Could not parse command line options"
    exit 1
fi

eval set -- "$O"
while true; do
    case "${1}" in
        -b|--branch)
            export BRANCH_NAME="${2}"
            shift 2
            ;;
        -v|--verbose)
            export IS_VERBOSE=true
            shift
            ;;
        -d|--dummy-commit)
            export IS_DUMMY=true
            shift
            ;;
        -l|--log)
            export LOGFILE="${2}"
            export LOGFILE=$(realpath "${LOGFILE}")
            shift 2
            ;;
        -h|--help)
            export IS_VERBOSE=false
            printUsage
            exit 0
            ;;
        --)
            shift
            break
            ;;
        *)
            printUsage; exit 0 ;;
    esac
done

echo "${SCRIPT_NAME} called with ${SCRIPT_PARAMS}" > $LOGFILE

for nmLayer in $(git status | grep "meta-netmodule" | cut -d':' -f2 | sed -e 's/^[ ]*//' | cut -d' ' -f1); do
    cd $nmLayer
    nbrChanges=$(git status | grep "modified:" | wc -l)
    printMessage "${nbrChanges} changes in layer ${nmLayer}"
    if [[ "${nbrChanges}" != "0" ]]; then
        logMessage "--> stash the changes first to checkout branch head..."
        git stash save >> $LOGFILE 2>&1
        updateLayerToHead "${nmLayer}" "${BRANCH_NAME}"
        logMessage "--> get changes back from stash..."
        git stash pop >> $LOGFILE 2>&1
        logMessage "--> adding tracked and changed files"
        git add -u >> $LOGFILE 2>&1
        printMessage "--> committing and pushing..."
        commitMsg="${nmLayer}: updated source revisions"
        if [[ "${IS_DUMMY}" == "true" ]]; then
            printMessage " dummy commit: msg='${commitMsg}', content:"
            git status | grep "modified" >> $LOGFILE 2>&1
            # revert changes to simulate a commit :-P
            git reset HEAD * > /dev/null
            git checkout * > /dev/null
        else
            git commit -m "${commitMsg}" >> $LOGFILE 2>&1
            git push >> $LOGFILE 2>&1
        fi
        printMessage "----------"
    fi
    cd ..
done

exit 0
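Typical invocations of src-rev-commit.sh: the second form is what the 'commit' stage of the submodule-update pipeline above runs; the dry-run form is an illustrative variant using the script's own -d option:

    # dry run: log what would be committed per meta-netmodule-* layer, then revert
    ./src-rev-commit.sh -b develop -v -d -l ./srcrev-commit.log
    # real run, as called from Jenkins
    ./src-rev-commit.sh -b develop -v -l ./srcrev-commit.log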
@ -0,0 +1,190 @@
#!/bin/bash

SCRIPT_PARAMS="$*"
SCRIPT_PATHNAME=$(realpath ${0})
SCRIPT_NAME=$(basename ${SCRIPT_PATHNAME})
SCRIPT_PATH=$(dirname ${SCRIPT_PATHNAME})
YOCTO_DIR="${YOCTO_DIR:-$SCRIPT_PATH}"

LOGFILE=/dev/null
IS_REPLACE_SRCREV=false
IS_VERBOSE=false
IS_DISPLAY=false
LIST=""

#**********************************************************************************************
# local helper functions
#**********************************************************************************************
#----------------------------------------------------------------------------------------------
function printUsage()
{
    echo -e "\nUsage: ${SCRIPT_NAME} [OPTIONS]\n"
    echo -e "find the source revision of the packages within the netmodule meta layers and"
    echo -e "display/replace them with the latest hashes."
    echo -e "NOTE: there is a list containing packages set to AUTOREV. Use option -a to handle"
    echo -e "only revisions of this list. Otherwise we might get incompatible versions of 3rd"
    echo -e "party packages.\n"
    echo -e ""
    echo -e " OPTIONS:"
    echo -e " -r|--replace replace SRCREVs"
    echo -e " -d|--display display found SRCREVs"
    echo -e " -a|--autorev-list=LIST handle only revisions of LIST (e.g. autorev-packages.inc)"
    echo -e " -l|--log=LOGFILE write command output to LOGFILE (default = $LOGFILE)"
    echo -e " -h|--help show this help"
    echo -e " -v|--verbose set script to verbose"
}
#----------------------------------------------------------------------------------------------
function logMessage()
{
    local msg="${1}"
    echo "${msg}" >> $LOGFILE
}
#----------------------------------------------------------------------------------------------
function printMessage()
{
    local msg="${1}"
    logMessage "${msg}"
    if [[ "${IS_VERBOSE}" == "false" ]]; then
        return
    fi
    echo "${msg}"
}
#----------------------------------------------------------------------------------------------
function checkingEnvironment()
{
    printMessage "> checking environment for devtool..."
    isEnvLoaded=$(which devtool | wc -l)
    logMessage "> isEnvLoaded=${isEnvLoaded}"
    if [[ "${isEnvLoaded}" == "0" ]]; then
        logMessage "Yocto environment not loaded (devtool not found) --> exiting"
        echo "Yocto environment not loaded (devtool not found) --> exiting"
        exit 1
    fi
}
#----------------------------------------------------------------------------------------------
function getBBFiles()
{
    if [[ "${LIST}" == "" ]]; then
        files=$(find ${YOCTO_DIR}/meta-netmodule* -name "*.bb" | xargs -i sh -c "grep -q SRCREV {} && echo {}")
    else
        files=$(cat $LIST | grep "#" | cut -d'#' -f2)
    fi
    echo "${files}"
}
#----------------------------------------------------------------------------------------------
function displayItem()
{
    local bbfile="${1}"
    local recipeName="${2}"
    local revision="${3}"

    printMessage "Recipe: $recipeName"
    printMessage "New Revision: $revision"
    printMessage "BB File: $bbfile"
    if [[ "${IS_DISPLAY}" == "true" && "${IS_VERBOSE}" == "false" ]]; then
        echo "Recipe: $recipeName"
        echo "New Revision: $revision"
        echo "BB File: $bbfile"
    fi
}


#**********************************************************************************************
# main
#**********************************************************************************************
O=$(getopt -o hl:a:vrd --long help,log:,autorev-list:,verbose,replace,display -- "$@")
if [ $? != 0 ]; then
    echo "ERROR: Could not parse command line options"
    exit 1
fi

eval set -- "$O"
while true; do
    case "${1}" in
        -v|--verbose)
            export IS_VERBOSE=true
            shift
            ;;
        -d|--display)
            export IS_DISPLAY=true
            shift
            ;;
        -r|--replace)
            export IS_REPLACE_SRCREV=true
            shift
            ;;
        -a|--autorev-list)
            export LIST="${2}"
            export LIST=$(realpath "${LIST}")
            shift 2
            ;;
        -l|--log)
            export LOGFILE="${2}"
            export LOGFILE=$(realpath "${LOGFILE}")
            shift 2
            ;;
        -h|--help)
            export IS_VERBOSE=false
            printUsage
            exit 0
            ;;
        --)
            shift
            break
            ;;
        *)
            printUsage; exit 0 ;;
    esac
done

echo "${SCRIPT_NAME} called with ${SCRIPT_PARAMS}" > $LOGFILE
checkingEnvironment

printMessage "> get bbfiles (LIST='${LIST}')..."
bbfiles=$(getBBFiles)
logMessage "${bbfiles}"

printMessage "> getting recipes residing in bbfiles..."
recipes=$(echo "$bbfiles" | xargs -i basename {} | sed 's/_.*//' | sed 's/\.bb//')
logMessage "${recipes}"

printMessage "> getting check-upgrade-status..."
newcommits=$(devtool check-upgrade-status $recipes 2>&1 | grep "new commits")
logMessage "${newcommits}"

IFS=$'\n'
for newcommit in $newcommits; do
    # We need to restore IFS for sed
    IFS=$' \t\n'
    # Get recipe name
    recipe_name=$(echo $newcommit | cut -d " " -f 2)
    # Get the last string in line
    newrev=$(echo $newcommit | sed 's/.* //')
    logMessage "> newCommit=${newcommit}; recipeName=${recipe_name}; newRev=${newrev}"

    # i acts as bbfile index like bbfile[i]
    i=1
    found=0
    for recipe in $recipes; do
        if [ "$recipe" == "$recipe_name" ]; then
            bbfile=$(echo $bbfiles | cut -d " " -f$i)
            displayItem "${bbfile}" "${recipe}" "${newrev}"
            found=1
            if [ "${IS_REPLACE_SRCREV}" == "true" ]; then
                printMessage " --> updating ${recipe} in ${bbfile} to ${newrev}"
                sed -i "s/SRCREV.*/SRCREV ?= \"$newrev\"/g" $bbfile
            fi
            break
        fi
        i=$((i+1))
    done
    if [ "$found" == "0" ]; then
        logMessage "Recipe ${recipe_name} not found --> exiting"
        echo "Recipe ${recipe_name} not found"
        exit 1
    fi
    logMessage "--------------------"
done

logMessage "> all recipes handled"
exit 0
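Typical invocation of src-rev.sh, as wired up in Jenkinsfile_Common and in the submodule-update pipeline above; the Yocto environment must be sourced first so that devtool is on the PATH, and the script runs from the repository top level:

    # display and replace the SRCREVs listed in autorev-packages.inc
    . ./env.image-ostree > /dev/null && cd ../ && \
        ./src-rev.sh -v -d -r -l ./srcrev.log -a ./autorev-packages.inc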