From dfc7eea01521282bc758345b189645903ff0ca06 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 12 Nov 2024 17:29:31 +0000 Subject: [PATCH 01/11] Bot Updating Templated Files --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 575b7ce..77fa72f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -884,7 +884,7 @@ pipeline { echo '{"tag_name":"'${META_TAG}'",\ "target_commitish": "main",\ "name": "'${META_TAG}'",\ - "body": "**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Remote Changes:**\\n\\n' > start + "body": "**CI Report:**\\n\\n'${CI_URL:-N/A}'\\n\\n**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Remote Changes:**\\n\\n' > start printf '","draft": false,"prerelease": false}' >> releasebody.json paste -d'\\0' start releasebody.json > releasebody.json.done curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done''' From 1753e9a9d39dbcb14baf5b33d5160f24fcac9081 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 12 Nov 2024 17:30:52 +0000 Subject: [PATCH 02/11] Bot Updating Templated Files --- .github/CONTRIBUTING.md | 4 ++-- .github/workflows/package_trigger_scheduler.yml | 5 +++-- README.md | 5 +++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 270ae37..3392aa4 100755 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -105,10 +105,10 @@ docker build \ -t linuxserver/unifi-network-application:latest . ``` -The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static` +The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static` ```bash -docker run --rm --privileged multiarch/qemu-user-static:register --reset +docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset ``` Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`. 
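To round out the contribution notes in the hunk above, here is a minimal local-build sketch (not part of any patch) that combines the two steps the text describes: registering the qemu handlers, then pointing `docker build` at `Dockerfile.aarch64`. The output tag is illustrative, and the flags simply mirror the x86_64 example earlier in the same file.

```bash
# One-off: register qemu binfmt handlers so arm64 build stages can run on x86_64
docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset

# Then build the ARM variant by selecting the aarch64 Dockerfile
docker build \
  --no-cache \
  --pull \
  -f Dockerfile.aarch64 \
  -t linuxserver/unifi-network-application:arm64v8-latest .
```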
diff --git a/.github/workflows/package_trigger_scheduler.yml b/.github/workflows/package_trigger_scheduler.yml index 294591e..3881143 100755 --- a/.github/workflows/package_trigger_scheduler.yml +++ b/.github/workflows/package_trigger_scheduler.yml @@ -70,13 +70,14 @@ jobs: if [[ -n "${triggered_branches}" ]] || [[ -n "${skipped_branches}" ]]; then if [[ -n "${triggered_branches}" ]]; then NOTIFY_BRANCHES="**Triggered:** ${triggered_branches} \n" + NOTIFY_BUILD_URL="**Build URL:** https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-unifi-network-application/activity/ \n" + echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****" fi if [[ -n "${skipped_branches}" ]]; then NOTIFY_BRANCHES="${NOTIFY_BRANCHES}**Skipped:** ${skipped_branches} \n" fi - echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****" echo "**** Notifying Discord ****" curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903, - "description": "**Package Check Build(s) Triggered for unifi-network-application** \n'"${NOTIFY_BRANCHES}"'**Build URL:** '"https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-unifi-network-application/activity/"' \n"}], + "description": "**Package Check Build(s) for unifi-network-application** \n'"${NOTIFY_BRANCHES}"''"${NOTIFY_BUILD_URL}"'"}], "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }} fi diff --git a/README.md b/README.md index 8d2439d..c2df8cf 100644 --- a/README.md +++ b/README.md @@ -148,6 +148,7 @@ The simplest migration approach is to take a full backup of your existing instal You can then start up the new container with a clean `/config` mount (and a database container configured), and perform a restore using the setup wizard. + ### Strict reverse proxies This image uses a self-signed certificate by default. This naturally means the scheme is `https`. @@ -405,10 +406,10 @@ docker build \ -t lscr.io/linuxserver/unifi-network-application:latest . ``` -The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static` +The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static` ```bash -docker run --rm --privileged multiarch/qemu-user-static:register --reset +docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset ``` Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`. 
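As a companion to the workflow change in PATCH 02/11, the following is a small standalone sketch (not part of any patch) of how the Discord embed ends up being assembled once `NOTIFY_BUILD_URL` is split out from `NOTIFY_BRANCHES`. The branch values and webhook URL are placeholder assumptions; the JSON layout and shell quoting mirror the hunk above.

```bash
#!/usr/bin/env bash
# Sketch: compose the Discord embed description from the triggered/skipped
# branch lists plus the build URL, then POST it to the webhook.
triggered_branches="main "                                   # assumed example value
skipped_branches=""                                          # assumed example value
DISCORD_WEBHOOK="https://discord.com/api/webhooks/example"   # placeholder

NOTIFY_BRANCHES=""
NOTIFY_BUILD_URL=""
if [[ -n "${triggered_branches}" ]]; then
  NOTIFY_BRANCHES="**Triggered:** ${triggered_branches} \n"
  NOTIFY_BUILD_URL="**Build URL:** https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-unifi-network-application/activity/ \n"
fi
if [[ -n "${skipped_branches}" ]]; then
  NOTIFY_BRANCHES="${NOTIFY_BRANCHES}**Skipped:** ${skipped_branches} \n"
fi

# Single-quoted JSON with the two shell variables spliced in, as the workflow does.
curl -X POST -H "Content-Type: application/json" --data '{"embeds": [{"color": 9802903, "description": "**Package Check Build(s) for unifi-network-application** \n'"${NOTIFY_BRANCHES}"''"${NOTIFY_BUILD_URL}"'"}], "username": "Github Actions"}' "${DISCORD_WEBHOOK}"
```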
From e42e6b419dea21eacd5e67c75e58666ca51152f0 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 12 Nov 2024 17:33:49 +0000 Subject: [PATCH 03/11] Bot Updating Package Versions --- package_versions.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package_versions.txt b/package_versions.txt index be8673a..5e1bd8d 100755 --- a/package_versions.txt +++ b/package_versions.txt @@ -141,7 +141,7 @@ jmdns 3.4.1 jav jna 5.12.1 java-archive joda-time 2.12.7 java-archive jq 1.7.1-3build1 deb -jrt-fs 17.0.12 java-archive +jrt-fs 17.0.13 java-archive json 20231013 java-archive jsr305 3.0.1 java-archive jsr305 3.0.2 java-archive @@ -258,7 +258,7 @@ mount 2.39.3-9ubuntu6.1 deb ncurses-base 6.4+20240113-1ubuntu2 deb ncurses-bin 6.4+20240113-1ubuntu2 deb netcat-openbsd 1.226-1ubuntu2 deb -openjdk-17-jre-headless 17.0.12+7-1ubuntu2~24.04 deb +openjdk-17-jre-headless 17.0.13+11-2ubuntu1~24.04 deb openssh 1.0 java-archive openssl 3.0.13-0ubuntu3.4 deb org.eclipse.paho.client.mqttv3 1.1.0 java-archive From 683806afc120418129b21c13c6cde6a8d8d3bdf2 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 3 Dec 2024 17:34:17 +0000 Subject: [PATCH 04/11] Bot Updating Templated Files --- Jenkinsfile | 268 ++++++++++++++++++++++++++++++++++------------------ 1 file changed, 176 insertions(+), 92 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 77fa72f..80d3574 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -8,7 +8,7 @@ pipeline { } // Input to determine if this is a package check parameters { - string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK') + string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK') } // Configuration for the variables used for this specific repo environment { @@ -191,6 +191,7 @@ pipeline { env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER env.META_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN + env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache' } } } @@ -215,6 +216,7 @@ pipeline { env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/' + env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache' } } } @@ -239,6 +241,7 @@ pipeline { env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/' + env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache' } } } @@ -335,6 +338,35 @@ pipeline { else echo "No templates to delete" fi + echo "Starting Stage 2.5 - Update init diagram" + if ! 
grep -q 'init_diagram:' readme-vars.yml; then + echo "Adding the key 'init_diagram' to readme-vars.yml" + sed -i '\\|^#.*changelog.*$|d' readme-vars.yml + sed -i 's|^changelogs:|# init diagram\\ninit_diagram:\\n\\n# changelog\\nchangelogs:|' readme-vars.yml + fi + mkdir -p ${TEMPDIR}/d2 + docker run --rm -v ${TEMPDIR}/d2:/output -e PUID=$(id -u) -e PGID=$(id -g) -e RAW="true" ghcr.io/linuxserver/d2-builder:latest ${CONTAINER_NAME}:latest + ls -al ${TEMPDIR}/d2 + yq -ei ".init_diagram |= load_str(\\"${TEMPDIR}/d2/${CONTAINER_NAME}-latest.d2\\")" readme-vars.yml + if [[ $(md5sum readme-vars.yml | cut -c1-8) != $(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/readme-vars.yml | cut -c1-8) ]]; then + echo "'init_diagram' has been updated. Updating repo and exiting build, new one will trigger based on commit." + mkdir -p ${TEMPDIR}/repo + git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO} + cd ${TEMPDIR}/repo/${LS_REPO} + git checkout -f main + cp ${WORKSPACE}/readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/readme-vars.yml + git add readme-vars.yml + git commit -m 'Bot Updating Templated Files' + git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git main + git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git main + echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER} + echo "Updating templates and exiting build, new one will trigger based on commit" + rm -Rf ${TEMPDIR} + exit 0 + else + echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER} + echo "Init diagram is unchanged" + fi echo "Starting Stage 3 - Update templates" CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8) cd ${TEMPDIR}/docker-${CONTAINER_NAME} @@ -543,8 +575,40 @@ pipeline { --label \"org.opencontainers.image.title=Unifi-network-application\" \ --label \"org.opencontainers.image.description=The [Unifi-network-application](https://ui.com/) software is a powerful, enterprise wireless software engine ideal for high-density client deployments requiring low latency and high uptime performance.\" \ --no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \ - --provenance=false --sbom=false \ + --provenance=false --sbom=false --builder=container --load \ --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." + sh '''#! /bin/bash + set -e + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + docker tag ${IMAGE}:${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} + done + ''' + withCredentials([ + [ + $class: 'UsernamePasswordMultiBinding', + credentialsId: 'Quay.io-Robot', + usernameVariable: 'QUAYUSER', + passwordVariable: 'QUAYPASS' + ] + ]) { + retry_backoff(5,5) { + sh '''#! 
/bin/bash + set -e + echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin + echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin + echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin + echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin + if [[ "${PACKAGE_CHECK}" != "true" ]]; then + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} & + done + wait + fi + ''' + } + } } } // Build MultiArch Docker containers for push to LS Repo @@ -575,8 +639,40 @@ pipeline { --label \"org.opencontainers.image.title=Unifi-network-application\" \ --label \"org.opencontainers.image.description=The [Unifi-network-application](https://ui.com/) software is a powerful, enterprise wireless software engine ideal for high-density client deployments requiring low latency and high uptime performance.\" \ --no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \ - --provenance=false --sbom=false \ + --provenance=false --sbom=false --builder=container --load \ --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." + sh '''#! /bin/bash + set -e + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + docker tag ${IMAGE}:amd64-${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} + done + ''' + withCredentials([ + [ + $class: 'UsernamePasswordMultiBinding', + credentialsId: 'Quay.io-Robot', + usernameVariable: 'QUAYUSER', + passwordVariable: 'QUAYPASS' + ] + ]) { + retry_backoff(5,5) { + sh '''#! /bin/bash + set -e + echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin + echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin + echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin + echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin + if [[ "${PACKAGE_CHECK}" != "true" ]]; then + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} & + done + wait + fi + ''' + } + } } } stage('Build ARM64') { @@ -585,10 +681,6 @@ pipeline { } steps { echo "Running on node: ${NODE_NAME}" - echo 'Logging into Github' - sh '''#! /bin/bash - echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin - ''' sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.aarch64" sh "docker buildx build \ --label \"org.opencontainers.image.created=${GITHUB_DATE}\" \ @@ -604,18 +696,47 @@ pipeline { --label \"org.opencontainers.image.title=Unifi-network-application\" \ --label \"org.opencontainers.image.description=The [Unifi-network-application](https://ui.com/) software is a powerful, enterprise wireless software engine ideal for high-density client deployments requiring low latency and high uptime performance.\" \ --no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \ - --provenance=false --sbom=false \ + --provenance=false --sbom=false --builder=container --load \ --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." - sh "docker tag ${IMAGE}:arm64v8-${META_TAG} ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}" - retry_backoff(5,5) { - sh "docker push ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}" + sh '''#! 
/bin/bash + set -e + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + docker tag ${IMAGE}:arm64v8-${META_TAG} ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} + done + ''' + withCredentials([ + [ + $class: 'UsernamePasswordMultiBinding', + credentialsId: 'Quay.io-Robot', + usernameVariable: 'QUAYUSER', + passwordVariable: 'QUAYPASS' + ] + ]) { + retry_backoff(5,5) { + sh '''#! /bin/bash + set -e + echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin + echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin + echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin + echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin + if [[ "${PACKAGE_CHECK}" != "true" ]]; then + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + docker push ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} & + done + wait + fi + ''' + } } sh '''#! /bin/bash containers=$(docker ps -aq) if [[ -n "${containers}" ]]; then docker stop ${containers} fi - docker system prune -af --volumes || : ''' + docker system prune -af --volumes || : + ''' } } } @@ -765,37 +886,23 @@ pipeline { environment name: 'EXIT_STATUS', value: '' } steps { - withCredentials([ - [ - $class: 'UsernamePasswordMultiBinding', - credentialsId: 'Quay.io-Robot', - usernameVariable: 'QUAYUSER', - passwordVariable: 'QUAYPASS' - ] - ]) { - retry_backoff(5,5) { - sh '''#! /bin/bash - set -e - echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin - echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin - echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin - echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin - for PUSHIMAGE in "${GITHUBIMAGE}" "${GITLABIMAGE}" "${QUAYIMAGE}" "${IMAGE}"; do - docker tag ${IMAGE}:${META_TAG} ${PUSHIMAGE}:${META_TAG} - docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:latest - docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${EXT_RELEASE_TAG} - if [ -n "${SEMVER}" ]; then - docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${SEMVER} - fi - docker push ${PUSHIMAGE}:latest - docker push ${PUSHIMAGE}:${META_TAG} - docker push ${PUSHIMAGE}:${EXT_RELEASE_TAG} - if [ -n "${SEMVER}" ]; then - docker push ${PUSHIMAGE}:${SEMVER} - fi + retry_backoff(5,5) { + sh '''#! /bin/bash + set -e + for PUSHIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do + [[ ${PUSHIMAGE%%/*} =~ \\. ]] && PUSHIMAGEPLUS="${PUSHIMAGE}" || PUSHIMAGEPLUS="docker.io/${PUSHIMAGE}" + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + if [[ "${PUSHIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then + CACHEIMAGE=${i} + fi done - ''' - } + docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${META_TAG} -t ${PUSHIMAGE}:latest -t ${PUSHIMAGE}:${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} + if [ -n "${SEMVER}" ]; then + docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} + fi + done + ''' } } } @@ -806,57 +913,34 @@ pipeline { environment name: 'EXIT_STATUS', value: '' } steps { - withCredentials([ - [ - $class: 'UsernamePasswordMultiBinding', - credentialsId: 'Quay.io-Robot', - usernameVariable: 'QUAYUSER', - passwordVariable: 'QUAYPASS' - ] - ]) { - retry_backoff(5,5) { - sh '''#! 
/bin/bash - set -e - echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin - echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin - echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin - echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin - if [ "${CI}" == "false" ]; then - docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64 - docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG} + retry_backoff(5,5) { + sh '''#! /bin/bash + set -e + for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do + [[ ${MANIFESTIMAGE%%/*} =~ \\. ]] && MANIFESTIMAGEPLUS="${MANIFESTIMAGE}" || MANIFESTIMAGEPLUS="docker.io/${MANIFESTIMAGE}" + IFS=',' read -ra CACHE <<< "$BUILDCACHE" + for i in "${CACHE[@]}"; do + if [[ "${MANIFESTIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then + CACHEIMAGE=${i} + fi + done + docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${META_TAG} -t ${MANIFESTIMAGE}:amd64-latest -t ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} + docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${META_TAG} -t ${MANIFESTIMAGE}:arm64v8-latest -t ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} + if [ -n "${SEMVER}" ]; then + docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} + docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${SEMVER} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} fi - for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do - docker tag ${IMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} - docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-latest - docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} - docker tag ${IMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG} - docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-latest - docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} - if [ -n "${SEMVER}" ]; then - docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${SEMVER} - docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${SEMVER} - fi - docker push ${MANIFESTIMAGE}:amd64-${META_TAG} - docker push ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} - docker push ${MANIFESTIMAGE}:amd64-latest - docker push ${MANIFESTIMAGE}:arm64v8-${META_TAG} - docker push ${MANIFESTIMAGE}:arm64v8-latest - docker push ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} - if [ -n "${SEMVER}" ]; then - docker push ${MANIFESTIMAGE}:amd64-${SEMVER} - docker push ${MANIFESTIMAGE}:arm64v8-${SEMVER} - fi - done - for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do - docker buildx imagetools create -t ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest - docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG} - docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} - if [ -n 
"${SEMVER}" ]; then - docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER} - fi - done - ''' - } + done + for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do + docker buildx imagetools create -t ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest + docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG} + + docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} + if [ -n "${SEMVER}" ]; then + docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER} + fi + done + ''' } } } From 199a596d83ca0d9471ef702fd8e55d405c793486 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 3 Dec 2024 17:35:49 +0000 Subject: [PATCH 05/11] Bot Updating Templated Files --- readme-vars.yml | 103 ++++++++++++++++++++++++++++++++---------------- 1 file changed, 69 insertions(+), 34 deletions(-) diff --git a/readme-vars.yml b/readme-vars.yml index 97d1325..adf62ea 100644 --- a/readme-vars.yml +++ b/readme-vars.yml @@ -6,49 +6,42 @@ project_url: "https://ui.com/" project_logo: "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/unifi-banner.png" project_blurb: "The [{{ project_name|capitalize }}]({{ project_url }}) software is a powerful, enterprise wireless software engine ideal for high-density client deployments requiring low latency and high uptime performance." project_lsio_github_repo_url: "https://github.com/linuxserver/docker-{{ project_name }}" - # supported architectures available_architectures: - - { arch: "{{ arch_x86_64 }}", tag: "amd64-latest"} - - { arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"} - + - {arch: "{{ arch_x86_64 }}", tag: "amd64-latest"} + - {arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"} # container parameters param_container_name: "{{ project_name }}" param_usage_include_vols: true param_volumes: - - { vol_path: "/config", vol_host_path: "/path/to/{{ project_name }}/data", desc: "Persistent config files" } - + - {vol_path: "/config", vol_host_path: "/path/to/{{ project_name }}/data", desc: "Persistent config files"} param_usage_include_ports: true param_ports: - - { external_port: "8443", internal_port: "8443", port_desc: "Unifi web admin port" } - - { external_port: "3478", internal_port: "3478/udp", port_desc: "Unifi STUN port" } - - { external_port: "10001", internal_port: "10001/udp", port_desc: "Required for AP discovery" } - - { external_port: "8080", internal_port: "8080", port_desc: "Required for device communication" } - + - {external_port: "8443", internal_port: "8443", port_desc: "Unifi web admin port"} + - {external_port: "3478", internal_port: "3478/udp", port_desc: "Unifi STUN port"} + - {external_port: "10001", internal_port: "10001/udp", port_desc: "Required for AP discovery"} + - {external_port: "8080", internal_port: "8080", port_desc: "Required for device communication"} param_usage_include_env: true param_env_vars: - - { env_var: "MONGO_USER", env_value: "unifi", desc: "Mongodb Username. Only evaluated on first run. **Special characters must be [url encoded](https://en.wikipedia.org/wiki/Percent-encoding)**." } - - { env_var: "MONGO_PASS", env_value: "", desc: "Mongodb Password. Only evaluated on first run. 
**Special characters must be [url encoded](https://en.wikipedia.org/wiki/Percent-encoding)**." } - - { env_var: "MONGO_HOST", env_value: "unifi-db", desc: "Mongodb Hostname. Only evaluated on first run." } - - { env_var: "MONGO_PORT", env_value: "27017", desc: "Mongodb Port. Only evaluated on first run." } - - { env_var: "MONGO_DBNAME", env_value: "unifi", desc: "Mongodb Database Name (stats DB is automatically suffixed with `_stat`). Only evaluated on first run." } - - { env_var: "MONGO_AUTHSOURCE", env_value: "admin", desc: "Mongodb [authSource](https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource). For Atlas set to `admin`. Only evaluated on first run." } - + - {env_var: "MONGO_USER", env_value: "unifi", desc: "Mongodb Username. Only evaluated on first run. **Special characters must be [url encoded](https://en.wikipedia.org/wiki/Percent-encoding)**."} + - {env_var: "MONGO_PASS", env_value: "", desc: "Mongodb Password. Only evaluated on first run. **Special characters must be [url encoded](https://en.wikipedia.org/wiki/Percent-encoding)**."} + - {env_var: "MONGO_HOST", env_value: "unifi-db", desc: "Mongodb Hostname. Only evaluated on first run."} + - {env_var: "MONGO_PORT", env_value: "27017", desc: "Mongodb Port. Only evaluated on first run."} + - {env_var: "MONGO_DBNAME", env_value: "unifi", desc: "Mongodb Database Name (stats DB is automatically suffixed with `_stat`). Only evaluated on first run."} + - {env_var: "MONGO_AUTHSOURCE", env_value: "admin", desc: "Mongodb [authSource](https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource). For Atlas set to `admin`. Only evaluated on first run."} # optional container parameters opt_param_usage_include_env: true opt_param_env_vars: - - { env_var: "MEM_LIMIT", env_value: "1024", desc: "Optionally change the Java memory limit (in Megabytes). Set to `default` to reset to default" } - - { env_var: "MEM_STARTUP", env_value: "1024", desc: "Optionally change the Java initial/minimum memory (in Megabytes). Set to `default` to reset to default" } - - { env_var: "MONGO_TLS", env_value: "", desc: "Mongodb enable [TLS](https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.tls). Only evaluated on first run." } - + - {env_var: "MEM_LIMIT", env_value: "1024", desc: "Optionally change the Java memory limit (in Megabytes). Set to `default` to reset to default"} + - {env_var: "MEM_STARTUP", env_value: "1024", desc: "Optionally change the Java initial/minimum memory (in Megabytes). Set to `default` to reset to default"} + - {env_var: "MONGO_TLS", env_value: "", desc: "Mongodb enable [TLS](https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.tls). 
Only evaluated on first run."} opt_param_usage_include_ports: true opt_param_ports: - - { external_port: "1900", internal_port: "1900/udp", port_desc: "Required for `Make controller discoverable on L2 network` option" } - - { external_port: "8843", internal_port: "8843", port_desc: "Unifi guest portal HTTPS redirect port" } - - { external_port: "8880", internal_port: "8880", port_desc: "Unifi guest portal HTTP redirect port" } - - { external_port: "6789", internal_port: "6789", port_desc: "For mobile throughput test" } - - { external_port: "5514", internal_port: "5514/udp", port_desc: "Remote syslog port" } - + - {external_port: "1900", internal_port: "1900/udp", port_desc: "Required for `Make controller discoverable on L2 network` option"} + - {external_port: "8843", internal_port: "8843", port_desc: "Unifi guest portal HTTPS redirect port"} + - {external_port: "8880", internal_port: "8880", port_desc: "Unifi guest portal HTTP redirect port"} + - {external_port: "6789", internal_port: "6789", port_desc: "For mobile throughput test"} + - {external_port: "5514", internal_port: "5514/udp", port_desc: "Remote syslog port"} # application setup block app_setup_block_enabled: true app_setup_block: | @@ -141,11 +134,53 @@ app_setup_block: | The simplest migration approach is to take a full backup of your existing install, including history, from the Unifi-Controller web UI, then shut down the old container. You can then start up the new container with a clean `/config` mount (and a database container configured), and perform a restore using the setup wizard. - +# init diagram +init_diagram: | + "unifi-network-application:latest": { + docker-mods + base { + fix-attr +\nlegacy cont-init + } + docker-mods -> base + legacy-services + custom services + init-services -> legacy-services + init-services -> custom services + custom services -> legacy-services + legacy-services -> ci-service-check + init-migrations -> init-adduser + init-os-end -> init-config + init-config -> init-config-end + init-unifi-network-application-config -> init-config-end + init-os-end -> init-crontab-config + init-mods-end -> init-custom-files + base -> init-envfile + base -> init-migrations + base -> init-mods + init-config-end -> init-mods + init-mods -> init-mods-end + init-mods-package-install -> init-mods-end + init-mods -> init-mods-package-install + base -> init-os-end + init-adduser -> init-os-end + init-envfile -> init-os-end + init-migrations -> init-os-end + init-custom-files -> init-services + init-mods-end -> init-services + init-config -> init-unifi-network-application-config + init-services -> svc-cron + svc-cron -> legacy-services + init-services -> svc-unifi-network-application + svc-unifi-network-application -> legacy-services + } + Base Images: { + "baseimage-ubuntu:noble" + } + "unifi-network-application:latest" <- Base Images # changelog changelogs: - - { date: "11.08.24:", desc: "**Important**: The mongodb init instructions have been updated to enable auth ([RBAC](https://www.mongodb.com/docs/manual/core/authorization/#role-based-access-control)). We have been notified that if RBAC is not enabled, the official mongodb container allows remote access to the db contents over port 27017 without credentials. If you set up the mongodb container with the old instructions we provided, you should not map or expose port 27017. If you would like to enable auth, the easiest way is to create new instances of both unifi and mongodb with the new instructions and restore unifi from a backup." 
} - - { date: "11.08.24:", desc: "Rebase to Ubuntu Noble." } - - { date: "04.03.24:", desc: "Install from zip package instead of deb." } - - { date: "17.10.23:", desc: "Add environment variables for TLS and authSource to support Atlas and new MongoDB versions." } - - { date: "05.09.23:", desc: "Initial release." } + - {date: "11.08.24:", desc: "**Important**: The mongodb init instructions have been updated to enable auth ([RBAC](https://www.mongodb.com/docs/manual/core/authorization/#role-based-access-control)). We have been notified that if RBAC is not enabled, the official mongodb container allows remote access to the db contents over port 27017 without credentials. If you set up the mongodb container with the old instructions we provided, you should not map or expose port 27017. If you would like to enable auth, the easiest way is to create new instances of both unifi and mongodb with the new instructions and restore unifi from a backup."} + - {date: "11.08.24:", desc: "Rebase to Ubuntu Noble."} + - {date: "04.03.24:", desc: "Install from zip package instead of deb."} + - {date: "17.10.23:", desc: "Add environment variables for TLS and authSource to support Atlas and new MongoDB versions."} + - {date: "05.09.23:", desc: "Initial release."} From d030680133b6e07b2c2641839e0c5de5b28d49ba Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 3 Dec 2024 17:37:10 +0000 Subject: [PATCH 06/11] Bot Updating Templated Files --- README.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index c2df8cf..50b0884 100644 --- a/README.md +++ b/README.md @@ -148,7 +148,6 @@ The simplest migration approach is to take a full backup of your existing instal You can then start up the new container with a clean `/config` mount (and a database container configured), and perform a restore using the setup wizard. - ### Strict reverse proxies This image uses a self-signed certificate by default. This naturally means the scheme is `https`. @@ -158,6 +157,9 @@ If you are using a reverse proxy which validates certificates, you need to [disa To help you get started creating a container from this image you can either use docker-compose or the docker cli. +>[!NOTE] +>Unless a parameter is flaged as 'optional', it is *mandatory* and a value must be provided. 
+ ### docker-compose (recommended, [click here for more info](https://docs.linuxserver.io/general/docker-compose)) ```yaml @@ -231,10 +233,10 @@ Containers are configured using parameters passed at runtime (such as those abov | Parameter | Function | | :----: | --- | -| `-p 8443` | Unifi web admin port | -| `-p 3478/udp` | Unifi STUN port | -| `-p 10001/udp` | Required for AP discovery | -| `-p 8080` | Required for device communication | +| `-p 8443:8443` | Unifi web admin port | +| `-p 3478:3478/udp` | Unifi STUN port | +| `-p 10001:10001/udp` | Required for AP discovery | +| `-p 8080:8080` | Required for device communication | | `-p 1900/udp` | Required for `Make controller discoverable on L2 network` option | | `-p 8843` | Unifi guest portal HTTPS redirect port | | `-p 8880` | Unifi guest portal HTTP redirect port | From fa90814925f06901b0dc1a9e7cc33bf061bfb125 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 10 Dec 2024 17:34:10 +0000 Subject: [PATCH 07/11] Bot Updating Templated Files --- readme-vars.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/readme-vars.yml b/readme-vars.yml index adf62ea..11d4711 100644 --- a/readme-vars.yml +++ b/readme-vars.yml @@ -151,22 +151,18 @@ init_diagram: | init-migrations -> init-adduser init-os-end -> init-config init-config -> init-config-end + init-crontab-config -> init-config-end init-unifi-network-application-config -> init-config-end - init-os-end -> init-crontab-config + init-config -> init-crontab-config init-mods-end -> init-custom-files base -> init-envfile base -> init-migrations - base -> init-mods init-config-end -> init-mods - init-mods -> init-mods-end init-mods-package-install -> init-mods-end init-mods -> init-mods-package-install - base -> init-os-end init-adduser -> init-os-end init-envfile -> init-os-end - init-migrations -> init-os-end init-custom-files -> init-services - init-mods-end -> init-services init-config -> init-unifi-network-application-config init-services -> svc-cron svc-cron -> legacy-services From a554c80adef3401aea75077a89b9bcc6f5d47334 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 10 Dec 2024 17:35:26 +0000 Subject: [PATCH 08/11] Bot Updating Templated Files --- .github/workflows/external_trigger.yml | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/.github/workflows/external_trigger.yml b/.github/workflows/external_trigger.yml index eaff76e..e0c7cb3 100755 --- a/.github/workflows/external_trigger.yml +++ b/.github/workflows/external_trigger.yml @@ -43,16 +43,18 @@ jobs: token=$(curl -sX GET \ "https://ghcr.io/token?scope=repository%3Alinuxserver%2Funifi-network-application%3Apull" \ | jq -r '.token') - multidigest=$(curl -s \ - --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ - --header "Authorization: Bearer ${token}" \ - "https://ghcr.io/v2/${image}/manifests/${tag}" \ - | jq -r 'first(.manifests[].digest)') - digest=$(curl -s \ - --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ - --header "Authorization: Bearer ${token}" \ - "https://ghcr.io/v2/${image}/manifests/${multidigest}" \ - | jq -r '.config.digest') + multidigest=$(curl -s \ + --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ + --header "Accept: application/vnd.oci.image.index.v1+json" \ + --header "Authorization: Bearer ${token}" \ + "https://ghcr.io/v2/${image}/manifests/${tag}") + multidigest=$(jq -r ".manifests[] | select(.platform.architecture == \"amd64\").digest?" 
<<< "${multidigest}") + digest=$(curl -s \ + --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ + --header "Accept: application/vnd.oci.image.manifest.v1+json" \ + --header "Authorization: Bearer ${token}" \ + "https://ghcr.io/v2/${image}/manifests/${multidigest}" \ + | jq -r '.config.digest') image_info=$(curl -sL \ --header "Authorization: Bearer ${token}" \ "https://ghcr.io/v2/${image}/blobs/${digest}") From 950358ef4e32a87f604e2f449f80197b3a91db88 Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 10 Dec 2024 17:38:57 +0000 Subject: [PATCH 09/11] Bot Updating Package Versions --- package_versions.txt | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/package_versions.txt b/package_versions.txt index 5e1bd8d..1c2c890 100755 --- a/package_versions.txt +++ b/package_versions.txt @@ -48,7 +48,7 @@ coreutils 9.4-3ubuntu6 deb cron 3.0pl1-184ubuntu2 deb cron-daemon-common 3.0pl1-184ubuntu2 deb cron4j 2.2.5 java-archive -curl 8.5.0-2ubuntu10.4 deb +curl 8.5.0-2ubuntu10.5 deb dash 0.5.12-6ubuntu5 deb debconf 1.5.86ubuntu1 deb debianutils 5.17build1 deb @@ -150,14 +150,14 @@ jstun 0.7.4 jav jsvc 1.0.15-11build1 deb jul-to-slf4j 2.0.13 java-archive keyboxd 2.4.4-2ubuntu17 deb -krb5-locales 1.20.1-6ubuntu2.1 deb +krb5-locales 1.20.1-6ubuntu2.2 deb lazysodium-java 5.1.4 java-archive -libacl1 2.3.2-1build1 deb +libacl1 2.3.2-1build1.1 deb libapt-pkg6.0t64 2.7.14build2 deb libassuan0 2.5.6-1build1 deb libattr1 1:2.5.2-1build1 deb -libaudit-common 1:3.1.2-2.1build1 deb -libaudit1 1:3.1.2-2.1build1 deb +libaudit-common 1:3.1.2-2.1build1.1 deb +libaudit1 1:3.1.2-2.1build1.1 deb libblkid1 2.39.3-9ubuntu6.1 deb libbrotli1 1.1.0-2build2 deb libbsd0 0.12.1-1build1 deb @@ -169,7 +169,7 @@ libcap2 1:2.66-5ubuntu2 deb libcom-err2 1.47.0-2.4~exp1ubuntu4.1 deb libcommons-daemon-java 1.0.15-11build1 deb libcrypt1 1:4.4.36-4build1 deb -libcurl4t64 8.5.0-2ubuntu10.4 deb +libcurl4t64 8.5.0-2ubuntu10.5 deb libdb5.3t64 5.3.28+dfsg2-7 deb libdebconfclient0 0.271ubuntu3 deb libext2fs2t64 1.47.0-2.4~exp1ubuntu4.1 deb @@ -179,20 +179,20 @@ libgcrypt20 1.10.3-2build1 deb libgmp10 2:6.3.0+dfsg-2ubuntu6 deb libgnutls30t64 3.8.3-1.1ubuntu3.2 deb libgpg-error0 1.47-3build2 deb -libgssapi-krb5-2 1.20.1-6ubuntu2.1 deb +libgssapi-krb5-2 1.20.1-6ubuntu2.2 deb libhogweed6t64 3.9.1-2.2build1.1 deb libidn2-0 2.3.7-2build1 deb libjpeg-turbo8 2.1.5-2ubuntu2 deb libjpeg8 8c-2ubuntu11 deb libjq1 1.7.1-3build1 deb -libk5crypto3 1.20.1-6ubuntu2.1 deb +libk5crypto3 1.20.1-6ubuntu2.2 deb libkeyutils1 1.6.3-3build1 deb -libkrb5-3 1.20.1-6ubuntu2.1 deb -libkrb5support0 1.20.1-6ubuntu2.1 deb +libkrb5-3 1.20.1-6ubuntu2.2 deb +libkrb5support0 1.20.1-6ubuntu2.2 deb libksba8 1.6.6-1build1 deb liblcms2-2 2.14-2build1 deb -libldap-common 2.6.7+dfsg-1~exp1ubuntu8 deb -libldap2 2.6.7+dfsg-1~exp1ubuntu8 deb +libldap-common 2.6.7+dfsg-1~exp1ubuntu8.1 deb +libldap2 2.6.7+dfsg-1~exp1ubuntu8.1 deb liblz4-1 1.9.4-1build1.1 deb liblzma5 5.6.1+really5.4.5-1build0.1 deb libmd0 1.1.0-2build1 deb From 2fc98aa77e47f2f36e39f6769a1f001174a8b2ea Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 17 Dec 2024 17:31:51 +0000 Subject: [PATCH 10/11] Bot Updating Templated Files --- Jenkinsfile | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 80d3574..d8645db 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -575,7 +575,7 @@ pipeline { --label \"org.opencontainers.image.title=Unifi-network-application\" \ --label 
\"org.opencontainers.image.description=The [Unifi-network-application](https://ui.com/) software is a powerful, enterprise wireless software engine ideal for high-density client deployments requiring low latency and high uptime performance.\" \ --no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \ - --provenance=false --sbom=false --builder=container --load \ + --provenance=true --sbom=true --builder=container --load \ --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." sh '''#! /bin/bash set -e @@ -604,7 +604,9 @@ pipeline { for i in "${CACHE[@]}"; do docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} & done - wait + for p in $(jobs -p); do + wait "$p" || { echo "job $p failed" >&2; exit 1; } + done fi ''' } @@ -639,7 +641,7 @@ pipeline { --label \"org.opencontainers.image.title=Unifi-network-application\" \ --label \"org.opencontainers.image.description=The [Unifi-network-application](https://ui.com/) software is a powerful, enterprise wireless software engine ideal for high-density client deployments requiring low latency and high uptime performance.\" \ --no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \ - --provenance=false --sbom=false --builder=container --load \ + --provenance=true --sbom=true --builder=container --load \ --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." sh '''#! /bin/bash set -e @@ -668,7 +670,9 @@ pipeline { for i in "${CACHE[@]}"; do docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} & done - wait + for p in $(jobs -p); do + wait "$p" || { echo "job $p failed" >&2; exit 1; } + done fi ''' } @@ -696,7 +700,7 @@ pipeline { --label \"org.opencontainers.image.title=Unifi-network-application\" \ --label \"org.opencontainers.image.description=The [Unifi-network-application](https://ui.com/) software is a powerful, enterprise wireless software engine ideal for high-density client deployments requiring low latency and high uptime performance.\" \ --no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \ - --provenance=false --sbom=false --builder=container --load \ + --provenance=true --sbom=true --builder=container --load \ --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." sh '''#! /bin/bash set -e @@ -725,7 +729,9 @@ pipeline { for i in "${CACHE[@]}"; do docker push ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} & done - wait + for p in $(jobs -p); do + wait "$p" || { echo "job $p failed" >&2; exit 1; } + done fi ''' } From fb56c19e156895e5d3a2f25c1a39f448dbaae58a Mon Sep 17 00:00:00 2001 From: LinuxServer-CI Date: Tue, 17 Dec 2024 17:33:14 +0000 Subject: [PATCH 11/11] Bot Updating Templated Files --- .github/workflows/external_trigger.yml | 33 ++++++++++++++----- .../workflows/package_trigger_scheduler.yml | 27 ++++++++++++--- 2 files changed, 47 insertions(+), 13 deletions(-) diff --git a/.github/workflows/external_trigger.yml b/.github/workflows/external_trigger.yml index e0c7cb3..2ebde07 100755 --- a/.github/workflows/external_trigger.yml +++ b/.github/workflows/external_trigger.yml @@ -48,13 +48,30 @@ jobs: --header "Accept: application/vnd.oci.image.index.v1+json" \ --header "Authorization: Bearer ${token}" \ "https://ghcr.io/v2/${image}/manifests/${tag}") - multidigest=$(jq -r ".manifests[] | select(.platform.architecture == \"amd64\").digest?" 
<<< "${multidigest}") - digest=$(curl -s \ - --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ - --header "Accept: application/vnd.oci.image.manifest.v1+json" \ - --header "Authorization: Bearer ${token}" \ - "https://ghcr.io/v2/${image}/manifests/${multidigest}" \ - | jq -r '.config.digest') + if jq -e '.layers // empty' <<< "${multidigest}" >/dev/null 2>&1; then + # If there's a layer element it's a single-arch manifest so just get that digest + digest=$(jq -r '.config.digest' <<< "${multidigest}") + else + # Otherwise it's multi-arch or has manifest annotations + if jq -e '.manifests[]?.annotations // empty' <<< "${multidigest}" >/dev/null 2>&1; then + # Check for manifest annotations and delete if found + multidigest=$(jq 'del(.manifests[] | select(.annotations))' <<< "${multidigest}") + fi + if [[ $(jq '.manifests | length' <<< "${multidigest}") -gt 1 ]]; then + # If there's still more than one digest, it's multi-arch + multidigest=$(jq -r ".manifests[] | select(.platform.architecture == \"amd64\").digest?" <<< "${multidigest}") + else + # Otherwise it's single arch + multidigest=$(jq -r ".manifests[].digest?" <<< "${multidigest}") + fi + if digest=$(curl -s \ + --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ + --header "Accept: application/vnd.oci.image.manifest.v1+json" \ + --header "Authorization: Bearer ${token}" \ + "https://ghcr.io/v2/${image}/manifests/${multidigest}"); then + digest=$(jq -r '.config.digest' <<< "${digest}"); + fi + fi image_info=$(curl -sL \ --header "Authorization: Bearer ${token}" \ "https://ghcr.io/v2/${image}/blobs/${digest}") @@ -92,7 +109,7 @@ jobs: else printf "\n## Trigger new build\n\n" >> $GITHUB_STEP_SUMMARY echo "New version \`${EXT_RELEASE}\` found; old version was \`${IMAGE_VERSION}\`. Triggering new build" >> $GITHUB_STEP_SUMMARY - if "${artifacts_found}" == "true" ]]; then + if [[ "${artifacts_found}" == "true" ]]; then echo "All artifacts seem to be uploaded." >> $GITHUB_STEP_SUMMARY fi response=$(curl -iX POST \ diff --git a/.github/workflows/package_trigger_scheduler.yml b/.github/workflows/package_trigger_scheduler.yml index 3881143..94eddb8 100755 --- a/.github/workflows/package_trigger_scheduler.yml +++ b/.github/workflows/package_trigger_scheduler.yml @@ -27,9 +27,18 @@ jobs: fi printf "\n## Evaluating \`%s\`\n\n" ${br} >> $GITHUB_STEP_SUMMARY JENKINS_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-unifi-network-application/${br}/jenkins-vars.yml) - if [[ "${br}" == $(yq -r '.ls_branch' <<< "${JENKINS_VARS}") ]]; then + if ! curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-unifi-network-application/${br}/Jenkinsfile >/dev/null 2>&1; then + echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY + echo "> No Jenkinsfile found. Branch is either deprecated or is an early dev branch." >> $GITHUB_STEP_SUMMARY + skipped_branches="${skipped_branches}${br} " + elif [[ "${br}" == $(yq -r '.ls_branch' <<< "${JENKINS_VARS}") ]]; then echo "Branch appears to be live; checking workflow." >> $GITHUB_STEP_SUMMARY - if [[ $(yq -r '.skip_package_check' <<< "${JENKINS_VARS}") == "true" ]]; then + README_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-unifi-network-application/${br}/readme-vars.yml) + if [[ $(yq -r '.project_deprecation_status' <<< "${README_VARS}") == "true" ]]; then + echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY + echo "> Branch appears to be deprecated; skipping trigger." 
>> $GITHUB_STEP_SUMMARY + skipped_branches="${skipped_branches}${br} " + elif [[ $(yq -r '.skip_package_check' <<< "${JENKINS_VARS}") == "true" ]]; then echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY echo "> Skipping branch ${br} due to \`skip_package_check\` being set in \`jenkins-vars.yml\`." >> $GITHUB_STEP_SUMMARY skipped_branches="${skipped_branches}${br} " @@ -37,7 +46,7 @@ jobs: echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY echo "> Github organizational variable \`SKIP_PACKAGE_TRIGGER\` contains \`unifi-network-application_${br}\`; skipping trigger." >> $GITHUB_STEP_SUMMARY skipped_branches="${skipped_branches}${br} " - elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-unifi-network-application/job/${br}/lastBuild/api/json | jq -r '.building') == "true" ]; then + elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-unifi-network-application/job/${br}/lastBuild/api/json | jq -r '.building' 2>/dev/null) == "true" ]; then echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY echo "> There already seems to be an active build on Jenkins; skipping package trigger for ${br}" >> $GITHUB_STEP_SUMMARY skipped_branches="${skipped_branches}${br} " @@ -49,6 +58,11 @@ jobs: response=$(curl -iX POST \ https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-unifi-network-application/job/${br}/buildWithParameters?PACKAGE_CHECK=true \ --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|") + if [[ -z "${response}" ]]; then + echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY + echo "> Jenkins build could not be triggered. Skipping branch." + continue + fi echo "Jenkins [job queue url](${response%$'\r'})" >> $GITHUB_STEP_SUMMARY echo "Sleeping 10 seconds until job starts" >> $GITHUB_STEP_SUMMARY sleep 10 @@ -56,11 +70,14 @@ jobs: buildurl="${buildurl%$'\r'}" echo "Jenkins job [build url](${buildurl})" >> $GITHUB_STEP_SUMMARY echo "Attempting to change the Jenkins job description" >> $GITHUB_STEP_SUMMARY - curl -iX POST \ + if ! curl -ifX POST \ "${buildurl}submitDescription" \ --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \ --data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \ - --data-urlencode "Submit=Submit" + --data-urlencode "Submit=Submit"; then + echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY + echo "> Unable to change the Jenkins job description." + fi sleep 20 fi else
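As a closing illustration of the manifest handling introduced in PATCH 11/11's `external_trigger.yml`, here is a simplified, standalone sketch of the same ghcr.io lookup: request an anonymous pull token, fetch the manifest, treat a response with `.layers` as a single-arch manifest, otherwise drop attestation entries and pick the amd64 digest from the index, then read the image config blob. The tag is illustrative, and the label path in the final `jq` call is an assumption about what callers typically read from the config blob.

```bash
#!/usr/bin/env bash
set -euo pipefail

image="linuxserver/unifi-network-application"
tag="latest"   # illustrative tag

# Anonymous pull token for ghcr.io
token=$(curl -sX GET \
  "https://ghcr.io/token?scope=repository%3Alinuxserver%2Funifi-network-application%3Apull" \
  | jq -r '.token')

# Fetch the manifest; this may be a single manifest or a multi-arch index
manifest=$(curl -s \
  --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
  --header "Accept: application/vnd.oci.image.index.v1+json" \
  --header "Authorization: Bearer ${token}" \
  "https://ghcr.io/v2/${image}/manifests/${tag}")

if jq -e '.layers // empty' <<< "${manifest}" >/dev/null 2>&1; then
  # Single-arch manifest: the config digest is available directly
  digest=$(jq -r '.config.digest' <<< "${manifest}")
else
  # Index: drop attestation entries (they carry annotations), then take the amd64 digest
  manifest=$(jq 'del(.manifests[] | select(.annotations))' <<< "${manifest}")
  arch_digest=$(jq -r '.manifests[] | select(.platform.architecture == "amd64").digest?' <<< "${manifest}")
  digest=$(curl -s \
    --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
    --header "Accept: application/vnd.oci.image.manifest.v1+json" \
    --header "Authorization: Bearer ${token}" \
    "https://ghcr.io/v2/${image}/manifests/${arch_digest}" | jq -r '.config.digest')
fi

# Image config blob: OCI labels (e.g. org.opencontainers.image.version) live here
curl -sL --header "Authorization: Bearer ${token}" \
  "https://ghcr.io/v2/${image}/blobs/${digest}" | jq -r '.config.Labels'
```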