Compare commits

...

79 Commits

Author SHA1 Message Date
LinuxServer-CI  77a7ccdf86  Bot Updating Package Versions  2025-04-20 06:53:51 +00:00
LinuxServer-CI  f08df1a776  Bot Updating Package Versions  2025-04-13 06:53:41 +00:00
LinuxServer-CI  0aa7148d7b  Bot Updating Package Versions  2025-04-06 06:55:31 +00:00
LinuxServer-CI  a5e9256477  Bot Updating Package Versions  2025-03-30 06:53:31 +00:00
LinuxServer-CI  03bc9ef5ab  Bot Updating Package Versions  2025-03-23 06:53:52 +00:00
LinuxServer-CI  7749f9c906  Bot Updating Package Versions  2025-03-16 06:52:54 +00:00
LinuxServer-CI  981e170052  Bot Updating Package Versions  2025-03-09 06:51:40 +00:00
LinuxServer-CI  9e884da12c  Bot Updating Package Versions  2025-03-02 06:52:02 +00:00
LinuxServer-CI  f3cf3e3707  Bot Updating Package Versions  2025-02-23 06:52:05 +00:00
LinuxServer-CI  cce5a5ad54  Bot Updating Package Versions  2025-02-19 20:49:27 +00:00
LinuxServer-CI  83125a9274  Bot Updating Package Versions  2025-02-16 06:54:11 +00:00
LinuxServer-CI  36a8a856d4  Bot Updating Templated Files  2025-02-16 06:51:23 +00:00
LinuxServer-CI  cf06829803  Bot Updating Package Versions  2025-02-09 06:57:36 +00:00
LinuxServer-CI  c978fd753a  Bot Updating Templated Files  2025-02-04 18:26:14 +00:00
Adam  c7b0cbc559  Merge pull request #371 from linuxserver/port-fix-master - Fix port in readme (master)  2025-02-04 18:22:55 +00:00
LinuxServer-CI  043d41954c  Bot Updating Package Versions  2025-02-02 06:56:50 +00:00
LinuxServer-CI  a9836226ee  Bot Updating Templated Files  2025-02-02 06:53:12 +00:00
LinuxServer-CI  69a30d94a3  Bot Updating Templated Files  2025-02-02 06:51:20 +00:00
thespad  128b53f719  Fix port in readme  2025-01-30 11:38:18 +00:00
LinuxServer-CI  616ea2bfe5  Bot Updating Package Versions  2025-01-26 06:52:12 +00:00
quietsy  831d9b8639  Merge pull request #366 from linuxserver/add-project-categories - Add categories to readme-vars.yml  2025-01-22 22:13:54 +02:00
quietsy  2b157467db  Add categories to readme-vars.yml  2025-01-22 20:43:35 +02:00
LinuxServer-CI  9c6d8af1e5  Bot Updating Package Versions  2025-01-19 06:52:13 +00:00
LinuxServer-CI  89e76af14b  Bot Updating Package Versions  2025-01-12 06:52:29 +00:00
LinuxServer-CI  3979c2c2f0  Bot Updating Package Versions  2025-01-05 06:53:42 +00:00
LinuxServer-CI  307f76772a  Bot Updating Package Versions  2025-01-01 22:48:14 +00:00
LinuxServer-CI  67bd8527ae  Bot Updating Templated Files  2025-01-01 22:45:15 +00:00
Adam  4178b625a0  Merge pull request #357 from linuxserver/master-nonroot  2025-01-01 22:43:48 +00:00
LinuxServer-CI  eaf55cf967  Bot Updating Package Versions  2024-12-29 06:52:08 +00:00
LinuxServer-CI  ce620b642c  Bot Updating Package Versions  2024-12-22 06:52:11 +00:00
thespad  016be7b8bd  Support nonroot operation  2024-12-20 20:20:49 +00:00
LinuxServer-CI  b6f4c2c2ae  Bot Updating Package Versions  2024-12-19 15:03:53 +00:00
LinuxServer-CI  771d61a5d6  Bot Updating Templated Files  2024-12-19 14:56:19 +00:00
LinuxServer-CI  bd4cee6e36  Bot Updating Templated Files  2024-12-19 14:53:23 +00:00
LinuxServer-CI  7c74f95173  Bot Updating Templated Files  2024-12-19 14:49:50 +00:00
LinuxServer-CI  803acb713c  Bot Updating Package Versions  2024-12-01 06:54:58 +00:00
LinuxServer-CI  c2134670e6  Bot Updating Package Versions  2024-11-24 06:53:53 +00:00
LinuxServer-CI  4cb4668b2d  Bot Updating Package Versions  2024-11-20 20:48:47 +00:00
LinuxServer-CI  84a6c72d65  Bot Updating Package Versions  2024-11-17 06:54:45 +00:00
LinuxServer-CI  56b84f7735  Bot Updating Package Versions  2024-11-10 06:54:38 +00:00
LinuxServer-CI  4535a9f1d8  Bot Updating Templated Files  2024-11-10 06:52:09 +00:00
LinuxServer-CI  e10e37f0a7  Bot Updating Templated Files  2024-11-10 06:50:48 +00:00
LinuxServer-CI  30ccc1c2a0  Bot Updating Package Versions  2024-11-02 05:48:35 +00:00
LinuxServer-CI  938bb587d7  Bot Updating Package Versions  2024-10-27 06:51:49 +00:00
LinuxServer-CI  67e8286cfe  Bot Updating Package Versions  2024-10-20 06:51:59 +00:00
LinuxServer-CI  24d1630688  Bot Updating Package Versions  2024-10-13 06:51:45 +00:00
LinuxServer-CI  a13e4722f7  Bot Updating Package Versions  2024-10-10 16:51:54 +00:00
LinuxServer-CI  95f4150538  Bot Updating Package Versions  2024-10-08 10:11:44 +00:00
Adam  efa08e0e83  Merge pull request #333 from linuxserver/master-dynamic-bind-address  2024-10-08 11:09:04 +01:00
thespad  4573a98e14  Read webui bind address from config if present.  2024-10-07 20:16:53 +01:00
LinuxServer-CI  4437b92c16  Bot Updating Package Versions  2024-10-06 06:52:08 +00:00
LinuxServer-CI  d3597ffea1  Bot Updating Package Versions  2024-09-30 17:49:32 +00:00
LinuxServer-CI  2f793f5456  Bot Updating Package Versions  2024-09-30 15:49:14 +00:00
LinuxServer-CI  50cd7a6a07  Bot Updating Package Versions  2024-09-29 06:56:52 +00:00
LinuxServer-CI  a6ba44ffa0  Bot Updating Templated Files  2024-09-29 06:54:04 +00:00
LinuxServer-CI  fa9f5c9cb3  Bot Updating Templated Files  2024-09-29 06:52:47 +00:00
LinuxServer-CI  4dcdd36ff4  Bot Updating Templated Files  2024-09-29 06:51:31 +00:00
LinuxServer-CI  d252212ad4  Bot Updating Package Versions  2024-09-22 06:52:00 +00:00
LinuxServer-CI  9ff3a279b6  Bot Updating Package Versions  2024-09-17 16:52:20 +00:00
LinuxServer-CI  8d61598a62  Bot Updating Package Versions  2024-09-15 06:52:34 +00:00
LinuxServer-CI  29fa20e177  Bot Updating Package Versions  2024-09-08 06:51:44 +00:00
LinuxServer-CI  a28fe290e2  Bot Updating Package Versions  2024-08-25 06:52:34 +00:00
LinuxServer-CI  e8bbab67d9  Bot Updating Package Versions  2024-08-20 13:49:51 +00:00
LinuxServer-CI  71ee7f2648  Bot Updating Package Versions  2024-08-18 06:54:49 +00:00
LinuxServer-CI  9968a694df  Bot Updating Templated Files  2024-08-18 06:52:32 +00:00
LinuxServer-CI  b192447473  Bot Updating Templated Files  2024-08-18 06:51:02 +00:00
LinuxServer-CI  7363dd5dda  Bot Updating Package Versions  2024-08-11 06:52:03 +00:00
LinuxServer-CI  9cb07c720e  Bot Updating Package Versions  2024-08-04 06:51:55 +00:00
LinuxServer-CI  843a85bbfb  Bot Updating Package Versions  2024-07-28 06:52:11 +00:00
LinuxServer-CI  6b9f69684e  Bot Updating Package Versions  2024-07-17 12:24:04 +00:00
LinuxServer-CI  91490c2651  Bot Updating Templated Files  2024-07-17 12:20:53 +00:00
Adam  0c7cf285b6  Merge pull request #317 from linuxserver/master-cli - Restore qbittorrent-cli (master)  2024-07-17 13:18:35 +01:00
thespad  41cd38a43c  Fix static version in url  2024-07-17 11:43:28 +01:00
thespad  5e4d830356  Use netcore 6 version  2024-07-17 11:22:18 +01:00
thespad  7b4c3d8977  Fix arm cli path  2024-07-17 11:18:58 +01:00
thespad  5f06988083  Restore qbittorrent-cli  2024-07-17 11:16:02 +01:00
LinuxServer-CI  fe0e7b895a  Bot Updating Package Versions  2024-07-14 06:55:19 +00:00
LinuxServer-CI  95f5fee13f  Bot Updating Templated Files  2024-07-14 06:53:26 +00:00
LinuxServer-CI  e1158e49f9  Bot Updating Templated Files  2024-07-14 06:52:10 +00:00
16 changed files with 778 additions and 416 deletions

.github/CONTRIBUTING.md

@@ -6,7 +6,7 @@
 * Read, and fill the Pull Request template
 * If this is a fix for a typo (in code, documentation, or the README) please file an issue and let us sort it out. We do not need a PR
 * If the PR is addressing an existing issue include, closes #\<issue number>, in the body of the PR commit message
-* If you want to discuss changes, you can also bring it up in [#dev-talk](https://discordapp.com/channels/354974912613449730/757585807061155840) in our [Discord server](https://discord.gg/YWrKVTn)
+* If you want to discuss changes, you can also bring it up in [#dev-talk](https://discordapp.com/channels/354974912613449730/757585807061155840) in our [Discord server](https://linuxserver.io/discord)

 ## Common files

@@ -105,10 +105,10 @@ docker build \
   -t linuxserver/qbittorrent:latest .
 ```
-The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static`
+The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static`
 ```bash
-docker run --rm --privileged multiarch/qemu-user-static:register --reset
+docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset
 ```
 Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`.

.github/ISSUE_TEMPLATE/config.yml

@@ -1,7 +1,7 @@
blank_issues_enabled: false
contact_links:
  - name: Discord chat support
-    url: https://discord.gg/YWrKVTn
+    url: https://linuxserver.io/discord
    about: Realtime support / chat with the community and the team.
  - name: Discourse discussion forum

.github/workflows/external_trigger.yml

@@ -11,19 +11,32 @@ jobs:
      - name: External Trigger
        if: github.ref == 'refs/heads/master'
+        env:
+          SKIP_EXTERNAL_TRIGGER: ${{ vars.SKIP_EXTERNAL_TRIGGER }}
        run: |
-          if [ -n "${{ secrets.PAUSE_EXTERNAL_TRIGGER_QBITTORRENT_MASTER }}" ]; then
-            echo "**** Github secret PAUSE_EXTERNAL_TRIGGER_QBITTORRENT_MASTER is set; skipping trigger. ****"
-            echo "Github secret \`PAUSE_EXTERNAL_TRIGGER_QBITTORRENT_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
+          printf "# External trigger for docker-qbittorrent\n\n" >> $GITHUB_STEP_SUMMARY
+          if grep -q "^qbittorrent_master_" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
+            echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
+            echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` contains \`qbittorrent_master_\`; will skip trigger if version matches." >> $GITHUB_STEP_SUMMARY
+          elif grep -q "^qbittorrent_master" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` contains \`qbittorrent_master\`; skipping trigger." >> $GITHUB_STEP_SUMMARY
            exit 0
          fi
-          echo "**** External trigger running off of master branch. To disable this trigger, set a Github secret named \"PAUSE_EXTERNAL_TRIGGER_QBITTORRENT_MASTER\". ****"
-          echo "External trigger running off of master branch. To disable this trigger, set a Github secret named \`PAUSE_EXTERNAL_TRIGGER_QBITTORRENT_MASTER\`" >> $GITHUB_STEP_SUMMARY
-          echo "**** Retrieving external version ****"
+          echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
+          echo "> External trigger running off of master branch. To disable this trigger, add \`qbittorrent_master\` into the Github organizational variable \`SKIP_EXTERNAL_TRIGGER\`." >> $GITHUB_STEP_SUMMARY
+          printf "\n## Retrieving external version\n\n" >> $GITHUB_STEP_SUMMARY
          EXT_RELEASE=$(curl -sL "http://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz" | tar -xz -C /tmp \
            && awk '/^P:'"qbittorrent-nox"'$/,/V:/' /tmp/APKINDEX | sed -n 2p | sed 's/^V://')
+          echo "Type is \`alpine_repo\`" >> $GITHUB_STEP_SUMMARY
+          if grep -q "^qbittorrent_master_${EXT_RELEASE}" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` matches current external release; skipping trigger." >> $GITHUB_STEP_SUMMARY
+            exit 0
+          fi
          if [ -z "${EXT_RELEASE}" ] || [ "${EXT_RELEASE}" == "null" ]; then
-            echo "**** Can't retrieve external version, exiting ****"
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "> Can't retrieve external version, exiting" >> $GITHUB_STEP_SUMMARY
            FAILURE_REASON="Can't retrieve external version for qbittorrent branch master"
            GHA_TRIGGER_URL="https://github.com/linuxserver/docker-qbittorrent/actions/runs/${{ github.run_id }}"
            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680,
@@ -32,24 +45,42 @@ jobs:
            exit 1
          fi
          EXT_RELEASE=$(echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g')
-          echo "**** External version: ${EXT_RELEASE} ****"
-          echo "External version: ${EXT_RELEASE}" >> $GITHUB_STEP_SUMMARY
-          echo "**** Retrieving last pushed version ****"
+          echo "External version: \`${EXT_RELEASE}\`" >> $GITHUB_STEP_SUMMARY
+          echo "Retrieving last pushed version" >> $GITHUB_STEP_SUMMARY
          image="linuxserver/qbittorrent"
          tag="latest"
          token=$(curl -sX GET \
            "https://ghcr.io/token?scope=repository%3Alinuxserver%2Fqbittorrent%3Apull" \
            | jq -r '.token')
          multidigest=$(curl -s \
            --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+            --header "Accept: application/vnd.oci.image.index.v1+json" \
            --header "Authorization: Bearer ${token}" \
-            "https://ghcr.io/v2/${image}/manifests/${tag}" \
-            | jq -r 'first(.manifests[].digest)')
-          digest=$(curl -s \
-            --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
-            --header "Authorization: Bearer ${token}" \
-            "https://ghcr.io/v2/${image}/manifests/${multidigest}" \
-            | jq -r '.config.digest')
+            "https://ghcr.io/v2/${image}/manifests/${tag}")
+          if jq -e '.layers // empty' <<< "${multidigest}" >/dev/null 2>&1; then
+            # If there's a layer element it's a single-arch manifest so just get that digest
+            digest=$(jq -r '.config.digest' <<< "${multidigest}")
+          else
+            # Otherwise it's multi-arch or has manifest annotations
+            if jq -e '.manifests[]?.annotations // empty' <<< "${multidigest}" >/dev/null 2>&1; then
+              # Check for manifest annotations and delete if found
+              multidigest=$(jq 'del(.manifests[] | select(.annotations))' <<< "${multidigest}")
+            fi
+            if [[ $(jq '.manifests | length' <<< "${multidigest}") -gt 1 ]]; then
+              # If there's still more than one digest, it's multi-arch
+              multidigest=$(jq -r ".manifests[] | select(.platform.architecture == \"amd64\").digest?" <<< "${multidigest}")
+            else
+              # Otherwise it's single arch
+              multidigest=$(jq -r ".manifests[].digest?" <<< "${multidigest}")
+            fi
+            if digest=$(curl -s \
+              --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+              --header "Accept: application/vnd.oci.image.manifest.v1+json" \
+              --header "Authorization: Bearer ${token}" \
+              "https://ghcr.io/v2/${image}/manifests/${multidigest}"); then
+              digest=$(jq -r '.config.digest' <<< "${digest}");
+            fi
+          fi
          image_info=$(curl -sL \
            --header "Authorization: Bearer ${token}" \
            "https://ghcr.io/v2/${image}/blobs/${digest}")
@@ -61,53 +92,61 @@ jobs:
          IMAGE_RELEASE=$(echo ${image_info} | jq -r '.Labels.build_version' | awk '{print $3}')
          IMAGE_VERSION=$(echo ${IMAGE_RELEASE} | awk -F'-ls' '{print $1}')
          if [ -z "${IMAGE_VERSION}" ]; then
-            echo "**** Can't retrieve last pushed version, exiting ****"
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "Can't retrieve last pushed version, exiting" >> $GITHUB_STEP_SUMMARY
            FAILURE_REASON="Can't retrieve last pushed version for qbittorrent tag latest"
            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680,
            "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
            exit 1
          fi
-          echo "**** Last pushed version: ${IMAGE_VERSION} ****"
-          echo "Last pushed version: ${IMAGE_VERSION}" >> $GITHUB_STEP_SUMMARY
+          echo "Last pushed version: \`${IMAGE_VERSION}\`" >> $GITHUB_STEP_SUMMARY
          if [ "${EXT_RELEASE}" == "${IMAGE_VERSION}" ]; then
-            echo "**** Version ${EXT_RELEASE} already pushed, exiting ****"
-            echo "Version ${EXT_RELEASE} already pushed, exiting" >> $GITHUB_STEP_SUMMARY
+            echo "Version \`${EXT_RELEASE}\` already pushed, exiting" >> $GITHUB_STEP_SUMMARY
            exit 0
          elif [[ $(curl -sL "http://dl-cdn.alpinelinux.org/alpine/edge/community/aarch64/APKINDEX.tar.gz" | tar -xz -C /tmp && awk '/^P:'"qbittorrent-nox"'$/,/V:/' /tmp/APKINDEX | sed -n 2p | sed 's/^V://') != "${EXT_RELEASE}" ]]; then
-            echo "**** New version ${EXT_RELEASE} found; but not all arch repos updated yet; exiting ****"
-            echo "New version ${EXT_RELEASE} found; but not all arch repos updated yet; exiting" >> $GITHUB_STEP_SUMMARY
+            echo "New version \`${EXT_RELEASE}\` found; but not all arch repos updated yet; exiting" >> $GITHUB_STEP_SUMMARY
            FAILURE_REASON="New version ${EXT_RELEASE} for qbittorrent tag latest is detected, however not all arch repos are updated yet. Will try again later."
            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
            "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
            exit 0
          elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-qbittorrent/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
-            echo "**** New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting ****"
-            echo "New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting" >> $GITHUB_STEP_SUMMARY
+            echo "New version \`${EXT_RELEASE}\` found; but there already seems to be an active build on Jenkins; exiting" >> $GITHUB_STEP_SUMMARY
            exit 0
          else
-            echo "**** New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build ****"
-            echo "New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build" >> $GITHUB_STEP_SUMMARY
-            response=$(curl -iX POST \
-              https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-qbittorrent/job/master/buildWithParameters?PACKAGE_CHECK=false \
-              --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
-            echo "**** Jenkins job queue url: ${response%$'\r'} ****"
-            echo "**** Sleeping 10 seconds until job starts ****"
-            sleep 10
-            buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
-            buildurl="${buildurl%$'\r'}"
-            echo "**** Jenkins job build url: ${buildurl} ****"
-            echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
-            echo "**** Attempting to change the Jenkins job description ****"
-            curl -iX POST \
-              "${buildurl}submitDescription" \
-              --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
-              --data-urlencode "description=GHA external trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
-              --data-urlencode "Submit=Submit"
-            echo "**** Notifying Discord ****"
-            TRIGGER_REASON="A version change was detected for qbittorrent tag latest. Old version:${IMAGE_VERSION} New version:${EXT_RELEASE}"
-            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
-            "description": "**Build Triggered** \n**Reason:** '"${TRIGGER_REASON}"' \n**Build URL:** '"${buildurl}display/redirect"' \n"}],
-            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+            if [[ "${artifacts_found}" == "false" ]]; then
+              echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+              echo "> New version detected, but not all artifacts are published yet; skipping trigger" >> $GITHUB_STEP_SUMMARY
+              FAILURE_REASON="New version ${EXT_RELEASE} for qbittorrent tag latest is detected, however not all artifacts are uploaded to upstream release yet. Will try again later."
+              curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
+              "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
+              "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+            else
+              printf "\n## Trigger new build\n\n" >> $GITHUB_STEP_SUMMARY
+              echo "New version \`${EXT_RELEASE}\` found; old version was \`${IMAGE_VERSION}\`. Triggering new build" >> $GITHUB_STEP_SUMMARY
+              if [[ "${artifacts_found}" == "true" ]]; then
+                echo "All artifacts seem to be uploaded." >> $GITHUB_STEP_SUMMARY
+              fi
+              response=$(curl -iX POST \
+                https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-qbittorrent/job/master/buildWithParameters?PACKAGE_CHECK=false \
+                --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
+              echo "Jenkins [job queue url](${response%$'\r'})" >> $GITHUB_STEP_SUMMARY
+              echo "Sleeping 10 seconds until job starts" >> $GITHUB_STEP_SUMMARY
+              sleep 10
+              buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
+              buildurl="${buildurl%$'\r'}"
+              echo "Jenkins job [build url](${buildurl})" >> $GITHUB_STEP_SUMMARY
+              echo "Attempting to change the Jenkins job description" >> $GITHUB_STEP_SUMMARY
curl -iX POST \
"${buildurl}submitDescription" \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
--data-urlencode "description=GHA external trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--data-urlencode "Submit=Submit"
echo "**** Notifying Discord ****"
TRIGGER_REASON="A version change was detected for qbittorrent tag latest. Old version:${IMAGE_VERSION} New version:${EXT_RELEASE}"
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
"description": "**Build Triggered** \n**Reason:** '"${TRIGGER_REASON}"' \n**Build URL:** '"${buildurl}display/redirect"' \n"}],
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
fi
          fi

.github/workflows/external_trigger_scheduler.yml

@@ -15,31 +15,31 @@ jobs:
      - name: External Trigger Scheduler
        run: |
-          echo "**** Branches found: ****"
-          git for-each-ref --format='%(refname:short)' refs/remotes
-          for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
+          printf "# External trigger scheduler for docker-qbittorrent\n\n" >> $GITHUB_STEP_SUMMARY
+          printf "Found the branches:\n\n%s\n" "$(git for-each-ref --format='- %(refname:lstrip=3)' refs/remotes)" >> $GITHUB_STEP_SUMMARY
+          for br in $(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes)
          do
-            br=$(echo "$br" | sed 's|origin/||g')
-            echo "**** Evaluating branch ${br} ****"
+            if [[ "${br}" == "HEAD" ]]; then
+              printf "\nSkipping %s.\n" ${br} >> $GITHUB_STEP_SUMMARY
+              continue
+            fi
+            printf "\n## Evaluating \`%s\`\n\n" ${br} >> $GITHUB_STEP_SUMMARY
            ls_jenkins_vars=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-qbittorrent/${br}/jenkins-vars.yml)
            ls_branch=$(echo "${ls_jenkins_vars}" | yq -r '.ls_branch')
            ls_trigger=$(echo "${ls_jenkins_vars}" | yq -r '.external_type')
            if [[ "${br}" == "${ls_branch}" ]] && [[ "${ls_trigger}" != "os" ]]; then
-              echo "**** Branch ${br} appears to be live and trigger is not os; checking workflow. ****"
+              echo "Branch appears to be live and trigger is not os; checking workflow." >> $GITHUB_STEP_SUMMARY
              if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-qbittorrent/${br}/.github/workflows/external_trigger.yml > /dev/null 2>&1; then
-                echo "**** Workflow exists. Triggering external trigger workflow for branch ${br} ****."
-                echo "Triggering external trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
+                echo "Triggering external trigger workflow for branch." >> $GITHUB_STEP_SUMMARY
                curl -iX POST \
                  -H "Authorization: token ${{ secrets.CR_PAT }}" \
                  -H "Accept: application/vnd.github.v3+json" \
                  -d "{\"ref\":\"refs/heads/${br}\"}" \
                  https://api.github.com/repos/linuxserver/docker-qbittorrent/actions/workflows/external_trigger.yml/dispatches
              else
-                echo "**** Workflow doesn't exist; skipping trigger. ****"
-                echo "Skipping branch ${br} due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
+                echo "Skipping branch due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
              fi
            else
-              echo "**** ${br} is either a dev branch, or has no external version; skipping trigger. ****"
-              echo "Skipping branch ${br} due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
+              echo "Skipping branch due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
            fi
          done

.github/workflows/package_trigger.yml (deleted)

@@ -1,42 +0,0 @@
name: Package Trigger Main
on:
workflow_dispatch:
jobs:
package-trigger-master:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.1.1
- name: Package Trigger
if: github.ref == 'refs/heads/master'
run: |
if [ -n "${{ secrets.PAUSE_PACKAGE_TRIGGER_QBITTORRENT_MASTER }}" ]; then
echo "**** Github secret PAUSE_PACKAGE_TRIGGER_QBITTORRENT_MASTER is set; skipping trigger. ****"
echo "Github secret \`PAUSE_PACKAGE_TRIGGER_QBITTORRENT_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
exit 0
fi
if [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-qbittorrent/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
echo "**** There already seems to be an active build on Jenkins; skipping package trigger ****"
echo "There already seems to be an active build on Jenkins; skipping package trigger" >> $GITHUB_STEP_SUMMARY
exit 0
fi
echo "**** Package trigger running off of master branch. To disable, set a Github secret named \"PAUSE_PACKAGE_TRIGGER_QBITTORRENT_MASTER\". ****"
echo "Package trigger running off of master branch. To disable, set a Github secret named \`PAUSE_PACKAGE_TRIGGER_QBITTORRENT_MASTER\`" >> $GITHUB_STEP_SUMMARY
response=$(curl -iX POST \
https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-qbittorrent/job/master/buildWithParameters?PACKAGE_CHECK=true \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
echo "**** Jenkins job queue url: ${response%$'\r'} ****"
echo "**** Sleeping 10 seconds until job starts ****"
sleep 10
buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
buildurl="${buildurl%$'\r'}"
echo "**** Jenkins job build url: ${buildurl} ****"
echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
echo "**** Attempting to change the Jenkins job description ****"
curl -iX POST \
"${buildurl}submitDescription" \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
--data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--data-urlencode "Submit=Submit"

.github/workflows/package_trigger_scheduler.yml

@@ -14,37 +14,87 @@ jobs:
          fetch-depth: '0'
      - name: Package Trigger Scheduler
+        env:
+          SKIP_PACKAGE_TRIGGER: ${{ vars.SKIP_PACKAGE_TRIGGER }}
        run: |
-          echo "**** Branches found: ****"
-          git for-each-ref --format='%(refname:short)' refs/remotes
-          for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
+          printf "# Package trigger scheduler for docker-qbittorrent\n\n" >> $GITHUB_STEP_SUMMARY
+          printf "Found the branches:\n\n%s\n" "$(git for-each-ref --format='- %(refname:lstrip=3)' refs/remotes)" >> $GITHUB_STEP_SUMMARY
+          for br in $(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes)
          do
-            br=$(echo "$br" | sed 's|origin/||g')
-            echo "**** Evaluating branch ${br} ****"
-            ls_branch=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-qbittorrent/${br}/jenkins-vars.yml | yq -r '.ls_branch')
-            if [ "${br}" == "${ls_branch}" ]; then
-              echo "**** Branch ${br} appears to be live; checking workflow. ****"
-              if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-qbittorrent/${br}/.github/workflows/package_trigger.yml > /dev/null 2>&1; then
-                echo "**** Workflow exists. Triggering package trigger workflow for branch ${br}. ****"
-                echo "Triggering package trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
-                triggered_branches="${triggered_branches}${br} "
-                curl -iX POST \
-                  -H "Authorization: token ${{ secrets.CR_PAT }}" \
-                  -H "Accept: application/vnd.github.v3+json" \
-                  -d "{\"ref\":\"refs/heads/${br}\"}" \
-                  https://api.github.com/repos/linuxserver/docker-qbittorrent/actions/workflows/package_trigger.yml/dispatches
-                sleep 30
+            if [[ "${br}" == "HEAD" ]]; then
+              printf "\nSkipping %s.\n" ${br} >> $GITHUB_STEP_SUMMARY
+              continue
+            fi
+            printf "\n## Evaluating \`%s\`\n\n" ${br} >> $GITHUB_STEP_SUMMARY
+            JENKINS_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-qbittorrent/${br}/jenkins-vars.yml)
+            if ! curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-qbittorrent/${br}/Jenkinsfile >/dev/null 2>&1; then
+              echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+              echo "> No Jenkinsfile found. Branch is either deprecated or is an early dev branch." >> $GITHUB_STEP_SUMMARY
+              skipped_branches="${skipped_branches}${br} "
+            elif [[ "${br}" == $(yq -r '.ls_branch' <<< "${JENKINS_VARS}") ]]; then
+              echo "Branch appears to be live; checking workflow." >> $GITHUB_STEP_SUMMARY
+              README_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-qbittorrent/${br}/readme-vars.yml)
+              if [[ $(yq -r '.project_deprecation_status' <<< "${README_VARS}") == "true" ]]; then
+                echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Branch appears to be deprecated; skipping trigger." >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
elif [[ $(yq -r '.skip_package_check' <<< "${JENKINS_VARS}") == "true" ]]; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Skipping branch ${br} due to \`skip_package_check\` being set in \`jenkins-vars.yml\`." >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
elif grep -q "^qbittorrent_${br}" <<< "${SKIP_PACKAGE_TRIGGER}"; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Github organizational variable \`SKIP_PACKAGE_TRIGGER\` contains \`qbittorrent_${br}\`; skipping trigger." >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-qbittorrent/job/${br}/lastBuild/api/json | jq -r '.building' 2>/dev/null) == "true" ]; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> There already seems to be an active build on Jenkins; skipping package trigger for ${br}" >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
              else
-                echo "**** Workflow doesn't exist; skipping trigger. ****"
-                echo "Skipping branch ${br} due to no package trigger workflow present." >> $GITHUB_STEP_SUMMARY
+                echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
+                echo "> Triggering package trigger for branch ${br}" >> $GITHUB_STEP_SUMMARY
printf "> To disable, add \`qbittorrent_%s\` into the Github organizational variable \`SKIP_PACKAGE_TRIGGER\`.\n\n" "${br}" >> $GITHUB_STEP_SUMMARY
triggered_branches="${triggered_branches}${br} "
response=$(curl -iX POST \
https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-qbittorrent/job/${br}/buildWithParameters?PACKAGE_CHECK=true \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
if [[ -z "${response}" ]]; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Jenkins build could not be triggered. Skipping branch."
continue
fi
echo "Jenkins [job queue url](${response%$'\r'})" >> $GITHUB_STEP_SUMMARY
echo "Sleeping 10 seconds until job starts" >> $GITHUB_STEP_SUMMARY
sleep 10
buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
buildurl="${buildurl%$'\r'}"
echo "Jenkins job [build url](${buildurl})" >> $GITHUB_STEP_SUMMARY
echo "Attempting to change the Jenkins job description" >> $GITHUB_STEP_SUMMARY
if ! curl -ifX POST \
"${buildurl}submitDescription" \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
--data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--data-urlencode "Submit=Submit"; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Unable to change the Jenkins job description."
fi
sleep 20
              fi
            else
-              echo "**** ${br} appears to be a dev branch; skipping trigger. ****"
              echo "Skipping branch ${br} due to being detected as dev branch." >> $GITHUB_STEP_SUMMARY
            fi
          done
-          echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****"
-          echo "**** Notifying Discord ****"
-          curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
-          "description": "**Package Check Build(s) Triggered for qbittorrent** \n**Branch(es):** '"${triggered_branches}"' \n**Build URL:** '"https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-qbittorrent/activity/"' \n"}],
-          "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+          if [[ -n "${triggered_branches}" ]] || [[ -n "${skipped_branches}" ]]; then
+            if [[ -n "${triggered_branches}" ]]; then
+              NOTIFY_BRANCHES="**Triggered:** ${triggered_branches} \n"
+              NOTIFY_BUILD_URL="**Build URL:** https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-qbittorrent/activity/ \n"
+              echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****"
fi
if [[ -n "${skipped_branches}" ]]; then
NOTIFY_BRANCHES="${NOTIFY_BRANCHES}**Skipped:** ${skipped_branches} \n"
fi
echo "**** Notifying Discord ****"
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
"description": "**Package Check Build(s) for qbittorrent** \n'"${NOTIFY_BRANCHES}"''"${NOTIFY_BUILD_URL}"'"}],
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
fi

.github/workflows/permissions.yml

@@ -5,6 +5,8 @@ on:
      - '**/run'
      - '**/finish'
      - '**/check'
+      - 'root/migrations/*'
jobs:
  permission_check:
    uses: linuxserver/github-workflows/.github/workflows/init-svc-executable-permissions.yml@v1

Dockerfile

@@ -21,6 +21,7 @@ XDG_DATA_HOME="/config"
RUN \
  echo "**** install packages ****" && \
  apk add --no-cache \
+    grep \
    icu-libs \
    p7zip \
    python3 \
@@ -31,6 +32,18 @@ RUN \
  fi && \
  apk add -U --upgrade --no-cache \
    qbittorrent-nox==${QBITTORRENT_VERSION} && \
echo "***** install qbitorrent-cli ****" && \
mkdir /qbt && \
if [ -z ${QBT_CLI_VERSION+x} ]; then \
QBT_CLI_VERSION=$(curl -sL "https://api.github.com/repos/fedarovich/qbittorrent-cli/releases/latest" \
| jq -r '. | .tag_name'); \
fi && \
curl -o \
/tmp/qbt.tar.gz -L \
"https://github.com/fedarovich/qbittorrent-cli/releases/download/${QBT_CLI_VERSION}/qbt-linux-alpine-x64-net6-${QBT_CLI_VERSION#v}.tar.gz" && \
tar xf \
/tmp/qbt.tar.gz -C \
/qbt && \
printf "Linuxserver.io version: ${VERSION}\nBuild-date: ${BUILD_DATE}" > /build_version && \ printf "Linuxserver.io version: ${VERSION}\nBuild-date: ${BUILD_DATE}" > /build_version && \
echo "**** cleanup ****" && \ echo "**** cleanup ****" && \
rm -rf \ rm -rf \

Dockerfile.aarch64

@@ -21,6 +21,7 @@ XDG_DATA_HOME="/config"
RUN \
  echo "**** install packages ****" && \
  apk add --no-cache \
+    grep \
    icu-libs \
    p7zip \
    python3 \
@@ -31,6 +32,18 @@ RUN \
  fi && \
  apk add -U --upgrade --no-cache \
    qbittorrent-nox==${QBITTORRENT_VERSION} && \
echo "***** install qbitorrent-cli ****" && \
mkdir /qbt && \
if [ -z ${QBT_CLI_VERSION+x} ]; then \
QBT_CLI_VERSION=$(curl -sL "https://api.github.com/repos/fedarovich/qbittorrent-cli/releases/latest" \
| jq -r '. | .tag_name'); \
fi && \
curl -o \
/tmp/qbt.tar.gz -L \
"https://github.com/fedarovich/qbittorrent-cli/releases/download/${QBT_CLI_VERSION}/qbt-linux-alpine-arm64-net6-${QBT_CLI_VERSION#v}.tar.gz" && \
tar xf \
/tmp/qbt.tar.gz -C \
/qbt && \
printf "Linuxserver.io version: ${VERSION}\nBuild-date: ${BUILD_DATE}" > /build_version && \ printf "Linuxserver.io version: ${VERSION}\nBuild-date: ${BUILD_DATE}" > /build_version && \
echo "**** cleanup ****" && \ echo "**** cleanup ****" && \
rm -rf \ rm -rf \

Jenkinsfile

@@ -8,7 +8,7 @@ pipeline {
    }
    // Input to determine if this is a package check
    parameters {
        string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK')
    }
    // Configuration for the variables used for this specific repo
    environment {
@@ -17,6 +17,8 @@ pipeline {
        GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0')
        GITLAB_NAMESPACE=credentials('gitlab-namespace-id')
        DOCKERHUB_TOKEN=credentials('docker-hub-ci-pat')
+       QUAYIO_API_TOKEN=credentials('quayio-repo-api-token')
+       GIT_SIGNING_KEY=credentials('484fbca6-9a4f-455e-b9e3-97ac98785f5f')
        CONTAINER_NAME = 'qbittorrent'
        BUILD_VERSION_ARG = 'QBITTORRENT_VERSION'
        LS_USER = 'linuxserver'
@@ -39,15 +41,41 @@ pipeline {
        CI_WEBPATH=''
    }
    stages {
stage("Set git config"){
steps{
sh '''#!/bin/bash
cat ${GIT_SIGNING_KEY} > /config/.ssh/id_sign
chmod 600 /config/.ssh/id_sign
ssh-keygen -y -f /config/.ssh/id_sign > /config/.ssh/id_sign.pub
echo "Using $(ssh-keygen -lf /config/.ssh/id_sign) to sign commits"
git config --global gpg.format ssh
git config --global user.signingkey /config/.ssh/id_sign
git config --global commit.gpgsign true
'''
}
}
        // Setup all the basic environment variables needed for the build
        stage("Set ENV Variables base"){
            steps{
+               echo "Running on node: ${NODE_NAME}"
                sh '''#! /bin/bash
-                  containers=$(docker ps -aq)
+                  echo "Pruning builder"
+                  docker builder prune -f --builder container || :
+                  containers=$(docker ps -q)
                   if [[ -n "${containers}" ]]; then
-                     docker stop ${containers}
+                     BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
+                     for container in ${containers}; do
+                       if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
+                         echo "skipping buildx container in docker stop"
+                       else
+                         echo "Stopping container ${container}"
+                         docker stop ${container}
+                       fi
+                     done
                   fi
-                  docker system prune -af --volumes || : '''
+                  docker system prune -f --volumes || :
+                  docker image prune -af || :
+               '''
                script{
                    env.EXIT_STATUS = ''
                    env.LS_RELEASE = sh(
@@ -68,7 +96,7 @@ pipeline {
                    env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/commit/' + env.GIT_COMMIT
                    env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DOCKERHUB_IMAGE + '/tags/'
                    env.PULL_REQUEST = env.CHANGE_ID
-                   env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./.github/workflows/package_trigger.yml'
+                   env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml'
                }
                sh '''#! /bin/bash
                   echo "The default github branch detected as ${GH_DEFAULT_BRANCH}" '''
@@ -179,6 +207,7 @@ pipeline {
                        env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
                        env.META_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
                        env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
+                       env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
                    }
                }
            }
@@ -203,6 +232,7 @@ pipeline {
                        env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
                        env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
                        env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/'
+                       env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
                    }
                }
            }
@@ -227,6 +257,7 @@ pipeline {
                        env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
                        env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST
                        env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/'
+                       env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
                    }
                }
            }
@@ -249,7 +280,7 @@ pipeline {
                    -v ${WORKSPACE}:/mnt \
                    -e AWS_ACCESS_KEY_ID=\"${S3_KEY}\" \
                    -e AWS_SECRET_ACCESS_KEY=\"${S3_SECRET}\" \
-                   ghcr.io/linuxserver/baseimage-alpine:3.19 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
+                   ghcr.io/linuxserver/baseimage-alpine:3.20 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
                    apk add --no-cache python3 && \
                    python3 -m venv /lsiopy && \
                    pip install --no-cache-dir -U pip && \
@@ -299,7 +330,7 @@ pipeline {
echo "Jenkinsfile is up to date." echo "Jenkinsfile is up to date."
fi fi
echo "Starting Stage 2 - Delete old templates" echo "Starting Stage 2 - Delete old templates"
OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml" OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml .github/workflows/package_trigger.yml"
for i in ${OLD_TEMPLATES}; do for i in ${OLD_TEMPLATES}; do
if [[ -f "${i}" ]]; then if [[ -f "${i}" ]]; then
TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}" TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}"
@@ -323,6 +354,35 @@ pipeline {
                  else
                    echo "No templates to delete"
                  fi
echo "Starting Stage 2.5 - Update init diagram"
if ! grep -q 'init_diagram:' readme-vars.yml; then
echo "Adding the key 'init_diagram' to readme-vars.yml"
sed -i '\\|^#.*changelog.*$|d' readme-vars.yml
sed -i 's|^changelogs:|# init diagram\\ninit_diagram:\\n\\n# changelog\\nchangelogs:|' readme-vars.yml
fi
mkdir -p ${TEMPDIR}/d2
docker run --rm -v ${TEMPDIR}/d2:/output -e PUID=$(id -u) -e PGID=$(id -g) -e RAW="true" ghcr.io/linuxserver/d2-builder:latest ${CONTAINER_NAME}:latest
ls -al ${TEMPDIR}/d2
yq -ei ".init_diagram |= load_str(\\"${TEMPDIR}/d2/${CONTAINER_NAME}-latest.d2\\")" readme-vars.yml
if [[ $(md5sum readme-vars.yml | cut -c1-8) != $(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/readme-vars.yml | cut -c1-8) ]]; then
echo "'init_diagram' has been updated. Updating repo and exiting build, new one will trigger based on commit."
mkdir -p ${TEMPDIR}/repo
git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
cd ${TEMPDIR}/repo/${LS_REPO}
git checkout -f master
cp ${WORKSPACE}/readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/readme-vars.yml
git add readme-vars.yml
git commit -m 'Bot Updating Templated Files'
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
echo "Updating templates and exiting build, new one will trigger based on commit"
rm -Rf ${TEMPDIR}
exit 0
else
echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
echo "Init diagram is unchanged"
fi
echo "Starting Stage 3 - Update templates" echo "Starting Stage 3 - Update templates"
CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8) CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
cd ${TEMPDIR}/docker-${CONTAINER_NAME} cd ${TEMPDIR}/docker-${CONTAINER_NAME}
@@ -356,7 +416,7 @@ pipeline {
                  fi
                  echo "Starting Stage 4 - External repo updates: Docs, Unraid Template and Readme Sync to Docker Hub"
                  mkdir -p ${TEMPDIR}/docs
-                 git clone https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/docs/docker-documentation
+                 git clone --depth=1 https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/docs/docker-documentation
                  if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
                    cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/docs/docker-documentation/docs/images/
                    cd ${TEMPDIR}/docs/docker-documentation
@@ -374,8 +434,8 @@ pipeline {
echo "Docs update not needed, skipping" echo "Docs update not needed, skipping"
fi fi
mkdir -p ${TEMPDIR}/unraid mkdir -p ${TEMPDIR}/unraid
git clone https://github.com/linuxserver/docker-templates.git ${TEMPDIR}/unraid/docker-templates git clone --depth=1 https://github.com/linuxserver/docker-templates.git ${TEMPDIR}/unraid/docker-templates
git clone https://github.com/linuxserver/templates.git ${TEMPDIR}/unraid/templates git clone --depth=1 https://github.com/linuxserver/templates.git ${TEMPDIR}/unraid/templates
if [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-logo.png ]]; then if [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-logo.png ]]; then
sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-logo.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-logo.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
elif [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-icon.png ]]; then elif [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-icon.png ]]; then
@@ -385,9 +445,9 @@ pipeline {
echo "Updating Unraid template" echo "Updating Unraid template"
cd ${TEMPDIR}/unraid/templates/ cd ${TEMPDIR}/unraid/templates/
GH_TEMPLATES_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||') GH_TEMPLATES_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
if grep -wq "${CONTAINER_NAME}" ${TEMPDIR}/unraid/templates/unraid/ignore.list && [[ -f ${TEMPDIR}/unraid/templates/unraid/deprecated/${CONTAINER_NAME}.xml ]]; then if grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list && [[ -f ${TEMPDIR}/unraid/templates/unraid/deprecated/${CONTAINER_NAME}.xml ]]; then
echo "Image is on the ignore list, and already in the deprecation folder." echo "Image is on the ignore list, and already in the deprecation folder."
elif grep -wq "${CONTAINER_NAME}" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then elif grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then
echo "Image is on the ignore list, marking Unraid template as deprecated" echo "Image is on the ignore list, marking Unraid template as deprecated"
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/ cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/
git add -u unraid/${CONTAINER_NAME}.xml git add -u unraid/${CONTAINER_NAME}.xml
@@ -480,10 +540,10 @@ pipeline {
            }
        }
        /* #######################
-          GitLab Mirroring
+          GitLab Mirroring and Quay.io Repo Visibility
           ####################### */
-       // Ping into Gitlab to mirror this repo and have a registry endpoint
-       stage("GitLab Mirror"){
+       // Ping into Gitlab to mirror this repo and have a registry endpoint & mark this repo on Quay.io as public
+       stage("GitLab Mirror and Quay.io Visibility"){
            when {
                environment name: 'EXIT_STATUS', value: ''
            }
@@ -499,6 +559,8 @@ pipeline {
"visibility":"public"}' ''' "visibility":"public"}' '''
sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \ sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \
-d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" ''' -d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" '''
sh '''curl -H "Content-Type: application/json" -H "Authorization: Bearer ${QUAYIO_API_TOKEN}" -X POST "https://quay.io/api/v1/repository${QUAYIMAGE/quay.io/}/changevisibility" \
-d '{"visibility":"public"}' ||: '''
} }
} }
/* ############### /* ###############
@@ -529,8 +591,42 @@ pipeline {
--label \"org.opencontainers.image.title=Qbittorrent\" \ --label \"org.opencontainers.image.title=Qbittorrent\" \
--label \"org.opencontainers.image.description=The [Qbittorrent](https://www.qbittorrent.org/) project aims to provide an open-source software alternative to µTorrent. qBittorrent is based on the Qt toolkit and libtorrent-rasterbar library.\" \ --label \"org.opencontainers.image.description=The [Qbittorrent](https://www.qbittorrent.org/) project aims to provide an open-source software alternative to µTorrent. qBittorrent is based on the Qt toolkit and libtorrent-rasterbar library.\" \
--no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \ --no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \
--provenance=false --sbom=false \ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh '''#! /bin/bash
set -e
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
}
}
// Build MultiArch Docker containers for push to LS Repo
@@ -561,8 +657,42 @@ pipeline {
--label \"org.opencontainers.image.title=Qbittorrent\" \ --label \"org.opencontainers.image.title=Qbittorrent\" \
--label \"org.opencontainers.image.description=The [Qbittorrent](https://www.qbittorrent.org/) project aims to provide an open-source software alternative to µTorrent. qBittorrent is based on the Qt toolkit and libtorrent-rasterbar library.\" \ --label \"org.opencontainers.image.description=The [Qbittorrent](https://www.qbittorrent.org/) project aims to provide an open-source software alternative to µTorrent. qBittorrent is based on the Qt toolkit and libtorrent-rasterbar library.\" \
--no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \ --no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \
--provenance=false --sbom=false \ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh '''#! /bin/bash
set -e
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:amd64-${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
}
}
stage('Build ARM64') {
@@ -571,10 +701,6 @@ pipeline {
}
steps {
echo "Running on node: ${NODE_NAME}"
-echo 'Logging into Github'
-sh '''#! /bin/bash
-echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
-'''
sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.aarch64"
sh "docker buildx build \
--label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
@@ -590,18 +716,50 @@ pipeline {
--label \"org.opencontainers.image.title=Qbittorrent\" \ --label \"org.opencontainers.image.title=Qbittorrent\" \
--label \"org.opencontainers.image.description=The [Qbittorrent](https://www.qbittorrent.org/) project aims to provide an open-source software alternative to µTorrent. qBittorrent is based on the Qt toolkit and libtorrent-rasterbar library.\" \ --label \"org.opencontainers.image.description=The [Qbittorrent](https://www.qbittorrent.org/) project aims to provide an open-source software alternative to µTorrent. qBittorrent is based on the Qt toolkit and libtorrent-rasterbar library.\" \
--no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \ --no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \
--provenance=false --sbom=false \ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh "docker tag ${IMAGE}:arm64v8-${META_TAG} ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}" sh '''#! /bin/bash
retry(5) { set -e
sh "docker push ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}" IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:arm64v8-${META_TAG} ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
sh '''#! /bin/bash
containers=$(docker ps -aq)
if [[ -n "${containers}" ]]; then
docker stop ${containers}
fi
-docker system prune -af --volumes || : '''
+docker system prune -f --volumes || :
+docker image prune -af || :
+'''
}
}
}
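
The tag-and-push loops in the three build stages above all fan out over BUILDCACHE, which is not defined in the hunks shown here; it is assumed to be a comma-separated list of cache repositories provided by the pipeline environment. A minimal sketch of the split, with hypothetical values:

```bash
# Hypothetical value; the real list comes from the Jenkins environment.
BUILDCACHE="ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache"
COMMIT_SHA=abc1234
BUILD_NUMBER=42

IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
    echo "${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}"
done
# -> ghcr.io/linuxserver/lsiodev-buildcache:amd64-abc1234-42
# -> quay.io/linuxserver.io/lsiodev-buildcache:amd64-abc1234-42
```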
@@ -705,9 +863,17 @@ pipeline {
}
sh '''#! /bin/bash
set -e
if grep -q 'docker-baseimage' <<< "${LS_REPO}"; then
echo "Detected baseimage, setting LSIO_FIRST_PARTY=true"
if [ -n "${CI_DOCKERENV}" ]; then
CI_DOCKERENV="LSIO_FIRST_PARTY=true|${CI_DOCKERENV}"
else
CI_DOCKERENV="LSIO_FIRST_PARTY=true"
fi
fi
docker pull ghcr.io/linuxserver/ci:latest
if [ "${MULTIARCH}" == "true" ]; then
-docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
+docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
fi
docker run --rm \
@@ -717,6 +883,7 @@ pipeline {
-e DOCKER_LOGS_TIMEOUT=\"${CI_DELAY}\" \
-e TAGS=\"${CI_TAGS}\" \
-e META_TAG=\"${META_TAG}\" \
-e RELEASE_TAG=\"latest\" \
-e PORT=\"${CI_PORT}\" \
-e SSL=\"${CI_SSL}\" \
-e BASE=\"${DIST_IMAGE}\" \
@@ -726,6 +893,7 @@ pipeline {
-e WEB_SCREENSHOT=\"${CI_WEB}\" \
-e WEB_AUTH=\"${CI_AUTH}\" \
-e WEB_PATH=\"${CI_WEBPATH}\" \
-e NODE_NAME=\"${NODE_NAME}\" \
-t ghcr.io/linuxserver/ci:latest \
python3 test_build.py'''
}
@@ -741,37 +909,23 @@ pipeline {
environment name: 'EXIT_STATUS', value: ''
}
steps {
-withCredentials([
-[
-$class: 'UsernamePasswordMultiBinding',
-credentialsId: 'Quay.io-Robot',
-usernameVariable: 'QUAYUSER',
-passwordVariable: 'QUAYPASS'
-]
-]) {
-retry(5) {
-sh '''#! /bin/bash
-set -e
-echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
-echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
-echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
-echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
-for PUSHIMAGE in "${GITHUBIMAGE}" "${GITLABIMAGE}" "${QUAYIMAGE}" "${IMAGE}"; do
-docker tag ${IMAGE}:${META_TAG} ${PUSHIMAGE}:${META_TAG}
-docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:latest
-docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${EXT_RELEASE_TAG}
-if [ -n "${SEMVER}" ]; then
-docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${SEMVER}
-fi
-docker push ${PUSHIMAGE}:latest
-docker push ${PUSHIMAGE}:${META_TAG}
-docker push ${PUSHIMAGE}:${EXT_RELEASE_TAG}
-if [ -n "${SEMVER}" ]; then
-docker push ${PUSHIMAGE}:${SEMVER}
-fi
-done
-'''
-}
+retry_backoff(5,5) {
+sh '''#! /bin/bash
+set -e
+for PUSHIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
+[[ ${PUSHIMAGE%%/*} =~ \\. ]] && PUSHIMAGEPLUS="${PUSHIMAGE}" || PUSHIMAGEPLUS="docker.io/${PUSHIMAGE}"
+IFS=',' read -ra CACHE <<< "$BUILDCACHE"
+for i in "${CACHE[@]}"; do
+if [[ "${PUSHIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
+CACHEIMAGE=${i}
+fi
+done
+docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${META_TAG} -t ${PUSHIMAGE}:latest -t ${PUSHIMAGE}:${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+if [ -n "${SEMVER}" ]; then
+docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+fi
+done
+'''
}
}
}
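
The new push path never re-pushes local layers: `docker buildx imagetools create` assembles the destination tags registry-side from the already-pushed cache image, and `--prefer-index=false` (as used in the pipeline) keeps the single-arch result a plain manifest rather than wrapping it in an index. A rough sketch with made-up values:

```bash
# All values below are placeholders for illustration only.
CACHEIMAGE=ghcr.io/linuxserver/lsiodev-buildcache
PUSHIMAGE=ghcr.io/linuxserver/qbittorrent
COMMIT_SHA=abc1234
BUILD_NUMBER=42
META_TAG=5.0.4-r0-ls123

# Copy the cached amd64 image to the release tags without pulling it locally.
docker buildx imagetools create --prefer-index=false \
  -t ${PUSHIMAGE}:${META_TAG} \
  -t ${PUSHIMAGE}:latest \
  ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
```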
@@ -782,57 +936,34 @@ pipeline {
environment name: 'EXIT_STATUS', value: ''
}
steps {
-withCredentials([
-[
-$class: 'UsernamePasswordMultiBinding',
-credentialsId: 'Quay.io-Robot',
-usernameVariable: 'QUAYUSER',
-passwordVariable: 'QUAYPASS'
-]
-]) {
-retry(5) {
-sh '''#! /bin/bash
-set -e
-echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
-echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
-echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
-echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
-if [ "${CI}" == "false" ]; then
-docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
-docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
-fi
-for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
-docker tag ${IMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG}
-docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-latest
-docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
-docker tag ${IMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
-docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-latest
-docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
-if [ -n "${SEMVER}" ]; then
-docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${SEMVER}
-docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
-fi
-docker push ${MANIFESTIMAGE}:amd64-${META_TAG}
-docker push ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
-docker push ${MANIFESTIMAGE}:amd64-latest
-docker push ${MANIFESTIMAGE}:arm64v8-${META_TAG}
-docker push ${MANIFESTIMAGE}:arm64v8-latest
-docker push ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
-if [ -n "${SEMVER}" ]; then
-docker push ${MANIFESTIMAGE}:amd64-${SEMVER}
-docker push ${MANIFESTIMAGE}:arm64v8-${SEMVER}
-fi
-done
-for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
-docker buildx imagetools create -t ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest
-docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
-docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
-if [ -n "${SEMVER}" ]; then
-docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
-fi
-done
-'''
-}
+retry_backoff(5,5) {
+sh '''#! /bin/bash
+set -e
+for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
+[[ ${MANIFESTIMAGE%%/*} =~ \\. ]] && MANIFESTIMAGEPLUS="${MANIFESTIMAGE}" || MANIFESTIMAGEPLUS="docker.io/${MANIFESTIMAGE}"
+IFS=',' read -ra CACHE <<< "$BUILDCACHE"
+for i in "${CACHE[@]}"; do
+if [[ "${MANIFESTIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
+CACHEIMAGE=${i}
+fi
+done
+docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${META_TAG} -t ${MANIFESTIMAGE}:amd64-latest -t ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${META_TAG} -t ${MANIFESTIMAGE}:arm64v8-latest -t ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
+if [ -n "${SEMVER}" ]; then
+docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${SEMVER} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
+fi
+done
+for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
+docker buildx imagetools create -t ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest
+docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
+docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
+if [ -n "${SEMVER}" ]; then
+docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
+fi
+done
+'''
}
}
}
@@ -853,14 +984,14 @@ pipeline {
"object": "'${COMMIT_SHA}'",\ "object": "'${COMMIT_SHA}'",\
"message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to master",\ "message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to master",\
"type": "commit",\ "type": "commit",\
"tagger": {"name": "LinuxServer Jenkins","email": "jenkins@linuxserver.io","date": "'${GITHUB_DATE}'"}}' ''' "tagger": {"name": "LinuxServer-CI","email": "ci@linuxserver.io","date": "'${GITHUB_DATE}'"}}' '''
echo "Pushing New release for Tag" echo "Pushing New release for Tag"
sh '''#! /bin/bash sh '''#! /bin/bash
echo "Updating external repo packages to ${EXT_RELEASE_CLEAN}" > releasebody.json echo "Updating external repo packages to ${EXT_RELEASE_CLEAN}" > releasebody.json
echo '{"tag_name":"'${META_TAG}'",\ echo '{"tag_name":"'${META_TAG}'",\
"target_commitish": "master",\ "target_commitish": "master",\
"name": "'${META_TAG}'",\ "name": "'${META_TAG}'",\
"body": "**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Repo Changes:**\\n\\n' > start "body": "**CI Report:**\\n\\n'${CI_URL:-N/A}'\\n\\n**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Remote Changes:**\\n\\n' > start
printf '","draft": false,"prerelease": false}' >> releasebody.json printf '","draft": false,"prerelease": false}' >> releasebody.json
paste -d'\\0' start releasebody.json > releasebody.json.done paste -d'\\0' start releasebody.json > releasebody.json.done
curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done''' curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done'''
@@ -985,32 +1116,94 @@ EOF
###################### */
post {
always {
sh '''#!/bin/bash
rm -rf /config/.ssh/id_sign
rm -rf /config/.ssh/id_sign.pub
git config --global --unset gpg.format
git config --global --unset user.signingkey
git config --global --unset commit.gpgsign
'''
script{
env.JOB_DATE = sh(
script: '''date '+%Y-%m-%dT%H:%M:%S%:z' ''',
returnStdout: true).trim()
if (env.EXIT_STATUS == "ABORTED"){
sh 'echo "build aborted"'
-}
-else if (currentBuild.currentResult == "SUCCESS"){
-sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 1681177,\
-"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** Success\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
-"username": "Jenkins"}' ${BUILDS_DISCORD} '''
-}
-else {
-sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 16711680,\
-"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** failure\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
+}else{
+if (currentBuild.currentResult == "SUCCESS"){
+if (env.GITHUBIMAGE =~ /lspipepr/){
+env.JOB_WEBHOOK_STATUS='Success'
+env.JOB_WEBHOOK_COLOUR=3957028
+env.JOB_WEBHOOK_FOOTER='PR Build'
+}else if (env.GITHUBIMAGE =~ /lsiodev/){
+env.JOB_WEBHOOK_STATUS='Success'
+env.JOB_WEBHOOK_COLOUR=3957028
env.JOB_WEBHOOK_FOOTER='Dev Build'
}else{
env.JOB_WEBHOOK_STATUS='Success'
env.JOB_WEBHOOK_COLOUR=1681177
env.JOB_WEBHOOK_FOOTER='Live Build'
}
}else{
if (env.GITHUBIMAGE =~ /lspipepr/){
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=12669523
env.JOB_WEBHOOK_FOOTER='PR Build'
}else if (env.GITHUBIMAGE =~ /lsiodev/){
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=12669523
env.JOB_WEBHOOK_FOOTER='Dev Build'
}else{
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=16711680
env.JOB_WEBHOOK_FOOTER='Live Build'
}
}
sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"'color'": '${JOB_WEBHOOK_COLOUR}',\
"footer": {"text" : "'"${JOB_WEBHOOK_FOOTER}"'"},\
"timestamp": "'${JOB_DATE}'",\
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** '${JOB_WEBHOOK_STATUS}'\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
"username": "Jenkins"}' ${BUILDS_DISCORD} ''' "username": "Jenkins"}' ${BUILDS_DISCORD} '''
} }
} }
} }
cleanup {
sh '''#! /bin/bash
-echo "Performing docker system prune!!"
-containers=$(docker ps -aq)
+echo "Pruning builder!!"
+docker builder prune -f --builder container || :
+containers=$(docker ps -q)
if [[ -n "${containers}" ]]; then
-docker stop ${containers}
+BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
+for container in ${containers}; do
+if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
+echo "skipping buildx container in docker stop"
+else
+echo "Stopping container ${container}"
+docker stop ${container}
+fi
+done
fi
-docker system prune -af --volumes || :
+docker system prune -f --volumes || :
+docker image prune -af || :
'''
cleanWs()
}
}
}
def retry_backoff(int max_attempts, int power_base, Closure c) {
int n = 0
while (n < max_attempts) {
try {
c()
return
} catch (err) {
if ((n + 1) >= max_attempts) {
throw err
}
sleep(power_base ** n)
n++
}
}
return
}
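
For reference, `retry_backoff(max_attempts, power_base)` re-runs the closure and sleeps `power_base ** n` seconds after the n-th failure, so `retry_backoff(5,5)` waits 1s, 5s, 25s and 125s between attempts before giving up. An illustrative bash equivalent of the same pattern (not part of the Jenkinsfile):

```bash
# Illustrative only; assumes the wrapped command is an ordinary shell command.
retry_backoff() {
    local max_attempts=$1 power_base=$2
    shift 2
    local n=0
    until "$@"; do
        n=$((n + 1))
        if (( n >= max_attempts )); then
            return 1
        fi
        sleep $((power_base ** (n - 1)))
    done
}

# Example usage (hypothetical command):
# retry_backoff 5 5 docker push example/image:tag
```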


@@ -3,7 +3,7 @@
[![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png)](https://linuxserver.io)
[![Blog](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=Blog)](https://blog.linuxserver.io "all the things you can do with our containers including How-To guides, opinions and much more!")
-[![Discord](https://img.shields.io/discord/354974912613449730.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=Discord&logo=discord)](https://discord.gg/YWrKVTn "realtime support / chat with the community and the team.")
+[![Discord](https://img.shields.io/discord/354974912613449730.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=Discord&logo=discord)](https://linuxserver.io/discord "realtime support / chat with the community and the team.")
[![Discourse](https://img.shields.io/discourse/https/discourse.linuxserver.io/topics.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&logo=discourse)](https://discourse.linuxserver.io "post on our community forum.")
[![Fleet](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=Fleet)](https://fleet.linuxserver.io "an online web interface which displays all of our maintained images.")
[![GitHub](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=GitHub&logo=github)](https://github.com/linuxserver "view the source for all of our repositories.")
@@ -20,7 +20,7 @@ The [LinuxServer.io](https://linuxserver.io) team brings you another container r
Find us at:
* [Blog](https://blog.linuxserver.io) - all the things you can do with our containers including How-To guides, opinions and much more!
-* [Discord](https://discord.gg/YWrKVTn) - realtime support / chat with the community and the team.
+* [Discord](https://linuxserver.io/discord) - realtime support / chat with the community and the team.
* [Discourse](https://discourse.linuxserver.io) - post on our community forum.
* [Fleet](https://fleet.linuxserver.io) - an online web interface which displays all of our maintained images.
* [GitHub](https://github.com/linuxserver) - view the source for all of our repositories.
@@ -78,7 +78,7 @@ If you are running a very old (3.x) kernel you may run into [this issue](https:/
Due to issues with CSRF and port mapping, should you require to alter the port for the web UI you need to change both sides of the -p 8080 switch AND set the WEBUI_PORT variable to the new port.
-For example, to set the port to 8090 you need to set -p 8090:8090 and -e WEBUI_PORT=8090
+For example, to set the port to 8123 you need to set -p 8123:8123 and -e WEBUI_PORT=8123
### TORRENTING_PORT
@@ -86,10 +86,21 @@ A bittorrent client can be an active or a passive node. Running your client as a
Similarly to the WEBUI_PORT, to set the port to 6887 you need to pass -p 6887:6887, -p 6887:6887/udp and -e TORRENTING_PORT=6887 arguments to Docker.
## Read-Only Operation
This image can be run with a read-only container filesystem. For details please [read the docs](https://docs.linuxserver.io/misc/read-only/).
## Non-Root Operation
This image can be run with a non-root user. For details please [read the docs](https://docs.linuxserver.io/misc/non-root/).
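
For illustration, a minimal `docker run` sketch combining the read-only and non-root options with the parameters from this README (host paths and the 1000:1000 UID/GID are placeholders; read-only operation may also need the tmpfs mounts described in the linked docs):

```bash
docker run -d \
  --name=qbittorrent \
  --read-only \
  --user=1000:1000 \
  -e TZ=Etc/UTC \
  -e WEBUI_PORT=8080 \
  -p 8080:8080 \
  -p 6881:6881 \
  -p 6881:6881/udp \
  -v /path/to/qbittorrent/appdata:/config \
  -v /path/to/downloads:/downloads \
  lscr.io/linuxserver/qbittorrent:latest
```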
## Usage
To help you get started creating a container from this image you can either use docker-compose or the docker cli.
>[!NOTE]
>Unless a parameter is flagged as 'optional', it is *mandatory* and a value must be provided.
### docker-compose (recommended, [click here for more info](https://docs.linuxserver.io/general/docker-compose))
```yaml
@@ -139,9 +150,9 @@ Containers are configured using parameters passed at runtime (such as those abov
| Parameter | Function |
| :----: | --- |
-| `-p 8080` | WebUI |
+| `-p 8080:8080` | WebUI |
-| `-p 6881` | tcp connection port |
+| `-p 6881:6881` | tcp connection port |
-| `-p 6881/udp` | udp connection port |
+| `-p 6881:6881/udp` | udp connection port |
| `-e PUID=1000` | for UserID - see below for explanation |
| `-e PGID=1000` | for GroupID - see below for explanation |
| `-e TZ=Etc/UTC` | specify a timezone to use, see this [list](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List). |
@@ -149,6 +160,8 @@ Containers are configured using parameters passed at runtime (such as those abov
| `-e TORRENTING_PORT=6881` | for changing the port of tcp/udp connection, see below for explanation |
| `-v /config` | Contains all relevant configuration files. |
| `-v /downloads` | Location of downloads on disk. |
| `--read-only=true` | Run container with a read-only filesystem. Please [read the docs](https://docs.linuxserver.io/misc/read-only/). |
| `--user=1000:1000` | Run container with a non-root user. Please [read the docs](https://docs.linuxserver.io/misc/non-root/). |
## Environment variables from files (Docker secrets)
@@ -286,7 +299,8 @@ Below are the instructions for updating containers:
### Image Update Notifications - Diun (Docker Image Update Notifier)
-**tip**: We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported.
+>[!TIP]
+>We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported.
## Building locally
@@ -301,16 +315,17 @@ docker build \
-t lscr.io/linuxserver/qbittorrent:latest .
```
-The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static`
+The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static`
```bash
-docker run --rm --privileged multiarch/qemu-user-static:register --reset
+docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset
```
Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`.
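
Putting those two steps together, a hedged sketch of building the arm64 variant locally on an x86_64 host (the output tag is arbitrary):

```bash
# one-time: register the qemu binfmt handlers
docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset

# then build the arm64 variant from the repo root
docker build \
  --no-cache \
  --pull \
  -f Dockerfile.aarch64 \
  -t lscr.io/linuxserver/qbittorrent:arm64v8-local .
```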
## Versions
* **17.07.24:** - Restore qbittorrent-cli as it now supports openssl 3.
* **25.05.24:** - Remove qbittorrent-cli as it still requires openssl 1.1 which is EOL.
* **14.02.24:** - Only set/override torrenting port if the optional env var is set.
* **14.02.24:** - Add torrenting port support.


@@ -1,83 +1,99 @@
NAME VERSION TYPE NAME VERSION TYPE
7zip 23.01-r0 apk 7zip 24.09-r0 apk
alpine-baselayout 3.6.5-r0 apk Alba.CsConsoleFormat 1.0.0 dotnet
alpine-baselayout-data 3.6.5-r0 apk BencodeNET 2.3.0 dotnet
alpine-keys 2.4-r1 apk CsvHelper 12.1.2 dotnet
apk-tools 2.14.4-r1 apk IPNetwork2 2.5.235 dotnet
bash 5.2.26-r0 apk McMaster.Extensions.CommandLineUtils 2.4.4 dotnet
brotli-libs 1.1.0-r2 apk Mono.Posix.NETStandard 1.0.0 dotnet
busybox 1.36.1-r31 apk NJsonSchema 9.14.1 dotnet
busybox-binsh 1.36.1-r31 apk Newtonsoft.Json 13.0.3 dotnet
c-ares 1.32.1-r0 apk Portable.BouncyCastle 1.8.8 dotnet
ca-certificates 20240705-r0 apk Portable.Xaml 0.18.0 dotnet
ca-certificates-bundle 20240226-r0 apk QBittorrent.Client 1.9.24285.1 dotnet
catatonit 0.2.0-r0 apk System.Security.Cryptography.ProtectedData 5.0.0 dotnet
coreutils 9.5-r1 apk acl-libs 2.3.2-r1 apk
coreutils-env 9.5-r1 apk alpine-baselayout 3.7.0-r0 apk
coreutils-fmt 9.5-r1 apk alpine-baselayout-data 3.7.0-r0 apk
coreutils-sha512sum 9.5-r1 apk alpine-keys 2.5-r0 apk
curl 8.8.0-r0 apk apk-tools 2.14.9-r1 apk
dbus-libs 1.14.10-r3 apk bash 5.2.37-r0 apk
double-conversion 3.3.0-r0 apk brotli-libs 1.1.0-r2 apk
duktape 2.7.0-r1 apk busybox 1.37.0-r16 apk
findutils 4.9.0-r5 apk busybox-binsh 1.37.0-r16 apk
gdbm 1.24-r0 apk c-ares 1.34.5-r0 apk
glib 2.80.3-r0 apk ca-certificates 20241121-r2 apk
icu-data-en 74.2-r0 apk ca-certificates-bundle 20241121-r2 apk
icu-libs 74.2-r0 apk catatonit 0.2.1-r0 apk
jq 1.7.1-r0 apk coreutils 9.7-r1 apk
libacl 2.3.2-r0 apk coreutils-env 9.7-r1 apk
libattr 2.5.2-r0 apk coreutils-fmt 9.7-r1 apk
libb2 0.98.1-r3 apk coreutils-sha512sum 9.7-r1 apk
libblkid 2.40.1-r1 apk curl 8.13.0-r1 apk
libbsd 0.12.2-r0 apk dbus-libs 1.16.2-r1 apk
libbz2 1.0.8-r6 apk double-conversion 3.3.1-r0 apk
libcrypto3 3.3.1-r2 apk duktape-libs 2.7.0-r2 apk
libcurl 8.8.0-r0 apk findutils 4.10.0-r0 apk
libeconf 0.6.3-r0 apk gdbm 1.24-r0 apk
libexpat 2.6.2-r0 apk glib 2.84.1-r0 apk
libffi 3.4.6-r0 apk grep 3.12-r0 apk
libgcc 13.2.1_git20240309-r0 apk icu-data-en 76.1-r0 apk
libgomp 13.2.1_git20240309-r0 apk icu-libs 76.1-r0 apk
libidn2 2.3.7-r0 apk jq 1.7.1-r0 apk
libintl 0.22.5-r0 apk libapk2 2.14.9-r1 apk
libmd 1.1.0-r0 apk libattr 2.5.2-r2 apk
libmount 2.40.1-r1 apk libb2 0.98.1-r3 apk
libncursesw 6.5_p20240601-r0 apk libblkid 2.41-r4 apk
libpanelw 6.5_p20240601-r0 apk libbsd 0.12.2-r0 apk
libpcre2-16 10.43-r0 apk libbz2 1.0.8-r6 apk
libproc2 4.0.4-r0 apk libcrypto3 3.5.0-r0 apk
libproxy 0.5.7-r0 apk libcurl 8.13.0-r1 apk
libpsl 0.21.5-r2 apk libeconf 0.6.3-r0 apk
libssl3 3.3.1-r2 apk libexpat 2.7.1-r0 apk
libstdc++ 13.2.1_git20240309-r0 apk libffi 3.4.8-r0 apk
libtorrent-rasterbar 2.0.10-r2 apk libgcc 14.2.0-r5 apk
libunistring 1.2-r0 apk libgomp 14.2.0-r5 apk
linux-pam 1.6.0-r0 apk libidn2 2.3.7-r0 apk
mpdecimal 4.0.0-r0 apk libintl 0.24-r0 apk
musl 1.2.5-r1 apk libmd 1.1.0-r0 apk
musl-utils 1.2.5-r1 apk libmount 2.41-r4 apk
ncurses-terminfo-base 6.5_p20240601-r0 apk libncursesw 6.5_p20250405-r0 apk
netcat-openbsd 1.226-r0 apk libpanelw 6.5_p20250405-r0 apk
nghttp2-libs 1.62.1-r0 apk libpcre2-16 10.43-r1 apk
oniguruma 6.9.9-r0 apk libproc2 4.0.4-r2 apk
pcre2 10.43-r0 apk libproxy 0.5.9-r0 apk
procps-ng 4.0.4-r0 apk libpsl 0.21.5-r3 apk
pyc 3.12.3-r1 apk libssl3 3.5.0-r0 apk
python3 3.12.3-r1 apk libstdc++ 14.2.0-r5 apk
python3-pyc 3.12.3-r1 apk libtorrent-rasterbar 2.0.11-r0 apk
python3-pycache-pyc0 3.12.3-r1 apk libunistring 1.3-r0 apk
qbittorrent-nox 4.6.5-r0 apk linux-pam 1.7.0-r2 apk
qt6-qtbase 6.6.3-r0 apk mpdecimal 4.0.0-r0 apk
qt6-qtbase-sqlite 6.6.3-r0 apk musl 1.2.5-r10 apk
readline 8.2.10-r0 apk musl-utils 1.2.5-r10 apk
scanelf 1.3.7-r2 apk ncurses-terminfo-base 6.5_p20250405-r0 apk
shadow 4.15.1-r0 apk netcat-openbsd 1.228.1-r0 apk
skalibs 2.14.2.0-r0 apk nghttp2-libs 1.65.0-r0 apk
sqlite-libs 3.46.0-r0 apk oniguruma 6.9.10-r0 apk
ssl_client 1.36.1-r31 apk pcre2 10.43-r1 apk
tzdata 2024a-r1 apk procps-ng 4.0.4-r2 apk
utmps-libs 0.1.2.2-r1 apk pyc 3.12.10-r0 apk
xz-libs 5.6.2-r0 apk python3 3.12.10-r0 apk
zlib 1.3.1-r1 apk python3-pyc 3.12.10-r0 apk
zstd-libs 1.5.6-r0 apk python3-pycache-pyc0 3.12.10-r0 apk
qbittorrent-nox 5.0.4-r0 apk
qbt 1.8.24285.1 dotnet
qt6-qtbase 6.8.2-r1 apk
qt6-qtbase-sqlite 6.8.2-r1 apk
readline 8.2.13-r0 apk
runtimepack.Microsoft.NETCore.App.Runtime.linux-musl-x64 6.0.35 dotnet
scanelf 1.3.8-r1 apk
shadow 4.17.3-r0 apk
skalibs-libs 2.14.3.0-r0 apk
sqlite-libs 3.49.1-r0 apk
ssl_client 1.37.0-r16 apk
tzdata 2025b-r0 apk
utmps-libs 0.1.2.3-r2 apk
xz-libs 5.8.1-r0 apk
zlib 1.3.1-r2 apk
zstd-libs 1.5.7-r0 apk


@@ -6,39 +6,36 @@ project_url: "https://www.qbittorrent.org/"
project_logo: "https://github.com/linuxserver/docker-templates/raw/master/linuxserver.io/img/qbittorrent-icon.png"
project_blurb: "The [{{ project_name|capitalize }}]({{ project_url }}) project aims to provide an open-source software alternative to µTorrent. qBittorrent is based on the Qt toolkit and libtorrent-rasterbar library."
project_lsio_github_repo_url: "https://github.com/linuxserver/docker-{{ project_name }}"
project_categories: "Downloaders"
# supported architectures
available_architectures:
-- { arch: "{{ arch_x86_64 }}", tag: "amd64-latest"}
+- {arch: "{{ arch_x86_64 }}", tag: "amd64-latest"}
-- { arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"}
+- {arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"}
# development version
development_versions: true
development_versions_items:
-- { tag: "latest", desc: "Stable qbittorrent releases" }
+- {tag: "latest", desc: "Stable qbittorrent releases"}
-- { tag: "libtorrentv1", desc: "Static qbittorrent builds using libtorrent v1" }
+- {tag: "libtorrentv1", desc: "Static qbittorrent builds using libtorrent v1"}
# container parameters
common_param_env_vars_enabled: true
param_container_name: "{{ project_name }}"
param_usage_include_vols: true
param_volumes:
-- { vol_path: "/config", vol_host_path: "/path/to/{{ project_name }}/appdata", desc: "Contains all relevant configuration files." }
+- {vol_path: "/config", vol_host_path: "/path/to/{{ project_name }}/appdata", desc: "Contains all relevant configuration files."}
param_usage_include_ports: true
param_ports:
-- { external_port: "8080", internal_port: "8080", port_desc: "WebUI" }
+- {external_port: "8080", internal_port: "8080", port_desc: "WebUI"}
-- { external_port: "6881", internal_port: "6881", port_desc: "tcp connection port" }
+- {external_port: "6881", internal_port: "6881", port_desc: "tcp connection port"}
-- { external_port: "6881", internal_port: "6881/udp", port_desc: "udp connection port" }
+- {external_port: "6881", internal_port: "6881/udp", port_desc: "udp connection port"}
param_usage_include_env: true
param_env_vars:
-- { env_var: "WEBUI_PORT", env_value: "8080", desc: "for changing the port of the web UI, see below for explanation"}
+- {env_var: "WEBUI_PORT", env_value: "8080", desc: "for changing the port of the web UI, see below for explanation"}
-- { env_var: "TORRENTING_PORT", env_value: "6881", desc: "for changing the port of tcp/udp connection, see below for explanation" }
+- {env_var: "TORRENTING_PORT", env_value: "6881", desc: "for changing the port of tcp/udp connection, see below for explanation"}
opt_param_usage_include_vols: true
opt_param_volumes:
-- { vol_path: "/downloads", vol_host_path: "/path/to/downloads", desc: "Location of downloads on disk." }
+- {vol_path: "/downloads", vol_host_path: "/path/to/downloads", desc: "Location of downloads on disk."}
readonly_supported: true
nonroot_supported: true
# application setup block
app_setup_block_enabled: true
app_setup_block: |
@@ -52,55 +49,94 @@ app_setup_block: |
Due to issues with CSRF and port mapping, should you require to alter the port for the web UI you need to change both sides of the -p 8080 switch AND set the WEBUI_PORT variable to the new port.
-For example, to set the port to 8090 you need to set -p 8090:8090 and -e WEBUI_PORT=8090
+For example, to set the port to 8123 you need to set -p 8123:8123 and -e WEBUI_PORT=8123
### TORRENTING_PORT
A bittorrent client can be an active or a passive node. Running your client as an active node has the advantage of being able to connect to both active and passive peers, and can potentially increase the number of incoming connections. This requires an open port on the host machine which might differ from container's internal one.
Similarly to the WEBUI_PORT, to set the port to 6887 you need to pass -p 6887:6887, -p 6887:6887/udp and -e TORRENTING_PORT=6887 arguments to Docker.
# init diagram
init_diagram: |
"qbittorrent:latest": {
docker-mods
base {
fix-attr +\nlegacy cont-init
}
docker-mods -> base
legacy-services
custom services
init-services -> legacy-services
init-services -> custom services
custom services -> legacy-services
legacy-services -> ci-service-check
init-migrations -> init-adduser
init-os-end -> init-config
init-config -> init-config-end
init-crontab-config -> init-config-end
init-qbittorrent-config -> init-config-end
init-config -> init-crontab-config
init-mods-end -> init-custom-files
base -> init-envfile
base -> init-migrations
init-config-end -> init-mods
init-mods-package-install -> init-mods-end
init-mods -> init-mods-package-install
init-adduser -> init-os-end
init-envfile -> init-os-end
init-config -> init-qbittorrent-config
init-custom-files -> init-services
init-services -> svc-cron
svc-cron -> legacy-services
init-services -> svc-qbittorrent
svc-qbittorrent -> legacy-services
}
Base Images: {
"baseimage-alpine:edge"
}
"qbittorrent:latest" <- Base Images
# changelog
changelogs:
- { date: "25.05.24:", desc: "Remove qbittorrent-cli as it still requires openssl 1.1 which is EOL." } - {date: "17.07.24:", desc: "Restore qbittorrent-cli as it now supports openssl 3."}
- { date: "14.02.24:", desc: "Only set/override torrenting port if the optional env var is set." } - {date: "25.05.24:", desc: "Remove qbittorrent-cli as it still requires openssl 1.1 which is EOL."}
- { date: "14.02.24:", desc: "Add torrenting port support." } - {date: "14.02.24:", desc: "Only set/override torrenting port if the optional env var is set."}
- { date: "31.01.24:", desc: "Remove obsolete compat packages."} - {date: "14.02.24:", desc: "Add torrenting port support."}
- { date: "25.12.23:", desc: "Only pull stable releases of qbittorrent-cli."} - {date: "31.01.24:", desc: "Remove obsolete compat packages."}
- { date: "07.10.23:", desc: "Install unrar from [linuxserver repo](https://github.com/linuxserver/docker-unrar)."} - {date: "25.12.23:", desc: "Only pull stable releases of qbittorrent-cli."}
- { date: "10.08.23:", desc: "Bump unrar to 6.2.10."} - {date: "07.10.23:", desc: "Install unrar from [linuxserver repo](https://github.com/linuxserver/docker-unrar)."}
- { date: "17.06.23:", desc: "Deprecate armhf as per [https://www.linuxserver.io/armhf](https://www.linuxserver.io/armhf)." } - {date: "10.08.23:", desc: "Bump unrar to 6.2.10."}
- { date: "10.06.23:", desc: "Bump unrar to 6.2.8." } - {date: "17.06.23:", desc: "Deprecate armhf as per [https://www.linuxserver.io/armhf](https://www.linuxserver.io/armhf)."}
- { date: "23.02.23:", desc: "Add qt6-qtbase-sqlite to support SQLite database for resume files."} - {date: "10.06.23:", desc: "Bump unrar to 6.2.8."}
- { date: "29.11.22:", desc: "Add openssl1.1-compat for qbittorrent-cli." } - {date: "23.02.23:", desc: "Add qt6-qtbase-sqlite to support SQLite database for resume files."}
- { date: "31.10.22:", desc: "Add libtorrentv1 branch." } - {date: "29.11.22:", desc: "Add openssl1.1-compat for qbittorrent-cli."}
- { date: "31.08.22:", desc: "Rebase to Alpine Edge again to follow latest releases." } - {date: "31.10.22:", desc: "Add libtorrentv1 branch."}
- { date: "12.08.22:", desc: "Bump unrar to 6.1.7." } - {date: "31.08.22:", desc: "Rebase to Alpine Edge again to follow latest releases."}
- { date: "16.06.22:", desc: "Rebase to Alpine 3.16 from edge." } - {date: "12.08.22:", desc: "Bump unrar to 6.1.7."}
- { date: "25.05.22:", desc: "Fetch qbitorrent-cli from upstream repo." } - {date: "16.06.22:", desc: "Rebase to Alpine 3.16 from edge."}
- { date: "02.03.22:", desc: "Add unrar, 7zip, and qbitorrent-cli." } - {date: "25.05.22:", desc: "Fetch qbitorrent-cli from upstream repo."}
- { date: "01.03.22:", desc: "Add python for search plugin support." } - {date: "02.03.22:", desc: "Add unrar, 7zip, and qbitorrent-cli."}
- { date: "23.02.22:", desc: "Rebase to Alpine Edge, install from Alpine repos." } - {date: "01.03.22:", desc: "Add python for search plugin support."}
- { date: "19.02.22:", desc: "Add jq to build-stage" } - {date: "23.02.22:", desc: "Rebase to Alpine Edge, install from Alpine repos."}
- { date: "07.01.22:", desc: "Rebase to Alpine, build from source." } - {date: "19.02.22:", desc: "Add jq to build-stage"}
- { date: "06.01.22:", desc: "Deprecate unstable branch." } - {date: "07.01.22:", desc: "Rebase to Alpine, build from source."}
- { date: "10.02.21:", desc: "Rebase to focal." } - {date: "06.01.22:", desc: "Deprecate unstable branch."}
- { date: "20.01.21:", desc: "Deprecate `UMASK_SET` in favor of UMASK in baseimage, see above for more information." } - {date: "10.02.21:", desc: "Rebase to focal."}
- { date: "12.11.20:", desc: "Stop creating /config/data directory on startup" } - {date: "20.01.21:", desc: "Deprecate `UMASK_SET` in favor of UMASK in baseimage, see above for more information."}
- { date: "03.04.20:", desc: "Fix adding search engine plugin" } - {date: "12.11.20:", desc: "Stop creating /config/data directory on startup"}
- { date: "02.08.19:", desc: "Add qbitorrent-cli for processing scripts." } - {date: "03.04.20:", desc: "Fix adding search engine plugin"}
- { date: "23.03.19:", desc: "Switching to new Base images, shift to arm32v7 tag." } - {date: "02.08.19:", desc: "Add qbitorrent-cli for processing scripts."}
- { date: "14.01.19:", desc: "Rebase to Ubuntu, add multi arch and pipeline logic." } - {date: "23.03.19:", desc: "Switching to new Base images, shift to arm32v7 tag."}
- { date: "25.09.18:", desc: "Use buildstage type build, bump qbitorrent to 4.1.3." } - {date: "14.01.19:", desc: "Rebase to Ubuntu, add multi arch and pipeline logic."}
- { date: "14.08.18:", desc: "Rebase to alpine 3.8, bump libtorrent to 1.1.9 and qbitorrent to 4.1.2." } - {date: "25.09.18:", desc: "Use buildstage type build, bump qbitorrent to 4.1.3."}
- { date: "08.06.18:", desc: "Bump qbitorrent to 4.1.1." } - {date: "14.08.18:", desc: "Rebase to alpine 3.8, bump libtorrent to 1.1.9 and qbitorrent to 4.1.2."}
- { date: "26.04.18:", desc: "Bump libtorrent to 1.1.7." } - {date: "08.06.18:", desc: "Bump qbitorrent to 4.1.1."}
- { date: "02.03.18:", desc: "Bump qbitorrent to 4.0.4 and libtorrent to 1.1.6." } - {date: "26.04.18:", desc: "Bump libtorrent to 1.1.7."}
- { date: "02.01.18:", desc: "Deprecate cpu_core routine lack of scaling." } - {date: "02.03.18:", desc: "Bump qbitorrent to 4.0.4 and libtorrent to 1.1.6."}
- { date: "19.12.17:", desc: "Update to v4.0.3." } - {date: "02.01.18:", desc: "Deprecate cpu_core routine lack of scaling."}
- { date: "09.02.17:", desc: "Rebase to alpine 3.7" } - {date: "19.12.17:", desc: "Update to v4.0.3."}
- { date: "01.12.17:", desc: "Update to v4.0.2." } - {date: "09.02.17:", desc: "Rebase to alpine 3.7"}
- { date: "27.11.17:", desc: "Update to v4 and use cpu_core routine to speed up builds." } - {date: "01.12.17:", desc: "Update to v4.0.2."}
- { date: "16.09.17:", desc: "Bump to 3.3.16, Add WEBUI_PORT variable and notes to README to allow changing port of webui." } - {date: "27.11.17:", desc: "Update to v4 and use cpu_core routine to speed up builds."}
- { date: "01.08.17:", desc: "Initial Release." } - {date: "16.09.17:", desc: "Bump to 3.3.16, Add WEBUI_PORT variable and notes to README to allow changing port of webui."}
- { date: "12.02.18:", desc: "Initial Release." } - {date: "01.08.17:", desc: "Initial Release."}
- {date: "12.02.18:", desc: "Initial Release."}


@@ -9,11 +9,13 @@ if [[ ! -f /config/qBittorrent/qBittorrent.conf ]]; then
cp /defaults/qBittorrent.conf /config/qBittorrent/qBittorrent.conf
fi
-# chown download directory if currently not set to abc
-if grep -qe ' /downloads ' /proc/mounts; then
-lsiown abc:abc /downloads
-fi
+if [[ -z ${LSIO_NON_ROOT_USER} ]]; then
+# chown download directory if currently not set to abc
+if grep -qe ' /downloads ' /proc/mounts; then
+lsiown abc:abc /downloads
+fi
# permissions
lsiown -R abc:abc \
/config
+fi


@@ -6,6 +6,16 @@ if [[ -n "${TORRENTING_PORT}" ]]; then
TORRENTING_PORT_ARG="--torrenting-port=${TORRENTING_PORT}"
fi
WEBUI_ADDRESS=$(grep -Po "^WebUI\\\Address=\K(.*)" /config/qBittorrent/qBittorrent.conf)
if [[ -z ${WEBUI_ADDRESS} ]] || [[ ${WEBUI_ADDRESS} == "*" ]]; then
WEBUI_ADDRESS="localhost"
fi
if [[ -z ${LSIO_NON_ROOT_USER} ]]; then
exec \
-s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z localhost ${WEBUI_PORT}" \
+s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z ${WEBUI_ADDRESS} ${WEBUI_PORT}" \
s6-setuidgid abc /usr/bin/qbittorrent-nox --webui-port="${WEBUI_PORT}" ${TORRENTING_PORT_ARG}
else
s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z ${WEBUI_ADDRESS} ${WEBUI_PORT}" \
/usr/bin/qbittorrent-nox --webui-port="${WEBUI_PORT}" ${TORRENTING_PORT_ARG}
fi
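
For context, the `grep -Po` line extracts the `WebUI\Address` key from qBittorrent.conf so the readiness check probes the address the WebUI actually binds to; an empty value or `*` falls back to `localhost`. A quick illustration with a throwaway config (example values only):

```bash
cat > /tmp/qBittorrent.conf <<'EOF'
[Preferences]
WebUI\Address=192.168.1.10
WebUI\Port=8080
EOF

WEBUI_ADDRESS=$(grep -Po "^WebUI\\\Address=\K(.*)" /tmp/qBittorrent.conf)
if [[ -z ${WEBUI_ADDRESS} ]] || [[ ${WEBUI_ADDRESS} == "*" ]]; then
    WEBUI_ADDRESS="localhost"
fi
echo "${WEBUI_ADDRESS}"   # -> 192.168.1.10
```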

root/usr/bin/qbt Executable file

@@ -0,0 +1,15 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
# qbt bash wrapper to prompt user when trying to save password
if [[ "$@" == "settings set password" ]]; then
echo "Setting password is not supported"
echo "Please use --ask-for-password or --password"
elif [[ "$@" == "settings set"* ]]; then
/qbt/qbt "$@"
elif [[ "$@" != *"--ask-for-password"* ]] && [[ "$@" != *"--password"* ]];then
echo "Please use --ask-for-password or --password and ensure username/url are set"
/qbt/qbt "$@"
else
/qbt/qbt "$@"
fi