Mirror of https://github.com/linuxserver/docker-transmission.git (synced 2025-12-17 21:49:44 +01:00)

Compare commits: 4.0.3-r3-l ... 4.0.6-r4-l (188 commits)
Commits (SHA1):

950b8b98c9, 09e0ed6728, 9013f1b551, df00bc1995, ddeea3fc6a, 787fa45993, 89c38bcff0, 7f8d6b2122,
ba5848adb1, 6b3500a710, a4f703ce23, 8426201f56, 556f938946, 5396b05953, 2121759d85, 9d7f5b8fc2,
b13ea92136, 26611c9803, 2fbe193446, 2b0e4243fc, d9d11b3d3f, c5c77adaf7, 0c3b360ff1, a68085138b,
b18d88a011, 2424de4ae2, ce44e98db1, 0b493643b5, 54f6391848, 52fc6098ff, 2c2f302bb7, 06402dac36,
376ada4699, d406a62e8d, 89eefd98f1, a021304dd5, 06cea3bf41, 0c3ef76e9a, 870b650316, b09379a882,
66316f1a0f, 8aea6f6d67, c1f7c7da44, f2f110774b, f6e29f5352, 858c3aff7e, 25a7630ee5, fb101e5b40,
a8ab7adc6d, a21d375281, e465f05167, d5950b26e3, 79c867a1ed, fdfef16f33, 904d4dc3bb, bf24b381a6,
c7b64ddea3, e8ddb2f801, c39362988a, 565a03f117, cdab98e14d, 32172fa838, e40526e7da, 399cf97321,
d2e8a1000a, 42e8761c94, 8d55ed33f3, 4ca680899e, 68c47d2e57, 50c356ba66, 409c4e29b4, 7016756163,
d0c0ae7618, aac8cfb790, ad901f73c6, d8c01632d1, 10d5f56917, 2178304e18, ae38e8f243, 277c3212ff,
396b3964c6, 0a5b09c781, 1d5b57712b, 655e8c79ac, 4b77b1cec4, b33616a846, 474d935853, 59b4095cf2,
20cf81ec2e, a63cd602e6, eb794378a8, f342e8f85e, d7a929c643, 66588bb0c7, 80ed2aebb0, a1a8556f28,
6367901db7, 2aaa5fb095, 3856459b15, 361d87862f, 208f36ac8f, 9a66877360, 5930ba888c, f379f9aebc,
0af7946af1, 78bcde2706, 015b1ddfc0, b78e6966a3, 10277f6ab8, a4cfb9ba00, b5a49c95b9, 3e3513ee1f,
d076799e74, de4f2e06d9, c41139d8b0, 13d281cddd, 897c15f760, 49165bf633, 69f85214d5, e931494071,
841d52fc86, cb5c9b8111, be30a606d9, 776bb25bf7, 7f98ad2352, e5108b6b9a, 9d746b7a58, 7f0a11f194,
361c224810, 2b98bc1130, f29dd4a2d2, 0891066012, 1d9d237ac6, 96b4f67778, 33d58cc41f, 651d26fd9e,
ad00ae6b25, 1d8e6ded34, c94de7d060, fab1d0578a, 1dde688650, 43569603c8, 548d62e5fe, 0d5a224ccc,
9d3b2caa2f, 928cb57631, 6bfae65164, 17bfb9a85b, 8441fac1cd, 97ac2d0759, 24b82ae1bb, 108f4677b0,
94238ec4b1, 8c094ee356, 1f4eec76d5, b62dd78598, 4f62144bdb, d4f8db4f1f, 86e448c624, 8549393003,
920e7de968, c52be375b3, c755500a5a, 882a787357, ed573643f1, d2ba673849, c854898c22, 965c79c3e8,
1ac66ddc8c, b9c5cec111, e20c68e5f6, 540a30a245, 4b6dcf748c, 3a1ad52e23, 302cb6137a, 11d2e3b61a,
0ff8ff04d5, 38859997aa, 3cb761689c, bae31afddb, bed6a80004, c73d76e4c6, 4fbe981509, bb807f43d5,
8b00f02b3b, a23c29e5d8, e68202edfb, 0eb180ec99
.github/CONTRIBUTING.md (vendored, 6 changed lines)

@@ -6,7 +6,7 @@
 * Read, and fill the Pull Request template
 * If this is a fix for a typo (in code, documentation, or the README) please file an issue and let us sort it out. We do not need a PR
 * If the PR is addressing an existing issue include, closes #\<issue number>, in the body of the PR commit message
-* If you want to discuss changes, you can also bring it up in [#dev-talk](https://discordapp.com/channels/354974912613449730/757585807061155840) in our [Discord server](https://discord.gg/YWrKVTn)
+* If you want to discuss changes, you can also bring it up in [#dev-talk](https://discordapp.com/channels/354974912613449730/757585807061155840) in our [Discord server](https://linuxserver.io/discord)
 
 ## Common files
 
@@ -105,10 +105,10 @@ docker build \
   -t linuxserver/transmission:latest .
 ```
 
-The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static`
+The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static`
 
 ```bash
-docker run --rm --privileged multiarch/qemu-user-static:register --reset
+docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset
 ```
 
 Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`.
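For context on the cross-building step the CONTRIBUTING change swaps in, the sketch below strings the two documented commands together: register the qemu binfmt handlers once, then build the arm64 variant on an x86_64 host. The output tag `transmission:aarch64-test` is only an illustrative name; this is a minimal sketch, not the project's official build pipeline.

```bash
#!/usr/bin/env bash
# Minimal local cross-build sketch, assuming the qemu registration image shown
# in the diff above; the image tag is a placeholder for illustration.
set -euo pipefail

# Register qemu user-mode emulation handlers on the x86_64 host (one-off per boot).
docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset

# Build the arm64 variant using the architecture-specific Dockerfile.
docker build \
  -f Dockerfile.aarch64 \
  -t linuxserver/transmission:aarch64-test .
```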
.github/ISSUE_TEMPLATE/config.yml (vendored, 2 changed lines)

@@ -1,7 +1,7 @@
 blank_issues_enabled: false
 contact_links:
   - name: Discord chat support
-    url: https://discord.gg/YWrKVTn
+    url: https://linuxserver.io/discord
     about: Realtime support / chat with the community and the team.
 
   - name: Discourse discussion forum
.github/ISSUE_TEMPLATE/issue.bug.yml (vendored, 4 changed lines)

@@ -67,10 +67,10 @@ body:
   - type: textarea
     attributes:
       description: |
-        Provide a full docker log, output of "docker logs linuxserver.io"
+        Provide a full docker log, output of "docker logs transmission"
      label: Container logs
      placeholder: |
-        Output of `docker logs linuxserver.io`
+        Output of `docker logs transmission`
      render: bash
    validations:
      required: true
.github/workflows/call_issue_pr_tracker.yml (vendored, 3 changed lines)

@@ -8,6 +8,9 @@ on:
   pull_request_review:
     types: [submitted,edited,dismissed]
 
+permissions:
+  contents: read
+
 jobs:
   manage-project:
     permissions:
.github/workflows/call_issues_cron.yml (vendored, 3 changed lines)

@@ -4,6 +4,9 @@ on:
     - cron: '5 19 * * *'
   workflow_dispatch:
 
+permissions:
+  contents: read
+
 jobs:
   stale:
     permissions:
.github/workflows/external_trigger.yml (vendored, 148 changed lines)

@@ -3,27 +3,43 @@ name: External Trigger Main
 on:
   workflow_dispatch:
 
+permissions:
+  contents: read
+
 jobs:
   external-trigger-master:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v4.1.1
 
       - name: External Trigger
         if: github.ref == 'refs/heads/master'
+        env:
+          SKIP_EXTERNAL_TRIGGER: ${{ vars.SKIP_EXTERNAL_TRIGGER }}
         run: |
-          if [ -n "${{ secrets.PAUSE_EXTERNAL_TRIGGER_TRANSMISSION_MASTER }}" ]; then
-            echo "**** Github secret PAUSE_EXTERNAL_TRIGGER_TRANSMISSION_MASTER is set; skipping trigger. ****"
-            echo "Github secret \`PAUSE_EXTERNAL_TRIGGER_TRANSMISSION_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
+          printf "# External trigger for docker-transmission\n\n" >> $GITHUB_STEP_SUMMARY
+          if grep -q "^transmission_master_" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
+            echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
+            echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` contains \`transmission_master_\`; will skip trigger if version matches." >> $GITHUB_STEP_SUMMARY
+          elif grep -q "^transmission_master" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` contains \`transmission_master\`; skipping trigger." >> $GITHUB_STEP_SUMMARY
             exit 0
           fi
-          echo "**** External trigger running off of master branch. To disable this trigger, set a Github secret named \"PAUSE_EXTERNAL_TRIGGER_TRANSMISSION_MASTER\". ****"
-          echo "External trigger running off of master branch. To disable this trigger, set a Github secret named \`PAUSE_EXTERNAL_TRIGGER_TRANSMISSION_MASTER\`" >> $GITHUB_STEP_SUMMARY
-          echo "**** Retrieving external version ****"
+          echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
+          echo "> External trigger running off of master branch. To disable this trigger, add \`transmission_master\` into the Github organizational variable \`SKIP_EXTERNAL_TRIGGER\`." >> $GITHUB_STEP_SUMMARY
+          printf "\n## Retrieving external version\n\n" >> $GITHUB_STEP_SUMMARY
           EXT_RELEASE=$(curl -sL "http://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz" | tar -xz -C /tmp \
             && awk '/^P:'"transmission-daemon"'$/,/V:/' /tmp/APKINDEX | sed -n 2p | sed 's/^V://')
+          echo "Type is \`alpine_repo\`" >> $GITHUB_STEP_SUMMARY
+          if grep -q "^transmission_master_${EXT_RELEASE}" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` matches current external release; skipping trigger." >> $GITHUB_STEP_SUMMARY
+            exit 0
+          fi
          if [ -z "${EXT_RELEASE}" ] || [ "${EXT_RELEASE}" == "null" ]; then
-            echo "**** Can't retrieve external version, exiting ****"
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "> Can't retrieve external version, exiting" >> $GITHUB_STEP_SUMMARY
            FAILURE_REASON="Can't retrieve external version for transmission branch master"
            GHA_TRIGGER_URL="https://github.com/linuxserver/docker-transmission/actions/runs/${{ github.run_id }}"
            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680,
@@ -31,25 +47,43 @@ jobs:
            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
            exit 1
          fi
-          EXT_RELEASE=$(echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g')
-          echo "**** External version: ${EXT_RELEASE} ****"
-          echo "External version: ${EXT_RELEASE}" >> $GITHUB_STEP_SUMMARY
-          echo "**** Retrieving last pushed version ****"
+          EXT_RELEASE_SANITIZED=$(echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g')
+          echo "Sanitized external version: \`${EXT_RELEASE_SANITIZED}\`" >> $GITHUB_STEP_SUMMARY
+          echo "Retrieving last pushed version" >> $GITHUB_STEP_SUMMARY
          image="linuxserver/transmission"
          tag="latest"
          token=$(curl -sX GET \
            "https://ghcr.io/token?scope=repository%3Alinuxserver%2Ftransmission%3Apull" \
            | jq -r '.token')
          multidigest=$(curl -s \
+            --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+            --header "Accept: application/vnd.oci.image.index.v1+json" \
+            --header "Authorization: Bearer ${token}" \
+            "https://ghcr.io/v2/${image}/manifests/${tag}")
+          if jq -e '.layers // empty' <<< "${multidigest}" >/dev/null 2>&1; then
+            # If there's a layer element it's a single-arch manifest so just get that digest
+            digest=$(jq -r '.config.digest' <<< "${multidigest}")
+          else
+            # Otherwise it's multi-arch or has manifest annotations
+            if jq -e '.manifests[]?.annotations // empty' <<< "${multidigest}" >/dev/null 2>&1; then
+              # Check for manifest annotations and delete if found
+              multidigest=$(jq 'del(.manifests[] | select(.annotations))' <<< "${multidigest}")
+            fi
+            if [[ $(jq '.manifests | length' <<< "${multidigest}") -gt 1 ]]; then
+              # If there's still more than one digest, it's multi-arch
+              multidigest=$(jq -r ".manifests[] | select(.platform.architecture == \"amd64\").digest?" <<< "${multidigest}")
+            else
+              # Otherwise it's single arch
+              multidigest=$(jq -r ".manifests[].digest?" <<< "${multidigest}")
+            fi
+            if digest=$(curl -s \
            --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+            --header "Accept: application/vnd.oci.image.manifest.v1+json" \
            --header "Authorization: Bearer ${token}" \
-            "https://ghcr.io/v2/${image}/manifests/${tag}" \
-            | jq -r 'first(.manifests[].digest)')
-          digest=$(curl -s \
-            --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
-            --header "Authorization: Bearer ${token}" \
-            "https://ghcr.io/v2/${image}/manifests/${multidigest}" \
-            | jq -r '.config.digest')
+            "https://ghcr.io/v2/${image}/manifests/${multidigest}"); then
+              digest=$(jq -r '.config.digest' <<< "${digest}");
+            fi
+          fi
          image_info=$(curl -sL \
            --header "Authorization: Bearer ${token}" \
            "https://ghcr.io/v2/${image}/blobs/${digest}")
@@ -61,53 +95,61 @@ jobs:
          IMAGE_RELEASE=$(echo ${image_info} | jq -r '.Labels.build_version' | awk '{print $3}')
          IMAGE_VERSION=$(echo ${IMAGE_RELEASE} | awk -F'-ls' '{print $1}')
          if [ -z "${IMAGE_VERSION}" ]; then
-            echo "**** Can't retrieve last pushed version, exiting ****"
+            echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+            echo "Can't retrieve last pushed version, exiting" >> $GITHUB_STEP_SUMMARY
            FAILURE_REASON="Can't retrieve last pushed version for transmission tag latest"
            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680,
            "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
            exit 1
          fi
-          echo "**** Last pushed version: ${IMAGE_VERSION} ****"
-          echo "Last pushed version: ${IMAGE_VERSION}" >> $GITHUB_STEP_SUMMARY
-          if [ "${EXT_RELEASE}" == "${IMAGE_VERSION}" ]; then
-            echo "**** Version ${EXT_RELEASE} already pushed, exiting ****"
-            echo "Version ${EXT_RELEASE} already pushed, exiting" >> $GITHUB_STEP_SUMMARY
+          echo "Last pushed version: \`${IMAGE_VERSION}\`" >> $GITHUB_STEP_SUMMARY
+          if [ "${EXT_RELEASE_SANITIZED}" == "${IMAGE_VERSION}" ]; then
+            echo "Sanitized version \`${EXT_RELEASE_SANITIZED}\` already pushed, exiting" >> $GITHUB_STEP_SUMMARY
            exit 0
          elif [[ $(curl -sL "http://dl-cdn.alpinelinux.org/alpine/edge/community/aarch64/APKINDEX.tar.gz" | tar -xz -C /tmp && awk '/^P:'"transmission-daemon"'$/,/V:/' /tmp/APKINDEX | sed -n 2p | sed 's/^V://') != "${EXT_RELEASE}" ]]; then
-            echo "**** New version ${EXT_RELEASE} found; but not all arch repos updated yet; exiting ****"
-            echo "New version ${EXT_RELEASE} found; but not all arch repos updated yet; exiting" >> $GITHUB_STEP_SUMMARY
+            echo "New version \`${EXT_RELEASE}\` found; but not all arch repos updated yet; exiting" >> $GITHUB_STEP_SUMMARY
            FAILURE_REASON="New version ${EXT_RELEASE} for transmission tag latest is detected, however not all arch repos are updated yet. Will try again later."
            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
            "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
            exit 0
          elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
-            echo "**** New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting ****"
-            echo "New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting" >> $GITHUB_STEP_SUMMARY
+            echo "New version \`${EXT_RELEASE}\` found; but there already seems to be an active build on Jenkins; exiting" >> $GITHUB_STEP_SUMMARY
            exit 0
          else
-            echo "**** New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build ****"
-            echo "New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build" >> $GITHUB_STEP_SUMMARY
-            response=$(curl -iX POST \
-              https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/master/buildWithParameters?PACKAGE_CHECK=false \
-              --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
-            echo "**** Jenkins job queue url: ${response%$'\r'} ****"
-            echo "**** Sleeping 10 seconds until job starts ****"
-            sleep 10
-            buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
-            buildurl="${buildurl%$'\r'}"
-            echo "**** Jenkins job build url: ${buildurl} ****"
-            echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
-            echo "**** Attempting to change the Jenkins job description ****"
-            curl -iX POST \
-              "${buildurl}submitDescription" \
-              --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
-              --data-urlencode "description=GHA external trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
-              --data-urlencode "Submit=Submit"
-            echo "**** Notifying Discord ****"
-            TRIGGER_REASON="A version change was detected for transmission tag latest. Old version:${IMAGE_VERSION} New version:${EXT_RELEASE}"
-            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
-            "description": "**Build Triggered** \n**Reason:** '"${TRIGGER_REASON}"' \n**Build URL:** '"${buildurl}display/redirect"' \n"}],
-            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+            if [[ "${artifacts_found}" == "false" ]]; then
+              echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+              echo "> New version detected, but not all artifacts are published yet; skipping trigger" >> $GITHUB_STEP_SUMMARY
+              FAILURE_REASON="New version ${EXT_RELEASE} for transmission tag latest is detected, however not all artifacts are uploaded to upstream release yet. Will try again later."
+              curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
+              "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
+              "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+            else
+              printf "\n## Trigger new build\n\n" >> $GITHUB_STEP_SUMMARY
+              echo "New sanitized version \`${EXT_RELEASE_SANITIZED}\` found; old version was \`${IMAGE_VERSION}\`. Triggering new build" >> $GITHUB_STEP_SUMMARY
+              if [[ "${artifacts_found}" == "true" ]]; then
+                echo "All artifacts seem to be uploaded." >> $GITHUB_STEP_SUMMARY
+              fi
+              response=$(curl -iX POST \
+                https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/master/buildWithParameters?PACKAGE_CHECK=false \
+                --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
+              echo "Jenkins [job queue url](${response%$'\r'})" >> $GITHUB_STEP_SUMMARY
+              echo "Sleeping 10 seconds until job starts" >> $GITHUB_STEP_SUMMARY
+              sleep 10
+              buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
+              buildurl="${buildurl%$'\r'}"
+              echo "Jenkins job [build url](${buildurl})" >> $GITHUB_STEP_SUMMARY
+              echo "Attempting to change the Jenkins job description" >> $GITHUB_STEP_SUMMARY
+              curl -iX POST \
+                "${buildurl}submitDescription" \
+                --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
+                --data-urlencode "description=GHA external trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
+                --data-urlencode "Submit=Submit"
+              echo "**** Notifying Discord ****"
+              TRIGGER_REASON="A version change was detected for transmission tag latest. Old version:${IMAGE_VERSION} New version:${EXT_RELEASE_SANITIZED}"
+              curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
+              "description": "**Build Triggered** \n**Reason:** '"${TRIGGER_REASON}"' \n**Build URL:** '"${buildurl}display/redirect"' \n"}],
+              "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+            fi
          fi
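The new manifest-handling logic in that workflow is easier to follow as a standalone script. The sketch below is an illustrative, hedged version of the same token → manifest → config-blob flow against ghcr.io, assuming anonymous pull access and the `amd64` platform; it is not the production script, and the final label path includes a fallback because the exact blob layout can differ.

```bash
#!/usr/bin/env bash
# Minimal sketch: read the build_version label of the last pushed ghcr.io image.
# Mirrors the workflow's logic above but is simplified for illustration.
set -euo pipefail

image="linuxserver/transmission"
tag="latest"

# Anonymous pull token for the repository.
token=$(curl -s "https://ghcr.io/token?scope=repository%3Alinuxserver%2Ftransmission%3Apull" | jq -r '.token')

# Fetch the (possibly multi-arch) manifest.
manifest=$(curl -s \
  --header "Accept: application/vnd.oci.image.index.v1+json" \
  --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
  --header "Authorization: Bearer ${token}" \
  "https://ghcr.io/v2/${image}/manifests/${tag}")

if jq -e '.layers // empty' <<< "${manifest}" >/dev/null; then
  # Single-arch manifest: the config digest is directly available.
  config_digest=$(jq -r '.config.digest' <<< "${manifest}")
else
  # Multi-arch index: pick the amd64 entry, then fetch its manifest for the config digest.
  arch_digest=$(jq -r '.manifests[] | select(.platform.architecture == "amd64") | .digest' <<< "${manifest}")
  config_digest=$(curl -s \
    --header "Accept: application/vnd.oci.image.manifest.v1+json" \
    --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
    --header "Authorization: Bearer ${token}" \
    "https://ghcr.io/v2/${image}/manifests/${arch_digest}" | jq -r '.config.digest')
fi

# The config blob carries the image labels, including build_version.
# Label location is assumed: usually .config.Labels, with .Labels as a fallback.
curl -sL --header "Authorization: Bearer ${token}" \
  "https://ghcr.io/v2/${image}/blobs/${config_digest}" \
  | jq -r '.config.Labels.build_version // .Labels.build_version'
```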
.github/workflows/external_trigger_scheduler.yml (vendored, 29 changed lines)

@@ -5,41 +5,44 @@ on:
     - cron: '1 * * * *'
   workflow_dispatch:
 
+permissions:
+  contents: read
+
 jobs:
   external-trigger-scheduler:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v4.1.1
        with:
          fetch-depth: '0'
 
      - name: External Trigger Scheduler
        run: |
-          echo "**** Branches found: ****"
-          git for-each-ref --format='%(refname:short)' refs/remotes
-          for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
+          printf "# External trigger scheduler for docker-transmission\n\n" >> $GITHUB_STEP_SUMMARY
+          printf "Found the branches:\n\n%s\n" "$(git for-each-ref --format='- %(refname:lstrip=3)' refs/remotes)" >> $GITHUB_STEP_SUMMARY
+          for br in $(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes)
          do
-            br=$(echo "$br" | sed 's|origin/||g')
-            echo "**** Evaluating branch ${br} ****"
+            if [[ "${br}" == "HEAD" ]]; then
+              printf "\nSkipping %s.\n" ${br} >> $GITHUB_STEP_SUMMARY
+              continue
+            fi
+            printf "\n## Evaluating \`%s\`\n\n" ${br} >> $GITHUB_STEP_SUMMARY
            ls_jenkins_vars=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}/jenkins-vars.yml)
            ls_branch=$(echo "${ls_jenkins_vars}" | yq -r '.ls_branch')
            ls_trigger=$(echo "${ls_jenkins_vars}" | yq -r '.external_type')
            if [[ "${br}" == "${ls_branch}" ]] && [[ "${ls_trigger}" != "os" ]]; then
-              echo "**** Branch ${br} appears to be live and trigger is not os; checking workflow. ****"
+              echo "Branch appears to be live and trigger is not os; checking workflow." >> $GITHUB_STEP_SUMMARY
              if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}/.github/workflows/external_trigger.yml > /dev/null 2>&1; then
-                echo "**** Workflow exists. Triggering external trigger workflow for branch ${br} ****."
-                echo "Triggering external trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
+                echo "Triggering external trigger workflow for branch." >> $GITHUB_STEP_SUMMARY
                curl -iX POST \
                  -H "Authorization: token ${{ secrets.CR_PAT }}" \
                  -H "Accept: application/vnd.github.v3+json" \
                  -d "{\"ref\":\"refs/heads/${br}\"}" \
                  https://api.github.com/repos/linuxserver/docker-transmission/actions/workflows/external_trigger.yml/dispatches
              else
-                echo "**** Workflow doesn't exist; skipping trigger. ****"
-                echo "Skipping branch ${br} due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
+                echo "Skipping branch due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
              fi
            else
-              echo "**** ${br} is either a dev branch, or has no external version; skipping trigger. ****"
-              echo "Skipping branch ${br} due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
+              echo "Skipping branch due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
            fi
          done
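The loop above switched from `%(refname:short)` plus a `sed 's|origin/||'` to `%(refname:lstrip=3)`, which strips `refs/remotes/origin/` directly but also surfaces the remote's symbolic `HEAD` ref, hence the new skip. A quick illustration of the difference; the output shown in comments is an assumption about a typical clone with a single `master` branch:

```bash
# refname:short keeps the remote prefix, so the old code needed sed to drop "origin/".
git for-each-ref --format='%(refname:short)' refs/remotes
# origin
# origin/master

# refname:lstrip=3 removes "refs/remotes/origin/" outright, but the remote HEAD
# symref now appears as a bare "HEAD" entry that the loop has to skip.
git for-each-ref --format='%(refname:lstrip=3)' refs/remotes
# HEAD
# master
```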
.github/workflows/greetings.yml (vendored, 6 changed lines)

@@ -2,8 +2,14 @@ name: Greetings
 
 on: [pull_request_target, issues]
 
+permissions:
+  contents: read
+
 jobs:
   greeting:
+    permissions:
+      issues: write
+      pull-requests: write
     runs-on: ubuntu-latest
     steps:
       - uses: actions/first-interaction@v1
.github/workflows/package_trigger.yml (vendored, 42 changed lines, file removed)

@@ -1,42 +0,0 @@
-name: Package Trigger Main
-
-on:
-  workflow_dispatch:
-
-jobs:
-  package-trigger-master:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3.1.0
-
-      - name: Package Trigger
-        if: github.ref == 'refs/heads/master'
-        run: |
-          if [ -n "${{ secrets.PAUSE_PACKAGE_TRIGGER_TRANSMISSION_MASTER }}" ]; then
-            echo "**** Github secret PAUSE_PACKAGE_TRIGGER_TRANSMISSION_MASTER is set; skipping trigger. ****"
-            echo "Github secret \`PAUSE_PACKAGE_TRIGGER_TRANSMISSION_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
-            exit 0
-          fi
-          if [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
-            echo "**** There already seems to be an active build on Jenkins; skipping package trigger ****"
-            echo "There already seems to be an active build on Jenkins; skipping package trigger" >> $GITHUB_STEP_SUMMARY
-            exit 0
-          fi
-          echo "**** Package trigger running off of master branch. To disable, set a Github secret named \"PAUSE_PACKAGE_TRIGGER_TRANSMISSION_MASTER\". ****"
-          echo "Package trigger running off of master branch. To disable, set a Github secret named \`PAUSE_PACKAGE_TRIGGER_TRANSMISSION_MASTER\`" >> $GITHUB_STEP_SUMMARY
-          response=$(curl -iX POST \
-            https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/master/buildWithParameters?PACKAGE_CHECK=true \
-            --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
-          echo "**** Jenkins job queue url: ${response%$'\r'} ****"
-          echo "**** Sleeping 10 seconds until job starts ****"
-          sleep 10
-          buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
-          buildurl="${buildurl%$'\r'}"
-          echo "**** Jenkins job build url: ${buildurl} ****"
-          echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
-          echo "**** Attempting to change the Jenkins job description ****"
-          curl -iX POST \
-            "${buildurl}submitDescription" \
-            --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
-            --data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
-            --data-urlencode "Submit=Submit"
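The deleted workflow above (and its replacement logic folded into the scheduler below) drives Jenkins through its remote-build API: POST to `buildWithParameters`, read the queue item URL from the `Location` response header, then ask the queue item for the started build's URL. A compact sketch of that sequence, with placeholder credentials, assuming a Jenkins instance that exposes the standard queue API:

```bash
#!/usr/bin/env bash
# Illustrative sketch of the Jenkins trigger flow used by these workflows.
# JENKINS_USER/JENKINS_TOKEN and the job path are placeholders.
set -euo pipefail

job="https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/master"

# 1. Queue the build; Jenkins answers with a Location header pointing at the queue item.
queue_url=$(curl -siX POST "${job}/buildWithParameters?PACKAGE_CHECK=true" \
  --user "${JENKINS_USER}:${JENKINS_TOKEN}" \
  | grep -i '^location:' | awk '{print $2}' | tr -d '\r')

# 2. Give the queue a moment to assign an executor.
sleep 10

# 3. The queue item's JSON gains an "executable" object once the build starts.
build_url=$(curl -s "${queue_url}api/json" \
  --user "${JENKINS_USER}:${JENKINS_TOKEN}" | jq -r '.executable.url')

echo "Build started at: ${build_url}"
```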
.github/workflows/package_trigger_scheduler.yml (vendored, 107 changed lines)

@@ -5,46 +5,99 @@ on:
     - cron: '15 10 * * 2'
   workflow_dispatch:
 
+permissions:
+  contents: read
+
 jobs:
   package-trigger-scheduler:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v4.1.1
        with:
          fetch-depth: '0'
 
      - name: Package Trigger Scheduler
+        env:
+          SKIP_PACKAGE_TRIGGER: ${{ vars.SKIP_PACKAGE_TRIGGER }}
        run: |
-          echo "**** Branches found: ****"
-          git for-each-ref --format='%(refname:short)' refs/remotes
-          for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
+          printf "# Package trigger scheduler for docker-transmission\n\n" >> $GITHUB_STEP_SUMMARY
+          printf "Found the branches:\n\n%s\n" "$(git for-each-ref --format='- %(refname:lstrip=3)' refs/remotes)" >> $GITHUB_STEP_SUMMARY
+          for br in $(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes)
          do
-            br=$(echo "$br" | sed 's|origin/||g')
-            echo "**** Evaluating branch ${br} ****"
-            ls_branch=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}/jenkins-vars.yml | yq -r '.ls_branch')
-            if [ "${br}" == "${ls_branch}" ]; then
-              echo "**** Branch ${br} appears to be live; checking workflow. ****"
-              if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}/.github/workflows/package_trigger.yml > /dev/null 2>&1; then
-                echo "**** Workflow exists. Triggering package trigger workflow for branch ${br}. ****"
-                echo "Triggering package trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
-                triggered_branches="${triggered_branches}${br} "
-                curl -iX POST \
-                  -H "Authorization: token ${{ secrets.CR_PAT }}" \
-                  -H "Accept: application/vnd.github.v3+json" \
-                  -d "{\"ref\":\"refs/heads/${br}\"}" \
-                  https://api.github.com/repos/linuxserver/docker-transmission/actions/workflows/package_trigger.yml/dispatches
-                sleep 30
+            if [[ "${br}" == "HEAD" ]]; then
+              printf "\nSkipping %s.\n" ${br} >> $GITHUB_STEP_SUMMARY
+              continue
+            fi
+            printf "\n## Evaluating \`%s\`\n\n" ${br} >> $GITHUB_STEP_SUMMARY
+            JENKINS_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}/jenkins-vars.yml)
+            if ! curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}/Jenkinsfile >/dev/null 2>&1; then
+              echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+              echo "> No Jenkinsfile found. Branch is either deprecated or is an early dev branch." >> $GITHUB_STEP_SUMMARY
+              skipped_branches="${skipped_branches}${br} "
+            elif [[ "${br}" == $(yq -r '.ls_branch' <<< "${JENKINS_VARS}") ]]; then
+              echo "Branch appears to be live; checking workflow." >> $GITHUB_STEP_SUMMARY
+              README_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}/readme-vars.yml)
+              if [[ $(yq -r '.project_deprecation_status' <<< "${README_VARS}") == "true" ]]; then
+                echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+                echo "> Branch appears to be deprecated; skipping trigger." >> $GITHUB_STEP_SUMMARY
+                skipped_branches="${skipped_branches}${br} "
+              elif [[ $(yq -r '.skip_package_check' <<< "${JENKINS_VARS}") == "true" ]]; then
+                echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+                echo "> Skipping branch ${br} due to \`skip_package_check\` being set in \`jenkins-vars.yml\`." >> $GITHUB_STEP_SUMMARY
+                skipped_branches="${skipped_branches}${br} "
+              elif grep -q "^transmission_${br}" <<< "${SKIP_PACKAGE_TRIGGER}"; then
+                echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+                echo "> Github organizational variable \`SKIP_PACKAGE_TRIGGER\` contains \`transmission_${br}\`; skipping trigger." >> $GITHUB_STEP_SUMMARY
+                skipped_branches="${skipped_branches}${br} "
+              elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/${br}/lastBuild/api/json | jq -r '.building' 2>/dev/null) == "true" ]; then
+                echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+                echo "> There already seems to be an active build on Jenkins; skipping package trigger for ${br}" >> $GITHUB_STEP_SUMMARY
+                skipped_branches="${skipped_branches}${br} "
              else
-                echo "**** Workflow doesn't exist; skipping trigger. ****"
-                echo "Skipping branch ${br} due to no package trigger workflow present." >> $GITHUB_STEP_SUMMARY
+                echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
+                echo "> Triggering package trigger for branch ${br}" >> $GITHUB_STEP_SUMMARY
+                printf "> To disable, add \`transmission_%s\` into the Github organizational variable \`SKIP_PACKAGE_TRIGGER\`.\n\n" "${br}" >> $GITHUB_STEP_SUMMARY
+                triggered_branches="${triggered_branches}${br} "
+                response=$(curl -iX POST \
+                  https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-transmission/job/${br}/buildWithParameters?PACKAGE_CHECK=true \
+                  --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
+                if [[ -z "${response}" ]]; then
+                  echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+                  echo "> Jenkins build could not be triggered. Skipping branch."
+                  continue
+                fi
+                echo "Jenkins [job queue url](${response%$'\r'})" >> $GITHUB_STEP_SUMMARY
+                echo "Sleeping 10 seconds until job starts" >> $GITHUB_STEP_SUMMARY
+                sleep 10
+                buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
+                buildurl="${buildurl%$'\r'}"
+                echo "Jenkins job [build url](${buildurl})" >> $GITHUB_STEP_SUMMARY
+                echo "Attempting to change the Jenkins job description" >> $GITHUB_STEP_SUMMARY
+                if ! curl -ifX POST \
+                  "${buildurl}submitDescription" \
+                  --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
+                  --data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
+                  --data-urlencode "Submit=Submit"; then
+                  echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
+                  echo "> Unable to change the Jenkins job description."
+                fi
+                sleep 20
              fi
            else
-              echo "**** ${br} appears to be a dev branch; skipping trigger. ****"
              echo "Skipping branch ${br} due to being detected as dev branch." >> $GITHUB_STEP_SUMMARY
            fi
          done
-          echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****"
-          echo "**** Notifying Discord ****"
-          curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
-          "description": "**Package Check Build(s) Triggered for transmission** \n**Branch(es):** '"${triggered_branches}"' \n**Build URL:** '"https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-transmission/activity/"' \n"}],
-          "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+          if [[ -n "${triggered_branches}" ]] || [[ -n "${skipped_branches}" ]]; then
+            if [[ -n "${triggered_branches}" ]]; then
+              NOTIFY_BRANCHES="**Triggered:** ${triggered_branches} \n"
+              NOTIFY_BUILD_URL="**Build URL:** https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-transmission/activity/ \n"
+              echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****"
+            fi
+            if [[ -n "${skipped_branches}" ]]; then
+              NOTIFY_BRANCHES="${NOTIFY_BRANCHES}**Skipped:** ${skipped_branches} \n"
+            fi
+            echo "**** Notifying Discord ****"
+            curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
+            "description": "**Package Check Build(s) for transmission** \n'"${NOTIFY_BRANCHES}"''"${NOTIFY_BUILD_URL}"'"}],
+            "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
+          fi
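Most of the new gating above hinges on two YAML files fetched from the branch: `jenkins-vars.yml` (is this the live branch, should package checks be skipped) and `readme-vars.yml` (is the project deprecated). A condensed, purely illustrative sketch of those checks, assuming the same yq-based queries as the workflow; the branch name is a placeholder:

```bash
#!/usr/bin/env bash
# Condensed sketch of the per-branch gating logic, for illustration only.
set -euo pipefail

br="master"   # placeholder branch name
base="https://raw.githubusercontent.com/linuxserver/docker-transmission/${br}"

jenkins_vars=$(curl -s "${base}/jenkins-vars.yml")
readme_vars=$(curl -s "${base}/readme-vars.yml")

if [[ "${br}" != "$(yq -r '.ls_branch' <<< "${jenkins_vars}")" ]]; then
  echo "dev branch, skip"
elif [[ "$(yq -r '.project_deprecation_status' <<< "${readme_vars}")" == "true" ]]; then
  echo "deprecated, skip"
elif [[ "$(yq -r '.skip_package_check' <<< "${jenkins_vars}")" == "true" ]]; then
  echo "package checks disabled for this repo, skip"
else
  echo "eligible for a package check build"
fi
```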
.github/workflows/permissions.yml (vendored, 2 changed lines)

@@ -5,6 +5,8 @@ on:
       - '**/run'
       - '**/finish'
       - '**/check'
+      - 'root/migrations/*'
+
 jobs:
   permission_check:
     uses: linuxserver/github-workflows/.github/workflows/init-svc-executable-permissions.yml@v1
Dockerfile (18 changed lines)

@@ -1,8 +1,9 @@
 # syntax=docker/dockerfile:1
 
+FROM ghcr.io/linuxserver/unrar:latest AS unrar
+
 FROM ghcr.io/linuxserver/baseimage-alpine:edge
 
-ARG UNRAR_VERSION=6.2.8
 ARG BUILD_DATE
 ARG VERSION
 ARG TRANSMISSION_VERSION
@@ -18,17 +19,6 @@ RUN \
     findutils \
     p7zip \
     python3 && \
-  echo "**** install unrar from source ****" && \
-  mkdir /tmp/unrar && \
-  curl -o \
-    /tmp/unrar.tar.gz -L \
-    "https://www.rarlab.com/rar/unrarsrc-${UNRAR_VERSION}.tar.gz" && \
-  tar xf \
-    /tmp/unrar.tar.gz -C \
-    /tmp/unrar --strip-components=1 && \
-  cd /tmp/unrar && \
-  make && \
-  install -v -m755 unrar /usr/local/bin && \
  echo "**** install transmission ****" && \
  if [ -z ${TRANSMISSION_VERSION+x} ]; then \
    TRANSMISSION_VERSION=$(curl -sL "http://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz" | tar -xz -C /tmp \
@@ -39,6 +29,7 @@ RUN \
     transmission-daemon==${TRANSMISSION_VERSION} \
     transmission-extra==${TRANSMISSION_VERSION} \
     transmission-remote==${TRANSMISSION_VERSION} && \
+  printf "Linuxserver.io version: ${VERSION}\nBuild-date: ${BUILD_DATE}" > /build_version && \
  echo "**** cleanup ****" && \
  apk del --purge \
    build-dependencies && \
@@ -49,6 +40,9 @@ RUN \
 # copy local files
 COPY root/ /
 
+# add unrar
+COPY --from=unrar /usr/bin/unrar-alpine /usr/bin/unrar
+
 # ports and volumes
 EXPOSE 9091 51413/tcp 51413/udp
 VOLUME /config
Dockerfile.aarch64

@@ -1,8 +1,9 @@
 # syntax=docker/dockerfile:1
 
+FROM ghcr.io/linuxserver/unrar:arm64v8-latest AS unrar
+
 FROM ghcr.io/linuxserver/baseimage-alpine:arm64v8-edge
 
-ARG UNRAR_VERSION=6.2.8
 ARG BUILD_DATE
 ARG VERSION
 ARG TRANSMISSION_VERSION
@@ -18,17 +19,6 @@ RUN \
     findutils \
     p7zip \
     python3 && \
-  echo "**** install unrar from source ****" && \
-  mkdir /tmp/unrar && \
-  curl -o \
-    /tmp/unrar.tar.gz -L \
-    "https://www.rarlab.com/rar/unrarsrc-${UNRAR_VERSION}.tar.gz" && \
-  tar xf \
-    /tmp/unrar.tar.gz -C \
-    /tmp/unrar --strip-components=1 && \
-  cd /tmp/unrar && \
-  make && \
-  install -v -m755 unrar /usr/local/bin && \
  echo "**** install transmission ****" && \
  if [ -z ${TRANSMISSION_VERSION+x} ]; then \
    TRANSMISSION_VERSION=$(curl -sL "http://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz" | tar -xz -C /tmp \
@@ -39,6 +29,7 @@ RUN \
     transmission-daemon==${TRANSMISSION_VERSION} \
     transmission-extra==${TRANSMISSION_VERSION} \
     transmission-remote==${TRANSMISSION_VERSION} && \
+  printf "Linuxserver.io version: ${VERSION}\nBuild-date: ${BUILD_DATE}" > /build_version && \
  echo "**** cleanup ****" && \
  apk del --purge \
    build-dependencies && \
@@ -49,6 +40,9 @@ RUN \
 # copy local files
 COPY root/ /
 
+# add unrar
+COPY --from=unrar /usr/bin/unrar-alpine /usr/bin/unrar
+
 # ports and volumes
 EXPOSE 9091 51413/tcp 51413/udp
 VOLUME /config
|||||||
702
Jenkinsfile
vendored
702
Jenkinsfile
vendored
@@ -8,7 +8,7 @@ pipeline {
|
|||||||
}
|
}
|
||||||
// Input to determine if this is a package check
|
// Input to determine if this is a package check
|
||||||
parameters {
|
parameters {
|
||||||
string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK')
|
string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK')
|
||||||
}
|
}
|
||||||
// Configuration for the variables used for this specific repo
|
// Configuration for the variables used for this specific repo
|
||||||
environment {
|
environment {
|
||||||
@@ -16,7 +16,9 @@ pipeline {
|
|||||||
GITHUB_TOKEN=credentials('498b4638-2d02-4ce5-832d-8a57d01d97ab')
|
GITHUB_TOKEN=credentials('498b4638-2d02-4ce5-832d-8a57d01d97ab')
|
||||||
GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0')
|
GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0')
|
||||||
GITLAB_NAMESPACE=credentials('gitlab-namespace-id')
|
GITLAB_NAMESPACE=credentials('gitlab-namespace-id')
|
||||||
SCARF_TOKEN=credentials('scarf_api_key')
|
DOCKERHUB_TOKEN=credentials('docker-hub-ci-pat')
|
||||||
|
QUAYIO_API_TOKEN=credentials('quayio-repo-api-token')
|
||||||
|
GIT_SIGNING_KEY=credentials('484fbca6-9a4f-455e-b9e3-97ac98785f5f')
|
||||||
BUILD_VERSION_ARG = 'TRANSMISSION_VERSION'
|
BUILD_VERSION_ARG = 'TRANSMISSION_VERSION'
|
||||||
LS_USER = 'linuxserver'
|
LS_USER = 'linuxserver'
|
||||||
LS_REPO = 'docker-transmission'
|
LS_REPO = 'docker-transmission'
|
||||||
@@ -34,20 +36,46 @@ pipeline {
|
|||||||
CI_PORT='9091'
|
CI_PORT='9091'
|
||||||
CI_SSL='false'
|
CI_SSL='false'
|
||||||
CI_DELAY='120'
|
CI_DELAY='120'
|
||||||
CI_DOCKERENV='TZ=US/Pacific'
|
CI_DOCKERENV=''
|
||||||
CI_AUTH='user:password'
|
CI_AUTH=''
|
||||||
CI_WEBPATH=''
|
CI_WEBPATH=''
|
||||||
}
|
}
|
||||||
stages {
|
stages {
|
||||||
|
stage("Set git config"){
|
||||||
|
steps{
|
||||||
|
sh '''#!/bin/bash
|
||||||
|
cat ${GIT_SIGNING_KEY} > /config/.ssh/id_sign
|
||||||
|
chmod 600 /config/.ssh/id_sign
|
||||||
|
ssh-keygen -y -f /config/.ssh/id_sign > /config/.ssh/id_sign.pub
|
||||||
|
echo "Using $(ssh-keygen -lf /config/.ssh/id_sign) to sign commits"
|
||||||
|
git config --global gpg.format ssh
|
||||||
|
git config --global user.signingkey /config/.ssh/id_sign
|
||||||
|
git config --global commit.gpgsign true
|
||||||
|
'''
|
||||||
|
}
|
||||||
|
}
|
||||||
// Setup all the basic environment variables needed for the build
|
// Setup all the basic environment variables needed for the build
|
||||||
stage("Set ENV Variables base"){
|
stage("Set ENV Variables base"){
|
||||||
steps{
|
steps{
|
||||||
|
echo "Running on node: ${NODE_NAME}"
|
||||||
sh '''#! /bin/bash
|
sh '''#! /bin/bash
|
||||||
containers=$(docker ps -aq)
|
echo "Pruning builder"
|
||||||
|
docker builder prune -f --builder container || :
|
||||||
|
containers=$(docker ps -q)
|
||||||
if [[ -n "${containers}" ]]; then
|
if [[ -n "${containers}" ]]; then
|
||||||
docker stop ${containers}
|
BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
|
||||||
|
for container in ${containers}; do
|
||||||
|
if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
|
||||||
|
echo "skipping buildx container in docker stop"
|
||||||
|
else
|
||||||
|
echo "Stopping container ${container}"
|
||||||
|
docker stop ${container}
|
||||||
|
fi
|
||||||
|
done
|
||||||
fi
|
fi
|
||||||
docker system prune -af --volumes || : '''
|
docker system prune -f --volumes || :
|
||||||
|
docker image prune -af || :
|
||||||
|
'''
|
||||||
script{
|
script{
|
||||||
env.EXIT_STATUS = ''
|
env.EXIT_STATUS = ''
|
||||||
env.LS_RELEASE = sh(
|
env.LS_RELEASE = sh(
|
||||||
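The new "Set git config" stage switches the build agent to SSH-based commit signing. The same setup can be reproduced outside Jenkins; the key path below is a placeholder, and the `allowed_signers` step at the end is an assumption about how you might verify such signatures locally rather than something the Jenkinsfile itself does.

```bash
# Illustrative local equivalent of the "Set git config" stage: sign commits with
# an SSH key instead of GPG. Paths and identities are placeholders.
cat "${GIT_SIGNING_KEY}" > ~/.ssh/id_sign
chmod 600 ~/.ssh/id_sign
ssh-keygen -y -f ~/.ssh/id_sign > ~/.ssh/id_sign.pub

git config --global gpg.format ssh
git config --global user.signingkey ~/.ssh/id_sign
git config --global commit.gpgsign true

# Optional (not part of the Jenkinsfile): record the public key so that
# `git log --show-signature` can verify the signatures locally.
echo "ci@example.com $(cat ~/.ssh/id_sign.pub)" > ~/.ssh/allowed_signers
git config --global gpg.ssh.allowedSignersFile ~/.ssh/allowed_signers
```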
@@ -62,11 +90,20 @@ pipeline {
           env.COMMIT_SHA = sh(
             script: '''git rev-parse HEAD''',
             returnStdout: true).trim()
+          env.GH_DEFAULT_BRANCH = sh(
+            script: '''git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||' ''',
+            returnStdout: true).trim()
          env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/commit/' + env.GIT_COMMIT
          env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DOCKERHUB_IMAGE + '/tags/'
          env.PULL_REQUEST = env.CHANGE_ID
-          env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./.github/workflows/package_trigger.yml'
+          env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml'
+          if ( env.SYFT_IMAGE_TAG == null ) {
+            env.SYFT_IMAGE_TAG = 'latest'
+          }
        }
+        echo "Using syft image tag ${SYFT_IMAGE_TAG}"
+        sh '''#! /bin/bash
+        echo "The default github branch detected as ${GH_DEFAULT_BRANCH}" '''
        script{
          env.LS_RELEASE_NUMBER = sh(
            script: '''echo ${LS_RELEASE} |sed 's/^.*-ls//g' ''',
@@ -125,7 +162,7 @@ pipeline {
       steps{
         script{
           env.EXT_RELEASE_CLEAN = sh(
-            script: '''echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g' ''',
+            script: '''echo ${EXT_RELEASE} | sed 's/[~,%@+;:/ ]//g' ''',
            returnStdout: true).trim()
 
          def semver = env.EXT_RELEASE_CLEAN =~ /(\d+)\.(\d+)\.(\d+)/
@@ -143,7 +180,7 @@ pipeline {
           }
 
           if (env.SEMVER != null) {
-            if (BRANCH_NAME != "master" && BRANCH_NAME != "main") {
+            if (BRANCH_NAME != "${env.GH_DEFAULT_BRANCH}") {
              env.SEMVER = "${env.SEMVER}-${BRANCH_NAME}"
            }
            println("SEMVER: ${env.SEMVER}")
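The `EXT_RELEASE_CLEAN` tweak above adds a space to the character class stripped from the upstream version, and the semver branch check now compares against the detected default branch instead of hard-coded names. A small shell illustration of the sanitising step and the MAJOR.MINOR.PATCH extraction it feeds; the sample version string is made up:

```bash
# Illustration of the version clean-up used for tagging; the input is a made-up example.
EXT_RELEASE='4.0.6-r4 +extra'

# The updated sed also strips spaces alongside the ~,%@+;:/ characters.
EXT_RELEASE_CLEAN=$(echo "${EXT_RELEASE}" | sed 's/[~,%@+;:/ ]//g')
echo "${EXT_RELEASE_CLEAN}"   # 4.0.6-r4extra

# The Jenkinsfile then pulls a MAJOR.MINOR.PATCH triplet out of the cleaned string.
if [[ "${EXT_RELEASE_CLEAN}" =~ ([0-9]+)\.([0-9]+)\.([0-9]+) ]]; then
  echo "SEMVER: ${BASH_REMATCH[1]}.${BASH_REMATCH[2]}.${BASH_REMATCH[3]}"   # SEMVER: 4.0.6
fi
```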
@@ -174,6 +211,7 @@ pipeline {
|
|||||||
env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
|
env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
|
||||||
env.META_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
|
env.META_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
|
||||||
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
|
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
|
||||||
|
env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -198,6 +236,7 @@ pipeline {
|
|||||||
env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
|
env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
|
||||||
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
|
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
|
||||||
env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/'
|
env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/'
|
||||||
|
env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -222,6 +261,7 @@ pipeline {
|
|||||||
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
|
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
|
||||||
env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST
|
env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST
|
||||||
env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/'
|
env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/'
|
||||||
|
env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
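The BUILDCACHE value introduced above is a comma-separated list of cache registries; the later build stages split it with IFS and tag and push the per-arch image to each entry, along these lines (registry list shortened for illustration):

#! /bin/bash
# Illustrative sketch only: tag the built image for every cache registry.
BUILDCACHE='docker.io/lsiodev/buildcache,ghcr.io/linuxserver/lsiodev-buildcache'   # shortened example
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
    docker tag ${IMAGE}:${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
done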
@@ -244,9 +284,11 @@ pipeline {
|
|||||||
-v ${WORKSPACE}:/mnt \
|
-v ${WORKSPACE}:/mnt \
|
||||||
-e AWS_ACCESS_KEY_ID=\"${S3_KEY}\" \
|
-e AWS_ACCESS_KEY_ID=\"${S3_KEY}\" \
|
||||||
-e AWS_SECRET_ACCESS_KEY=\"${S3_SECRET}\" \
|
-e AWS_SECRET_ACCESS_KEY=\"${S3_SECRET}\" \
|
||||||
ghcr.io/linuxserver/baseimage-alpine:3.17 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
|
ghcr.io/linuxserver/baseimage-alpine:3.20 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
|
||||||
apk add --no-cache py3-pip && \
|
apk add --no-cache python3 && \
|
||||||
pip install s3cmd && \
|
python3 -m venv /lsiopy && \
|
||||||
|
pip install --no-cache-dir -U pip && \
|
||||||
|
pip install --no-cache-dir s3cmd && \
|
||||||
s3cmd put --no-preserve --acl-public -m text/xml /mnt/shellcheck-result.xml s3://ci-tests.linuxserver.io/${IMAGE}/${META_TAG}/shellcheck-result.xml" || :'''
|
s3cmd put --no-preserve --acl-public -m text/xml /mnt/shellcheck-result.xml s3://ci-tests.linuxserver.io/${IMAGE}/${META_TAG}/shellcheck-result.xml" || :'''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -265,8 +307,15 @@ pipeline {
|
|||||||
set -e
|
set -e
|
||||||
TEMPDIR=$(mktemp -d)
|
TEMPDIR=$(mktemp -d)
|
||||||
docker pull ghcr.io/linuxserver/jenkins-builder:latest
|
docker pull ghcr.io/linuxserver/jenkins-builder:latest
|
||||||
docker run --rm -e CONTAINER_NAME=${CONTAINER_NAME} -e GITHUB_BRANCH=master -v ${TEMPDIR}:/ansible/jenkins ghcr.io/linuxserver/jenkins-builder:latest
|
# Cloned repo paths for templating:
|
||||||
# Stage 1 - Jenkinsfile update
|
# ${TEMPDIR}/docker-${CONTAINER_NAME}: Cloned branch master of ${LS_USER}/${LS_REPO} for running the jenkins builder on
|
||||||
|
# ${TEMPDIR}/repo/${LS_REPO}: Cloned branch master of ${LS_USER}/${LS_REPO} for committing various templated file changes and pushing back to GitHub
|
||||||
|
# ${TEMPDIR}/docs/docker-documentation: Cloned docs repo for pushing docs updates to Github
|
||||||
|
# ${TEMPDIR}/unraid/docker-templates: Cloned docker-templates repo to check for logos
|
||||||
|
# ${TEMPDIR}/unraid/templates: Cloned templates repo for committing unraid template changes and pushing back to GitHub
|
||||||
|
git clone --branch master --depth 1 https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/docker-${CONTAINER_NAME}
|
||||||
|
docker run --rm -v ${TEMPDIR}/docker-${CONTAINER_NAME}:/tmp -e LOCAL=true -e PUID=$(id -u) -e PGID=$(id -g) ghcr.io/linuxserver/jenkins-builder:latest
|
||||||
|
echo "Starting Stage 1 - Jenkinsfile update"
|
||||||
if [[ "$(md5sum Jenkinsfile | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile | awk '{ print $1 }')" ]]; then
|
if [[ "$(md5sum Jenkinsfile | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile | awk '{ print $1 }')" ]]; then
|
||||||
mkdir -p ${TEMPDIR}/repo
|
mkdir -p ${TEMPDIR}/repo
|
||||||
git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
|
git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
|
||||||
@@ -275,16 +324,17 @@ pipeline {
|
|||||||
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile ${TEMPDIR}/repo/${LS_REPO}/
|
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile ${TEMPDIR}/repo/${LS_REPO}/
|
||||||
git add Jenkinsfile
|
git add Jenkinsfile
|
||||||
git commit -m 'Bot Updating Templated Files'
|
git commit -m 'Bot Updating Templated Files'
|
||||||
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
echo "Updating Jenkinsfile"
|
echo "Updating Jenkinsfile and exiting build, new one will trigger based on commit"
|
||||||
rm -Rf ${TEMPDIR}
|
rm -Rf ${TEMPDIR}
|
||||||
exit 0
|
exit 0
|
||||||
else
|
else
|
||||||
echo "Jenkinsfile is up to date."
|
echo "Jenkinsfile is up to date."
|
||||||
fi
|
fi
|
||||||
# Stage 2 - Delete old templates
|
echo "Starting Stage 2 - Delete old templates"
|
||||||
OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml"
|
OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml .github/workflows/package_trigger.yml"
|
||||||
for i in ${OLD_TEMPLATES}; do
|
for i in ${OLD_TEMPLATES}; do
|
||||||
if [[ -f "${i}" ]]; then
|
if [[ -f "${i}" ]]; then
|
||||||
TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}"
|
TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}"
|
||||||
@@ -299,15 +349,45 @@ pipeline {
|
|||||||
git rm "${i}"
|
git rm "${i}"
|
||||||
done
|
done
|
||||||
git commit -m 'Bot Updating Templated Files'
|
git commit -m 'Bot Updating Templated Files'
|
||||||
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
echo "Deleting old and deprecated templates"
|
echo "Deleting old/deprecated templates and exiting build, new one will trigger based on commit"
|
||||||
rm -Rf ${TEMPDIR}
|
rm -Rf ${TEMPDIR}
|
||||||
exit 0
|
exit 0
|
||||||
else
|
else
|
||||||
echo "No templates to delete"
|
echo "No templates to delete"
|
||||||
fi
|
fi
|
||||||
# Stage 3 - Update templates
|
echo "Starting Stage 2.5 - Update init diagram"
|
||||||
|
if ! grep -q 'init_diagram:' readme-vars.yml; then
|
||||||
|
echo "Adding the key 'init_diagram' to readme-vars.yml"
|
||||||
|
sed -i '\\|^#.*changelog.*$|d' readme-vars.yml
|
||||||
|
sed -i 's|^changelogs:|# init diagram\\ninit_diagram:\\n\\n# changelog\\nchangelogs:|' readme-vars.yml
|
||||||
|
fi
|
||||||
|
mkdir -p ${TEMPDIR}/d2
|
||||||
|
docker run --rm -v ${TEMPDIR}/d2:/output -e PUID=$(id -u) -e PGID=$(id -g) -e RAW="true" ghcr.io/linuxserver/d2-builder:latest ${CONTAINER_NAME}:latest
|
||||||
|
ls -al ${TEMPDIR}/d2
|
||||||
|
yq -ei ".init_diagram |= load_str(\\"${TEMPDIR}/d2/${CONTAINER_NAME}-latest.d2\\")" readme-vars.yml
|
||||||
|
if [[ $(md5sum readme-vars.yml | cut -c1-8) != $(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/readme-vars.yml | cut -c1-8) ]]; then
|
||||||
|
echo "'init_diagram' has been updated. Updating repo and exiting build, new one will trigger based on commit."
|
||||||
|
mkdir -p ${TEMPDIR}/repo
|
||||||
|
git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
|
||||||
|
cd ${TEMPDIR}/repo/${LS_REPO}
|
||||||
|
git checkout -f master
|
||||||
|
cp ${WORKSPACE}/readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/readme-vars.yml
|
||||||
|
git add readme-vars.yml
|
||||||
|
git commit -m 'Bot Updating Templated Files'
|
||||||
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
|
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
echo "Updating templates and exiting build, new one will trigger based on commit"
|
||||||
|
rm -Rf ${TEMPDIR}
|
||||||
|
exit 0
|
||||||
|
else
|
||||||
|
echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
echo "Init diagram is unchanged"
|
||||||
|
fi
|
||||||
|
echo "Starting Stage 3 - Update templates"
|
||||||
CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
|
CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
|
||||||
cd ${TEMPDIR}/docker-${CONTAINER_NAME}
|
cd ${TEMPDIR}/docker-${CONTAINER_NAME}
|
||||||
NEWHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
|
NEWHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
|
||||||
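The template drift check above amounts to hashing the concatenated contents of all templated files before and after running the jenkins builder; in isolation the comparison looks roughly like this (file list shortened for illustration):

#! /bin/bash
# Illustrative sketch only: detect whether templated files changed.
TEMPLATED_FILES="Jenkinsfile README.md LICENSE"          # shortened example list
CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
cd ${TEMPDIR}/docker-${CONTAINER_NAME}                   # freshly templated copy
NEWHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
if [[ "${CURRENTHASH}" != "${NEWHASH}" ]]; then
    echo "Templated files changed, commit and push the updates"
else
    echo "Templates are up to date"
fi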
@@ -320,38 +400,58 @@ pipeline {
|
|||||||
mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/workflows
|
mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/workflows
|
||||||
mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/ISSUE_TEMPLATE
|
mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/ISSUE_TEMPLATE
|
||||||
cp --parents ${TEMPLATED_FILES} ${TEMPDIR}/repo/${LS_REPO}/ || :
|
cp --parents ${TEMPLATED_FILES} ${TEMPDIR}/repo/${LS_REPO}/ || :
|
||||||
|
cp --parents readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/ || :
|
||||||
cd ${TEMPDIR}/repo/${LS_REPO}/
|
cd ${TEMPDIR}/repo/${LS_REPO}/
|
||||||
if ! grep -q '.jenkins-external' .gitignore 2>/dev/null; then
|
if ! grep -q '.jenkins-external' .gitignore 2>/dev/null; then
|
||||||
echo ".jenkins-external" >> .gitignore
|
echo ".jenkins-external" >> .gitignore
|
||||||
git add .gitignore
|
git add .gitignore
|
||||||
fi
|
fi
|
||||||
git add ${TEMPLATED_FILES}
|
git add readme-vars.yml ${TEMPLATED_FILES}
|
||||||
git commit -m 'Bot Updating Templated Files'
|
git commit -m 'Bot Updating Templated Files'
|
||||||
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
echo "Updating templates and exiting build, new one will trigger based on commit"
|
||||||
|
rm -Rf ${TEMPDIR}
|
||||||
|
exit 0
|
||||||
else
|
else
|
||||||
echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
echo "No templates to update"
|
||||||
fi
|
fi
|
||||||
mkdir -p ${TEMPDIR}/gitbook
|
echo "Starting Stage 4 - External repo updates: Docs, Unraid Template and Readme Sync to Docker Hub"
|
||||||
git clone https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/gitbook/docker-documentation
|
mkdir -p ${TEMPDIR}/docs
|
||||||
if [[ ("${BRANCH_NAME}" == "master") || ("${BRANCH_NAME}" == "main") ]] && [[ (! -f ${TEMPDIR}/gitbook/docker-documentation/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/gitbook/docker-documentation/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
|
git clone --depth=1 https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/docs/docker-documentation
|
||||||
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/gitbook/docker-documentation/images/
|
if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
|
||||||
cd ${TEMPDIR}/gitbook/docker-documentation/
|
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/docs/docker-documentation/docs/images/
|
||||||
git add images/docker-${CONTAINER_NAME}.md
|
cd ${TEMPDIR}/docs/docker-documentation
|
||||||
|
GH_DOCS_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
|
||||||
|
git add docs/images/docker-${CONTAINER_NAME}.md
|
||||||
|
echo "Updating docs repo"
|
||||||
git commit -m 'Bot Updating Documentation'
|
git commit -m 'Bot Updating Documentation'
|
||||||
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git --all
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} || \
|
||||||
|
(MAXWAIT="10" && echo "Push to docs failed, trying again in ${MAXWAIT} seconds" && \
|
||||||
|
sleep $((RANDOM % MAXWAIT)) && \
|
||||||
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase && \
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH})
|
||||||
|
else
|
||||||
|
echo "Docs update not needed, skipping"
|
||||||
fi
|
fi
|
||||||
mkdir -p ${TEMPDIR}/unraid
|
mkdir -p ${TEMPDIR}/unraid
|
||||||
git clone https://github.com/linuxserver/docker-templates.git ${TEMPDIR}/unraid/docker-templates
|
git clone --depth=1 https://github.com/linuxserver/docker-templates.git ${TEMPDIR}/unraid/docker-templates
|
||||||
git clone https://github.com/linuxserver/templates.git ${TEMPDIR}/unraid/templates
|
git clone --depth=1 https://github.com/linuxserver/templates.git ${TEMPDIR}/unraid/templates
|
||||||
if [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-logo.png ]]; then
|
if [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-logo.png ]]; then
|
||||||
sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-logo.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
|
sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-logo.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
|
||||||
elif [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-icon.png ]]; then
|
elif [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-icon.png ]]; then
|
||||||
sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-icon.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
|
sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-icon.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
|
||||||
fi
|
fi
|
||||||
if [[ ("${BRANCH_NAME}" == "master") || ("${BRANCH_NAME}" == "main") ]] && [[ (! -f ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml) || ("$(md5sum ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml | awk '{ print $1 }')") ]]; then
|
if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml) || ("$(md5sum ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml | awk '{ print $1 }')") ]]; then
|
||||||
|
echo "Updating Unraid template"
|
||||||
cd ${TEMPDIR}/unraid/templates/
|
cd ${TEMPDIR}/unraid/templates/
|
||||||
if grep -wq "${CONTAINER_NAME}" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then
|
GH_TEMPLATES_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
|
||||||
|
if grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list && [[ -f ${TEMPDIR}/unraid/templates/unraid/deprecated/${CONTAINER_NAME}.xml ]]; then
|
||||||
|
echo "Image is on the ignore list, and already in the deprecation folder."
|
||||||
|
elif grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then
|
||||||
echo "Image is on the ignore list, marking Unraid template as deprecated"
|
echo "Image is on the ignore list, marking Unraid template as deprecated"
|
||||||
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/
|
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/
|
||||||
git add -u unraid/${CONTAINER_NAME}.xml
|
git add -u unraid/${CONTAINER_NAME}.xml
|
||||||
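Both the docs update and the Unraid template update above discover the target repo's default branch from the remote instead of hardcoding master; on its own the detection is simply:

#! /bin/bash
# Illustrative sketch only: discover the default branch of the origin remote.
DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
echo "Default branch is ${DEFAULT_BRANCH}"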
@@ -362,7 +462,42 @@ pipeline {
|
|||||||
git add unraid/${CONTAINER_NAME}.xml
|
git add unraid/${CONTAINER_NAME}.xml
|
||||||
git commit -m 'Bot Updating Unraid Template'
|
git commit -m 'Bot Updating Unraid Template'
|
||||||
fi
|
fi
|
||||||
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git --all
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} --rebase
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} || \
|
||||||
|
(MAXWAIT="10" && echo "Push to unraid templates failed, trying again in ${MAXWAIT} seconds" && \
|
||||||
|
sleep $((RANDOM % MAXWAIT)) && \
|
||||||
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} --rebase && \
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH})
|
||||||
|
else
|
||||||
|
echo "No updates to Unraid template needed, skipping"
|
||||||
|
fi
|
||||||
|
if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]]; then
|
||||||
|
if [[ $(cat ${TEMPDIR}/docker-${CONTAINER_NAME}/README.md | wc -m) -gt 25000 ]]; then
|
||||||
|
echo "Readme is longer than 25,000 characters. Syncing the lite version to Docker Hub"
|
||||||
|
DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/README.lite"
|
||||||
|
else
|
||||||
|
echo "Syncing readme to Docker Hub"
|
||||||
|
DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/README.md"
|
||||||
|
fi
|
||||||
|
if curl -s https://hub.docker.com/v2/namespaces/${DOCKERHUB_IMAGE%%/*}/repositories/${DOCKERHUB_IMAGE##*/}/tags | jq -r '.message' | grep -q 404; then
|
||||||
|
echo "Docker Hub endpoint doesn't exist. Creating endpoint first."
|
||||||
|
DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
|
||||||
|
curl -s \
|
||||||
|
-H "Authorization: JWT ${DH_TOKEN}" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-X POST \
|
||||||
|
-d '{"name":"'${DOCKERHUB_IMAGE##*/}'", "namespace":"'${DOCKERHUB_IMAGE%%/*}'"}' \
|
||||||
|
https://hub.docker.com/v2/repositories/ || :
|
||||||
|
fi
|
||||||
|
DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
|
||||||
|
curl -s \
|
||||||
|
-H "Authorization: JWT ${DH_TOKEN}" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-X PATCH \
|
||||||
|
-d "{\\"full_description\\":$(jq -Rsa . ${DH_README_SYNC_PATH})}" \
|
||||||
|
https://hub.docker.com/v2/repositories/${DOCKERHUB_IMAGE} || :
|
||||||
|
else
|
||||||
|
echo "Not the default Github branch. Skipping readme sync to Docker Hub."
|
||||||
fi
|
fi
|
||||||
rm -Rf ${TEMPDIR}'''
|
rm -Rf ${TEMPDIR}'''
|
||||||
script{
|
script{
|
||||||
@@ -409,52 +544,27 @@ pipeline {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
/* #######################
|
/* #######################
|
||||||
GitLab Mirroring
|
GitLab Mirroring and Quay.io Repo Visibility
|
||||||
####################### */
|
####################### */
|
||||||
// Ping into Gitlab to mirror this repo and have a registry endpoint
|
// Ping into Gitlab to mirror this repo and have a registry endpoint & mark this repo on Quay.io as public
|
||||||
stage("GitLab Mirror"){
|
stage("GitLab Mirror and Quay.io Visibility"){
|
||||||
when {
|
when {
|
||||||
environment name: 'EXIT_STATUS', value: ''
|
environment name: 'EXIT_STATUS', value: ''
|
||||||
}
|
}
|
||||||
steps{
|
steps{
|
||||||
sh '''curl -H "Content-Type: application/json" -H "Private-Token: ${GITLAB_TOKEN}" -X POST https://gitlab.com/api/v4/projects \
|
sh '''curl -H "Content-Type: application/json" -H "Private-Token: ${GITLAB_TOKEN}" -X POST https://gitlab.com/api/v4/projects \
|
||||||
-d '{"namespace_id":'${GITLAB_NAMESPACE}',\
|
-d '{"namespace_id":'${GITLAB_NAMESPACE}',\
|
||||||
"name":"'${LS_REPO}'",
|
"name":"'${LS_REPO}'",
|
||||||
"mirror":true,\
|
"mirror":true,\
|
||||||
"import_url":"https://github.com/linuxserver/'${LS_REPO}'.git",\
|
"import_url":"https://github.com/linuxserver/'${LS_REPO}'.git",\
|
||||||
"issues_access_level":"disabled",\
|
"issues_access_level":"disabled",\
|
||||||
"merge_requests_access_level":"disabled",\
|
"merge_requests_access_level":"disabled",\
|
||||||
"repository_access_level":"enabled",\
|
"repository_access_level":"enabled",\
|
||||||
"visibility":"public"}' '''
|
"visibility":"public"}' '''
|
||||||
}
|
sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \
|
||||||
}
|
-d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" '''
|
||||||
/* #######################
|
sh '''curl -H "Content-Type: application/json" -H "Authorization: Bearer ${QUAYIO_API_TOKEN}" -X POST "https://quay.io/api/v1/repository${QUAYIMAGE/quay.io/}/changevisibility" \
|
||||||
Scarf.sh package registry
|
-d '{"visibility":"public"}' ||: '''
|
||||||
####################### */
|
|
||||||
// Add package to Scarf.sh and set permissions
|
|
||||||
stage("Scarf.sh package registry"){
|
|
||||||
when {
|
|
||||||
branch "master"
|
|
||||||
environment name: 'EXIT_STATUS', value: ''
|
|
||||||
}
|
|
||||||
steps{
|
|
||||||
sh '''#! /bin/bash
|
|
||||||
PACKAGE_UUID=$(curl -X GET -H "Authorization: Bearer ${SCARF_TOKEN}" https://scarf.sh/api/v1/organizations/linuxserver-ci/packages | jq -r '.[] | select(.name=="linuxserver/transmission") | .uuid' || :)
|
|
||||||
if [ -z "${PACKAGE_UUID}" ]; then
|
|
||||||
echo "Adding package to Scarf.sh"
|
|
||||||
curl -sX POST https://scarf.sh/api/v1/organizations/linuxserver-ci/packages \
|
|
||||||
-H "Authorization: Bearer ${SCARF_TOKEN}" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d '{"name":"linuxserver/transmission",\
|
|
||||||
"shortDescription":"example description",\
|
|
||||||
"libraryType":"docker",\
|
|
||||||
"website":"https://github.com/linuxserver/docker-transmission",\
|
|
||||||
"backendUrl":"https://ghcr.io/linuxserver/transmission",\
|
|
||||||
"publicUrl":"https://lscr.io/linuxserver/transmission"}' || :
|
|
||||||
else
|
|
||||||
echo "Package already exists on Scarf.sh"
|
|
||||||
fi
|
|
||||||
'''
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/* ###############
|
/* ###############
|
||||||
@@ -485,7 +595,42 @@ pipeline {
|
|||||||
--label \"org.opencontainers.image.title=Transmission\" \
|
--label \"org.opencontainers.image.title=Transmission\" \
|
||||||
--label \"org.opencontainers.image.description=[Transmission](https://www.transmissionbt.com/) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more.\" \
|
--label \"org.opencontainers.image.description=[Transmission](https://www.transmissionbt.com/) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more.\" \
|
||||||
--no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \
|
--no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \
|
||||||
|
--provenance=true --sbom=true --builder=container --load \
|
||||||
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
|
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
|
||||||
|
sh '''#! /bin/bash
|
||||||
|
set -e
|
||||||
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
|
for i in "${CACHE[@]}"; do
|
||||||
|
docker tag ${IMAGE}:${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
done
|
||||||
|
'''
|
||||||
|
withCredentials([
|
||||||
|
[
|
||||||
|
$class: 'UsernamePasswordMultiBinding',
|
||||||
|
credentialsId: 'Quay.io-Robot',
|
||||||
|
usernameVariable: 'QUAYUSER',
|
||||||
|
passwordVariable: 'QUAYPASS'
|
||||||
|
]
|
||||||
|
]) {
|
||||||
|
retry_backoff(5,5) {
|
||||||
|
sh '''#! /bin/bash
|
||||||
|
set -e
|
||||||
|
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
|
||||||
|
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
|
||||||
|
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
|
||||||
|
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
|
||||||
|
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
|
||||||
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
|
for i in "${CACHE[@]}"; do
|
||||||
|
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
|
||||||
|
done
|
||||||
|
for p in $(jobs -p); do
|
||||||
|
wait "$p" || { echo "job $p failed" >&2; exit 1; }
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
'''
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Build MultiArch Docker containers for push to LS Repo
|
// Build MultiArch Docker containers for push to LS Repo
|
||||||
@@ -516,7 +661,42 @@ pipeline {
|
|||||||
--label \"org.opencontainers.image.title=Transmission\" \
|
--label \"org.opencontainers.image.title=Transmission\" \
|
||||||
--label \"org.opencontainers.image.description=[Transmission](https://www.transmissionbt.com/) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more.\" \
|
--label \"org.opencontainers.image.description=[Transmission](https://www.transmissionbt.com/) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more.\" \
|
||||||
--no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \
|
--no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \
|
||||||
|
--provenance=true --sbom=true --builder=container --load \
|
||||||
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
|
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
|
||||||
|
sh '''#! /bin/bash
|
||||||
|
set -e
|
||||||
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
|
for i in "${CACHE[@]}"; do
|
||||||
|
docker tag ${IMAGE}:amd64-${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
done
|
||||||
|
'''
|
||||||
|
withCredentials([
|
||||||
|
[
|
||||||
|
$class: 'UsernamePasswordMultiBinding',
|
||||||
|
credentialsId: 'Quay.io-Robot',
|
||||||
|
usernameVariable: 'QUAYUSER',
|
||||||
|
passwordVariable: 'QUAYPASS'
|
||||||
|
]
|
||||||
|
]) {
|
||||||
|
retry_backoff(5,5) {
|
||||||
|
sh '''#! /bin/bash
|
||||||
|
set -e
|
||||||
|
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
|
||||||
|
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
|
||||||
|
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
|
||||||
|
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
|
||||||
|
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
|
||||||
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
|
for i in "${CACHE[@]}"; do
|
||||||
|
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
|
||||||
|
done
|
||||||
|
for p in $(jobs -p); do
|
||||||
|
wait "$p" || { echo "job $p failed" >&2; exit 1; }
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
'''
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
stage('Build ARM64') {
|
stage('Build ARM64') {
|
||||||
@@ -525,10 +705,6 @@ pipeline {
|
|||||||
}
|
}
|
||||||
steps {
|
steps {
|
||||||
echo "Running on node: ${NODE_NAME}"
|
echo "Running on node: ${NODE_NAME}"
|
||||||
echo 'Logging into Github'
|
|
||||||
sh '''#! /bin/bash
|
|
||||||
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
|
|
||||||
'''
|
|
||||||
sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.aarch64"
|
sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.aarch64"
|
||||||
sh "docker buildx build \
|
sh "docker buildx build \
|
||||||
--label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
|
--label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
|
||||||
@@ -544,17 +720,50 @@ pipeline {
|
|||||||
--label \"org.opencontainers.image.title=Transmission\" \
|
--label \"org.opencontainers.image.title=Transmission\" \
|
||||||
--label \"org.opencontainers.image.description=[Transmission](https://www.transmissionbt.com/) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more.\" \
|
--label \"org.opencontainers.image.description=[Transmission](https://www.transmissionbt.com/) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more.\" \
|
||||||
--no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \
|
--no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \
|
||||||
|
--provenance=true --sbom=true --builder=container --load \
|
||||||
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
|
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
|
||||||
sh "docker tag ${IMAGE}:arm64v8-${META_TAG} ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}"
|
sh '''#! /bin/bash
|
||||||
retry(5) {
|
set -e
|
||||||
sh "docker push ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}"
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
|
for i in "${CACHE[@]}"; do
|
||||||
|
docker tag ${IMAGE}:arm64v8-${META_TAG} ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
done
|
||||||
|
'''
|
||||||
|
withCredentials([
|
||||||
|
[
|
||||||
|
$class: 'UsernamePasswordMultiBinding',
|
||||||
|
credentialsId: 'Quay.io-Robot',
|
||||||
|
usernameVariable: 'QUAYUSER',
|
||||||
|
passwordVariable: 'QUAYPASS'
|
||||||
|
]
|
||||||
|
]) {
|
||||||
|
retry_backoff(5,5) {
|
||||||
|
sh '''#! /bin/bash
|
||||||
|
set -e
|
||||||
|
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
|
||||||
|
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
|
||||||
|
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
|
||||||
|
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
|
||||||
|
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
|
||||||
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
|
for i in "${CACHE[@]}"; do
|
||||||
|
docker push ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} &
|
||||||
|
done
|
||||||
|
for p in $(jobs -p); do
|
||||||
|
wait "$p" || { echo "job $p failed" >&2; exit 1; }
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
'''
|
||||||
|
}
|
||||||
}
|
}
|
||||||
sh '''#! /bin/bash
|
sh '''#! /bin/bash
|
||||||
containers=$(docker ps -aq)
|
containers=$(docker ps -aq)
|
||||||
if [[ -n "${containers}" ]]; then
|
if [[ -n "${containers}" ]]; then
|
||||||
docker stop ${containers}
|
docker stop ${containers}
|
||||||
fi
|
fi
|
||||||
docker system prune -af --volumes || : '''
|
docker system prune -f --volumes || :
|
||||||
|
docker image prune -af || :
|
||||||
|
'''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -570,7 +779,7 @@ pipeline {
|
|||||||
sh '''#! /bin/bash
|
sh '''#! /bin/bash
|
||||||
set -e
|
set -e
|
||||||
TEMPDIR=$(mktemp -d)
|
TEMPDIR=$(mktemp -d)
|
||||||
if [ "${MULTIARCH}" == "true" ] && [ "${PACKAGE_CHECK}" == "false" ]; then
|
if [ "${MULTIARCH}" == "true" ] && [ "${PACKAGE_CHECK}" != "true" ]; then
|
||||||
LOCAL_CONTAINER=${IMAGE}:amd64-${META_TAG}
|
LOCAL_CONTAINER=${IMAGE}:amd64-${META_TAG}
|
||||||
else
|
else
|
||||||
LOCAL_CONTAINER=${IMAGE}:${META_TAG}
|
LOCAL_CONTAINER=${IMAGE}:${META_TAG}
|
||||||
@@ -579,7 +788,7 @@ pipeline {
|
|||||||
docker run --rm \
|
docker run --rm \
|
||||||
-v /var/run/docker.sock:/var/run/docker.sock:ro \
|
-v /var/run/docker.sock:/var/run/docker.sock:ro \
|
||||||
-v ${TEMPDIR}:/tmp \
|
-v ${TEMPDIR}:/tmp \
|
||||||
ghcr.io/anchore/syft:latest \
|
ghcr.io/anchore/syft:${SYFT_IMAGE_TAG} \
|
||||||
${LOCAL_CONTAINER} -o table=/tmp/package_versions.txt
|
${LOCAL_CONTAINER} -o table=/tmp/package_versions.txt
|
||||||
NEW_PACKAGE_TAG=$(md5sum ${TEMPDIR}/package_versions.txt | cut -c1-8 )
|
NEW_PACKAGE_TAG=$(md5sum ${TEMPDIR}/package_versions.txt | cut -c1-8 )
|
||||||
echo "Package tag sha from current packages in buit container is ${NEW_PACKAGE_TAG} comparing to old ${PACKAGE_TAG} from github"
|
echo "Package tag sha from current packages in buit container is ${NEW_PACKAGE_TAG} comparing to old ${PACKAGE_TAG} from github"
|
||||||
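The package check above uses syft to dump the package inventory of the freshly built image and derives a short tag from it, which is then compared with the PACKAGE_TAG recorded on GitHub; roughly:

#! /bin/bash
# Illustrative sketch only: regenerate the package inventory and its short tag.
# The pipeline chooses the local amd64 tag instead when MULTIARCH is true.
TEMPDIR=$(mktemp -d)
docker run --rm \
  -v /var/run/docker.sock:/var/run/docker.sock:ro \
  -v ${TEMPDIR}:/tmp \
  ghcr.io/anchore/syft:${SYFT_IMAGE_TAG:-latest} \
  ${IMAGE}:${META_TAG} -o table=/tmp/package_versions.txt
NEW_PACKAGE_TAG=$(md5sum ${TEMPDIR}/package_versions.txt | cut -c1-8)
if [[ "${NEW_PACKAGE_TAG}" != "${PACKAGE_TAG}" ]]; then
    echo "Package versions changed: ${PACKAGE_TAG} -> ${NEW_PACKAGE_TAG}"
fi
rm -rf ${TEMPDIR}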
@@ -591,7 +800,8 @@ pipeline {
|
|||||||
wait
|
wait
|
||||||
git add package_versions.txt
|
git add package_versions.txt
|
||||||
git commit -m 'Bot Updating Package Versions'
|
git commit -m 'Bot Updating Package Versions'
|
||||||
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
|
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
|
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
|
||||||
echo "true" > /tmp/packages-${COMMIT_SHA}-${BUILD_NUMBER}
|
echo "true" > /tmp/packages-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
echo "Package tag updated, stopping build process"
|
echo "Package tag updated, stopping build process"
|
||||||
else
|
else
|
||||||
@@ -657,18 +867,27 @@ pipeline {
|
|||||||
}
|
}
|
||||||
sh '''#! /bin/bash
|
sh '''#! /bin/bash
|
||||||
set -e
|
set -e
|
||||||
|
if grep -q 'docker-baseimage' <<< "${LS_REPO}"; then
|
||||||
|
echo "Detected baseimage, setting LSIO_FIRST_PARTY=true"
|
||||||
|
if [ -n "${CI_DOCKERENV}" ]; then
|
||||||
|
CI_DOCKERENV="LSIO_FIRST_PARTY=true|${CI_DOCKERENV}"
|
||||||
|
else
|
||||||
|
CI_DOCKERENV="LSIO_FIRST_PARTY=true"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
docker pull ghcr.io/linuxserver/ci:latest
|
docker pull ghcr.io/linuxserver/ci:latest
|
||||||
if [ "${MULTIARCH}" == "true" ]; then
|
if [ "${MULTIARCH}" == "true" ]; then
|
||||||
docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
|
docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64
|
||||||
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
|
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
|
||||||
fi
|
fi
|
||||||
docker run --rm \
|
docker run --rm \
|
||||||
--shm-size=1gb \
|
--shm-size=1gb \
|
||||||
-v /var/run/docker.sock:/var/run/docker.sock \
|
-v /var/run/docker.sock:/var/run/docker.sock \
|
||||||
-e IMAGE=\"${IMAGE}\" \
|
-e IMAGE=\"${IMAGE}\" \
|
||||||
-e DELAY_START=\"${CI_DELAY}\" \
|
-e DOCKER_LOGS_TIMEOUT=\"${CI_DELAY}\" \
|
||||||
-e TAGS=\"${CI_TAGS}\" \
|
-e TAGS=\"${CI_TAGS}\" \
|
||||||
-e META_TAG=\"${META_TAG}\" \
|
-e META_TAG=\"${META_TAG}\" \
|
||||||
|
-e RELEASE_TAG=\"latest\" \
|
||||||
-e PORT=\"${CI_PORT}\" \
|
-e PORT=\"${CI_PORT}\" \
|
||||||
-e SSL=\"${CI_SSL}\" \
|
-e SSL=\"${CI_SSL}\" \
|
||||||
-e BASE=\"${DIST_IMAGE}\" \
|
-e BASE=\"${DIST_IMAGE}\" \
|
||||||
@@ -678,6 +897,8 @@ pipeline {
|
|||||||
-e WEB_SCREENSHOT=\"${CI_WEB}\" \
|
-e WEB_SCREENSHOT=\"${CI_WEB}\" \
|
||||||
-e WEB_AUTH=\"${CI_AUTH}\" \
|
-e WEB_AUTH=\"${CI_AUTH}\" \
|
||||||
-e WEB_PATH=\"${CI_WEBPATH}\" \
|
-e WEB_PATH=\"${CI_WEBPATH}\" \
|
||||||
|
-e NODE_NAME=\"${NODE_NAME}\" \
|
||||||
|
-e SYFT_IMAGE_TAG=\"${CI_SYFT_IMAGE_TAG:-${SYFT_IMAGE_TAG}}\" \
|
||||||
-t ghcr.io/linuxserver/ci:latest \
|
-t ghcr.io/linuxserver/ci:latest \
|
||||||
python3 test_build.py'''
|
python3 test_build.py'''
|
||||||
}
|
}
|
||||||
@@ -693,43 +914,23 @@ pipeline {
|
|||||||
environment name: 'EXIT_STATUS', value: ''
|
environment name: 'EXIT_STATUS', value: ''
|
||||||
}
|
}
|
||||||
steps {
|
steps {
|
||||||
withCredentials([
|
retry_backoff(5,5) {
|
||||||
[
|
sh '''#! /bin/bash
|
||||||
$class: 'UsernamePasswordMultiBinding',
|
set -e
|
||||||
credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
|
for PUSHIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
|
||||||
usernameVariable: 'DOCKERUSER',
|
[[ ${PUSHIMAGE%%/*} =~ \\. ]] && PUSHIMAGEPLUS="${PUSHIMAGE}" || PUSHIMAGEPLUS="docker.io/${PUSHIMAGE}"
|
||||||
passwordVariable: 'DOCKERPASS'
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
],
|
for i in "${CACHE[@]}"; do
|
||||||
[
|
if [[ "${PUSHIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
|
||||||
$class: 'UsernamePasswordMultiBinding',
|
CACHEIMAGE=${i}
|
||||||
credentialsId: 'Quay.io-Robot',
|
fi
|
||||||
usernameVariable: 'QUAYUSER',
|
|
||||||
passwordVariable: 'QUAYPASS'
|
|
||||||
]
|
|
||||||
]) {
|
|
||||||
retry(5) {
|
|
||||||
sh '''#! /bin/bash
|
|
||||||
set -e
|
|
||||||
echo $DOCKERPASS | docker login -u $DOCKERUSER --password-stdin
|
|
||||||
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
|
|
||||||
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
|
|
||||||
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
|
|
||||||
for PUSHIMAGE in "${GITHUBIMAGE}" "${GITLABIMAGE}" "${QUAYIMAGE}" "${IMAGE}"; do
|
|
||||||
docker tag ${IMAGE}:${META_TAG} ${PUSHIMAGE}:${META_TAG}
|
|
||||||
docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:latest
|
|
||||||
docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${EXT_RELEASE_TAG}
|
|
||||||
if [ -n "${SEMVER}" ]; then
|
|
||||||
docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${SEMVER}
|
|
||||||
fi
|
|
||||||
docker push ${PUSHIMAGE}:latest
|
|
||||||
docker push ${PUSHIMAGE}:${META_TAG}
|
|
||||||
docker push ${PUSHIMAGE}:${EXT_RELEASE_TAG}
|
|
||||||
if [ -n "${SEMVER}" ]; then
|
|
||||||
docker push ${PUSHIMAGE}:${SEMVER}
|
|
||||||
fi
|
|
||||||
done
|
done
|
||||||
'''
|
docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${META_TAG} -t ${PUSHIMAGE}:latest -t ${PUSHIMAGE}:${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
}
|
if [ -n "${SEMVER}" ]; then
|
||||||
|
docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
'''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -740,85 +941,34 @@ pipeline {
|
|||||||
environment name: 'EXIT_STATUS', value: ''
|
environment name: 'EXIT_STATUS', value: ''
|
||||||
}
|
}
|
||||||
steps {
|
steps {
|
||||||
withCredentials([
|
retry_backoff(5,5) {
|
||||||
[
|
sh '''#! /bin/bash
|
||||||
$class: 'UsernamePasswordMultiBinding',
|
set -e
|
||||||
credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
|
for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
|
||||||
usernameVariable: 'DOCKERUSER',
|
[[ ${MANIFESTIMAGE%%/*} =~ \\. ]] && MANIFESTIMAGEPLUS="${MANIFESTIMAGE}" || MANIFESTIMAGEPLUS="docker.io/${MANIFESTIMAGE}"
|
||||||
passwordVariable: 'DOCKERPASS'
|
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
|
||||||
],
|
for i in "${CACHE[@]}"; do
|
||||||
[
|
if [[ "${MANIFESTIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
|
||||||
$class: 'UsernamePasswordMultiBinding',
|
CACHEIMAGE=${i}
|
||||||
credentialsId: 'Quay.io-Robot',
|
fi
|
||||||
usernameVariable: 'QUAYUSER',
|
|
||||||
passwordVariable: 'QUAYPASS'
|
|
||||||
]
|
|
||||||
]) {
|
|
||||||
retry(5) {
|
|
||||||
sh '''#! /bin/bash
|
|
||||||
set -e
|
|
||||||
echo $DOCKERPASS | docker login -u $DOCKERUSER --password-stdin
|
|
||||||
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
|
|
||||||
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
|
|
||||||
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
|
|
||||||
if [ "${CI}" == "false" ]; then
|
|
||||||
docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
|
|
||||||
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
|
|
||||||
fi
|
|
||||||
for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
|
|
||||||
docker tag ${IMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG}
|
|
||||||
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-latest
|
|
||||||
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
|
|
||||||
docker tag ${IMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
|
|
||||||
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-latest
|
|
||||||
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
|
|
||||||
if [ -n "${SEMVER}" ]; then
|
|
||||||
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${SEMVER}
|
|
||||||
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
|
|
||||||
fi
|
|
||||||
docker push ${MANIFESTIMAGE}:amd64-${META_TAG}
|
|
||||||
docker push ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
|
|
||||||
docker push ${MANIFESTIMAGE}:amd64-latest
|
|
||||||
docker push ${MANIFESTIMAGE}:arm64v8-${META_TAG}
|
|
||||||
docker push ${MANIFESTIMAGE}:arm64v8-latest
|
|
||||||
docker push ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
|
|
||||||
if [ -n "${SEMVER}" ]; then
|
|
||||||
docker push ${MANIFESTIMAGE}:amd64-${SEMVER}
|
|
||||||
docker push ${MANIFESTIMAGE}:arm64v8-${SEMVER}
|
|
||||||
fi
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:latest || :
|
|
||||||
docker manifest create ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest
|
|
||||||
docker manifest annotate ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:arm64v8-latest --os linux --arch arm64 --variant v8
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:${META_TAG} || :
|
|
||||||
docker manifest create ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
|
|
||||||
docker manifest annotate ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG} --os linux --arch arm64 --variant v8
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} || :
|
|
||||||
docker manifest create ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
|
|
||||||
docker manifest annotate ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} --os linux --arch arm64 --variant v8
|
|
||||||
if [ -n "${SEMVER}" ]; then
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:${SEMVER} || :
|
|
||||||
docker manifest create ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
|
|
||||||
docker manifest annotate ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER} --os linux --arch arm64 --variant v8
|
|
||||||
fi
|
|
||||||
token=$(curl -sX GET "https://ghcr.io/token?scope=repository%3Alinuxserver%2F${CONTAINER_NAME}%3Apull" | jq -r '.token')
|
|
||||||
digest=$(curl -s \
|
|
||||||
--header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
|
|
||||||
--header "Authorization: Bearer ${token}" \
|
|
||||||
"https://ghcr.io/v2/linuxserver/${CONTAINER_NAME}/manifests/arm32v7-latest")
|
|
||||||
if [[ $(echo "$digest" | jq -r '.layers') != "null" ]]; then
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:arm32v7-latest || :
|
|
||||||
docker manifest create ${MANIFESTIMAGE}:arm32v7-latest ${MANIFESTIMAGE}:amd64-latest
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:arm32v7-latest
|
|
||||||
fi
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:latest
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:${META_TAG}
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:${EXT_RELEASE_TAG}
|
|
||||||
if [ -n "${SEMVER}" ]; then
|
|
||||||
docker manifest push --purge ${MANIFESTIMAGE}:${SEMVER}
|
|
||||||
fi
|
|
||||||
done
|
done
|
||||||
'''
|
docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${META_TAG} -t ${MANIFESTIMAGE}:amd64-latest -t ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
}
|
docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${META_TAG} -t ${MANIFESTIMAGE}:arm64v8-latest -t ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
if [ -n "${SEMVER}" ]; then
|
||||||
|
docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${SEMVER} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
|
||||||
|
docker buildx imagetools create -t ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest
|
||||||
|
docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
|
||||||
|
|
||||||
|
docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
|
||||||
|
if [ -n "${SEMVER}" ]; then
|
||||||
|
docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
'''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -839,50 +989,52 @@ pipeline {
|
|||||||
"object": "'${COMMIT_SHA}'",\
|
"object": "'${COMMIT_SHA}'",\
|
||||||
"message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to master",\
|
"message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to master",\
|
||||||
"type": "commit",\
|
"type": "commit",\
|
||||||
"tagger": {"name": "LinuxServer Jenkins","email": "jenkins@linuxserver.io","date": "'${GITHUB_DATE}'"}}' '''
|
"tagger": {"name": "LinuxServer-CI","email": "ci@linuxserver.io","date": "'${GITHUB_DATE}'"}}' '''
|
||||||
echo "Pushing New release for Tag"
|
echo "Pushing New release for Tag"
|
||||||
sh '''#! /bin/bash
|
sh '''#! /bin/bash
|
||||||
echo "Updating external repo packages to ${EXT_RELEASE_CLEAN}" > releasebody.json
|
echo "Updating external repo packages to ${EXT_RELEASE_CLEAN}" > releasebody.json
|
||||||
echo '{"tag_name":"'${META_TAG}'",\
|
echo '{"tag_name":"'${META_TAG}'",\
|
||||||
"target_commitish": "master",\
|
"target_commitish": "master",\
|
||||||
"name": "'${META_TAG}'",\
|
"name": "'${META_TAG}'",\
|
||||||
"body": "**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Repo Changes:**\\n\\n' > start
|
"body": "**CI Report:**\\n\\n'${CI_URL:-N/A}'\\n\\n**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Remote Changes:**\\n\\n' > start
|
||||||
printf '","draft": false,"prerelease": false}' >> releasebody.json
|
printf '","draft": false,"prerelease": false}' >> releasebody.json
|
||||||
paste -d'\\0' start releasebody.json > releasebody.json.done
|
paste -d'\\0' start releasebody.json > releasebody.json.done
|
||||||
curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done'''
|
curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done'''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Use helper container to sync the current README on master to the dockerhub endpoint
|
// Add protection to the release branch
|
||||||
stage('Sync-README') {
|
stage('Github-Release-Branch-Protection') {
|
||||||
when {
|
when {
|
||||||
|
branch "master"
|
||||||
environment name: 'CHANGE_ID', value: ''
|
environment name: 'CHANGE_ID', value: ''
|
||||||
environment name: 'EXIT_STATUS', value: ''
|
environment name: 'EXIT_STATUS', value: ''
|
||||||
}
|
}
|
||||||
steps {
|
steps {
|
||||||
withCredentials([
|
echo "Setting up protection for release branch master"
|
||||||
[
|
sh '''#! /bin/bash
|
||||||
$class: 'UsernamePasswordMultiBinding',
|
curl -H "Authorization: token ${GITHUB_TOKEN}" -X PUT https://api.github.com/repos/${LS_USER}/${LS_REPO}/branches/master/protection \
|
||||||
credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
|
-d $(jq -c . << EOF
|
||||||
usernameVariable: 'DOCKERUSER',
|
{
|
||||||
passwordVariable: 'DOCKERPASS'
|
"required_status_checks": null,
|
||||||
]
|
"enforce_admins": false,
|
||||||
]) {
|
"required_pull_request_reviews": {
|
||||||
sh '''#! /bin/bash
|
"dismiss_stale_reviews": false,
|
||||||
set -e
|
"require_code_owner_reviews": false,
|
||||||
TEMPDIR=$(mktemp -d)
|
"require_last_push_approval": false,
|
||||||
docker pull ghcr.io/linuxserver/jenkins-builder:latest
|
"required_approving_review_count": 1
|
||||||
docker run --rm -e CONTAINER_NAME=${CONTAINER_NAME} -e GITHUB_BRANCH="${BRANCH_NAME}" -v ${TEMPDIR}:/ansible/jenkins ghcr.io/linuxserver/jenkins-builder:latest
|
},
|
||||||
docker pull ghcr.io/linuxserver/readme-sync
|
"restrictions": null,
|
||||||
docker run --rm=true \
|
"required_linear_history": false,
|
||||||
-e DOCKERHUB_USERNAME=$DOCKERUSER \
|
"allow_force_pushes": false,
|
||||||
-e DOCKERHUB_PASSWORD=$DOCKERPASS \
|
"allow_deletions": false,
|
||||||
-e GIT_REPOSITORY=${LS_USER}/${LS_REPO} \
|
"block_creations": false,
|
||||||
-e DOCKER_REPOSITORY=${IMAGE} \
|
"required_conversation_resolution": true,
|
||||||
-e GIT_BRANCH=master \
|
"lock_branch": false,
|
||||||
-v ${TEMPDIR}/docker-${CONTAINER_NAME}:/mnt \
|
"allow_fork_syncing": false,
|
||||||
ghcr.io/linuxserver/readme-sync bash -c 'node sync'
|
"required_signatures": false
|
||||||
rm -Rf ${TEMPDIR} '''
|
}
|
||||||
}
|
EOF
|
||||||
|
) '''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// If this is a Pull request send the CI link as a comment on it
|
// If this is a Pull request send the CI link as a comment on it
|
||||||
@@ -969,32 +1121,94 @@ pipeline {
|
|||||||
###################### */
|
###################### */
|
||||||
post {
|
post {
|
||||||
always {
|
always {
|
||||||
|
sh '''#!/bin/bash
|
||||||
|
rm -rf /config/.ssh/id_sign
|
||||||
|
rm -rf /config/.ssh/id_sign.pub
|
||||||
|
git config --global --unset gpg.format
|
||||||
|
git config --global --unset user.signingkey
|
||||||
|
git config --global --unset commit.gpgsign
|
||||||
|
'''
|
||||||
script{
|
script{
|
||||||
|
env.JOB_DATE = sh(
|
||||||
|
script: '''date '+%Y-%m-%dT%H:%M:%S%:z' ''',
|
||||||
|
returnStdout: true).trim()
|
||||||
if (env.EXIT_STATUS == "ABORTED"){
|
if (env.EXIT_STATUS == "ABORTED"){
|
||||||
sh 'echo "build aborted"'
|
sh 'echo "build aborted"'
|
||||||
}
|
}else{
|
||||||
else if (currentBuild.currentResult == "SUCCESS"){
|
if (currentBuild.currentResult == "SUCCESS"){
|
||||||
sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 1681177,\
|
if (env.GITHUBIMAGE =~ /lspipepr/){
|
||||||
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** Success\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
|
env.JOB_WEBHOOK_STATUS='Success'
|
||||||
"username": "Jenkins"}' ${BUILDS_DISCORD} '''
|
env.JOB_WEBHOOK_COLOUR=3957028
|
||||||
}
|
env.JOB_WEBHOOK_FOOTER='PR Build'
|
||||||
else {
|
}else if (env.GITHUBIMAGE =~ /lsiodev/){
|
||||||
sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 16711680,\
|
env.JOB_WEBHOOK_STATUS='Success'
|
||||||
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** failure\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
|
env.JOB_WEBHOOK_COLOUR=3957028
|
||||||
|
env.JOB_WEBHOOK_FOOTER='Dev Build'
|
||||||
|
}else{
|
||||||
|
env.JOB_WEBHOOK_STATUS='Success'
|
||||||
|
env.JOB_WEBHOOK_COLOUR=1681177
|
||||||
|
env.JOB_WEBHOOK_FOOTER='Live Build'
|
||||||
|
}
|
||||||
|
}else{
|
||||||
|
if (env.GITHUBIMAGE =~ /lspipepr/){
|
||||||
|
env.JOB_WEBHOOK_STATUS='Failure'
|
||||||
|
env.JOB_WEBHOOK_COLOUR=12669523
|
||||||
|
env.JOB_WEBHOOK_FOOTER='PR Build'
|
||||||
|
}else if (env.GITHUBIMAGE =~ /lsiodev/){
|
||||||
|
env.JOB_WEBHOOK_STATUS='Failure'
|
||||||
|
env.JOB_WEBHOOK_COLOUR=12669523
|
||||||
|
env.JOB_WEBHOOK_FOOTER='Dev Build'
|
||||||
|
}else{
|
||||||
|
env.JOB_WEBHOOK_STATUS='Failure'
|
||||||
|
env.JOB_WEBHOOK_COLOUR=16711680
|
||||||
|
env.JOB_WEBHOOK_FOOTER='Live Build'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"'color'": '${JOB_WEBHOOK_COLOUR}',\
|
||||||
|
"footer": {"text" : "'"${JOB_WEBHOOK_FOOTER}"'"},\
|
||||||
|
"timestamp": "'${JOB_DATE}'",\
|
||||||
|
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** '${JOB_WEBHOOK_STATUS}'\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
|
||||||
"username": "Jenkins"}' ${BUILDS_DISCORD} '''
|
"username": "Jenkins"}' ${BUILDS_DISCORD} '''
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
     cleanup {
       sh '''#! /bin/bash
-            echo "Performing docker system prune!!"
+            echo "Pruning builder!!"
-            containers=$(docker ps -aq)
+            docker builder prune -f --builder container || :
+            containers=$(docker ps -q)
             if [[ -n "${containers}" ]]; then
-              docker stop ${containers}
+              BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
+              for container in ${containers}; do
+                if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
+                  echo "skipping buildx container in docker stop"
+                else
+                  echo "Stopping container ${container}"
+                  docker stop ${container}
+                fi
+              done
             fi
-            docker system prune -af --volumes || :
+            docker system prune -f --volumes || :
+            docker image prune -af || :
          '''
       cleanWs()
     }
   }
 }
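Collected from the added lines above, the new cleanup logic as a runnable script; nothing here goes beyond what the diff introduces:

```bash
#!/bin/bash
# Prune the named buildx builder, stop every running container except the
# buildx one, then prune images and volumes.
docker builder prune -f --builder container || :

BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
for container in $(docker ps -q); do
    if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
        echo "skipping buildx container ${container}"
    else
        echo "stopping container ${container}"
        docker stop "${container}"
    fi
done

docker system prune -f --volumes || :
docker image prune -af || :
```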
+
+def retry_backoff(int max_attempts, int power_base, Closure c) {
+    int n = 0
+    while (n < max_attempts) {
+        try {
+            c()
+            return
+        } catch (err) {
+            if ((n + 1) >= max_attempts) {
+                throw err
+            }
+            sleep(power_base ** n)
+            n++
+        }
+    }
+    return
+}
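The added `retry_backoff` helper is Groovy; the same retry-with-exponential-backoff idea, sketched in bash for anyone scripting outside Jenkins (parameter names mirror the Groovy version, the example command is a placeholder):

```bash
# retry_backoff MAX_ATTEMPTS POWER_BASE COMMAND [ARGS...]
# Retries COMMAND, sleeping power_base^n seconds between attempts.
retry_backoff() {
    local max_attempts=$1 power_base=$2
    shift 2
    local n=0
    while (( n < max_attempts )); do
        if "$@"; then
            return 0
        fi
        if (( n + 1 >= max_attempts )); then
            return 1
        fi
        sleep $(( power_base ** n ))
        n=$(( n + 1 ))
    done
    return 1
}

# Example: retry a flaky push up to 5 times, sleeping 1, 2, 4, 8 seconds.
# retry_backoff 5 2 docker push example/image:tag
```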
README.md: 187 changed lines
@@ -1,12 +1,10 @@
|
|||||||
<!-- DO NOT EDIT THIS FILE MANUALLY -->
|
<!-- DO NOT EDIT THIS FILE MANUALLY -->
|
||||||
<!-- Please read the https://github.com/linuxserver/docker-transmission/blob/master/.github/CONTRIBUTING.md -->
|
<!-- Please read https://github.com/linuxserver/docker-transmission/blob/master/.github/CONTRIBUTING.md -->
|
||||||
|
|
||||||
[](https://linuxserver.io)
|
[](https://linuxserver.io)
|
||||||
|
|
||||||
[](https://blog.linuxserver.io "all the things you can do with our containers including How-To guides, opinions and much more!")
|
[](https://blog.linuxserver.io "all the things you can do with our containers including How-To guides, opinions and much more!")
|
||||||
[](https://discord.gg/YWrKVTn "realtime support / chat with the community and the team.")
|
[](https://linuxserver.io/discord "realtime support / chat with the community and the team.")
|
||||||
[](https://discourse.linuxserver.io "post on our community forum.")
|
[](https://discourse.linuxserver.io "post on our community forum.")
|
||||||
[](https://fleet.linuxserver.io "an online web interface which displays all of our maintained images.")
|
|
||||||
[](https://github.com/linuxserver "view the source for all of our repositories.")
|
[](https://github.com/linuxserver "view the source for all of our repositories.")
|
||||||
[](https://opencollective.com/linuxserver "please consider helping us by either donating or contributing to our budget")
|
[](https://opencollective.com/linuxserver "please consider helping us by either donating or contributing to our budget")
|
||||||
|
|
||||||
@@ -21,15 +19,14 @@ The [LinuxServer.io](https://linuxserver.io) team brings you another container r
|
|||||||
Find us at:
|
Find us at:
|
||||||
|
|
||||||
* [Blog](https://blog.linuxserver.io) - all the things you can do with our containers including How-To guides, opinions and much more!
|
* [Blog](https://blog.linuxserver.io) - all the things you can do with our containers including How-To guides, opinions and much more!
|
||||||
* [Discord](https://discord.gg/YWrKVTn) - realtime support / chat with the community and the team.
|
* [Discord](https://linuxserver.io/discord) - realtime support / chat with the community and the team.
|
||||||
* [Discourse](https://discourse.linuxserver.io) - post on our community forum.
|
* [Discourse](https://discourse.linuxserver.io) - post on our community forum.
|
||||||
* [Fleet](https://fleet.linuxserver.io) - an online web interface which displays all of our maintained images.
|
|
||||||
* [GitHub](https://github.com/linuxserver) - view the source for all of our repositories.
|
* [GitHub](https://github.com/linuxserver) - view the source for all of our repositories.
|
||||||
* [Open Collective](https://opencollective.com/linuxserver) - please consider helping us by either donating or contributing to our budget
|
* [Open Collective](https://opencollective.com/linuxserver) - please consider helping us by either donating or contributing to our budget
|
||||||
|
|
||||||
# [linuxserver/transmission](https://github.com/linuxserver/docker-transmission)
|
# [linuxserver/transmission](https://github.com/linuxserver/docker-transmission)
|
||||||
|
|
||||||
[](https://scarf.sh/gateway/linuxserver-ci/docker/linuxserver%2Ftransmission)
|
[](https://scarf.sh)
|
||||||
[](https://github.com/linuxserver/docker-transmission)
|
[](https://github.com/linuxserver/docker-transmission)
|
||||||
[](https://github.com/linuxserver/docker-transmission/releases)
|
[](https://github.com/linuxserver/docker-transmission/releases)
|
||||||
[](https://github.com/linuxserver/docker-transmission/packages)
|
[](https://github.com/linuxserver/docker-transmission/packages)
|
||||||
@@ -46,7 +43,7 @@ Find us at:
|
|||||||
|
|
||||||
## Supported Architectures
|
## Supported Architectures
|
||||||
|
|
||||||
We utilise the docker manifest for multi-platform awareness. More information is available from docker [here](https://github.com/docker/distribution/blob/master/docs/spec/manifest-v2-2.md#manifest-list) and our announcement [here](https://blog.linuxserver.io/2019/02/21/the-lsio-pipeline-project/).
|
We utilise the docker manifest for multi-platform awareness. More information is available from docker [here](https://distribution.github.io/distribution/spec/manifest-v2-2/#manifest-list) and our announcement [here](https://blog.linuxserver.io/2019/02/21/the-lsio-pipeline-project/).
|
||||||
|
|
||||||
Simply pulling `lscr.io/linuxserver/transmission:latest` should retrieve the correct image for your arch, but you can also pull specific arch images via tags.
|
Simply pulling `lscr.io/linuxserver/transmission:latest` should retrieve the correct image for your arch, but you can also pull specific arch images via tags.
|
||||||
|
|
||||||
@@ -56,7 +53,6 @@ The architectures supported by this image are:
|
|||||||
| :----: | :----: | ---- |
|
| :----: | :----: | ---- |
|
||||||
| x86-64 | ✅ | amd64-\<version tag\> |
|
| x86-64 | ✅ | amd64-\<version tag\> |
|
||||||
| arm64 | ✅ | arm64v8-\<version tag\> |
|
| arm64 | ✅ | arm64v8-\<version tag\> |
|
||||||
| armhf | ❌ | |
|
|
||||||
|
|
||||||
## Application Setup
|
## Application Setup
|
||||||
|
|
||||||
@@ -84,15 +80,25 @@ Use `HOST_WHITELIST` to enable an list of dns names as host-whitelist. This enab
|
|||||||
|
|
||||||
Use `PEERPORT` to specify the port(s) Transmission should listen on. This disables random port selection. This should be the same as the port mapped in your docker configuration.
|
Use `PEERPORT` to specify the port(s) Transmission should listen on. This disables random port selection. This should be the same as the port mapped in your docker configuration.
|
||||||
|
|
||||||
|
## Read-Only Operation
|
||||||
|
|
||||||
|
This image can be run with a read-only container filesystem. For details please [read the docs](https://docs.linuxserver.io/misc/read-only/).
|
||||||
|
|
||||||
|
## Non-Root Operation
|
||||||
|
|
||||||
|
This image can be run with a non-root user. For details please [read the docs](https://docs.linuxserver.io/misc/non-root/).
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
Here are some example snippets to help you get started creating a container.
|
To help you get started creating a container from this image you can either use docker-compose or the docker cli.
|
||||||
|
|
||||||
|
>[!NOTE]
|
||||||
|
>Unless a parameter is flagged as 'optional', it is *mandatory* and a value must be provided.
|
||||||
|
|
||||||
### docker-compose (recommended, [click here for more info](https://docs.linuxserver.io/general/docker-compose))
|
### docker-compose (recommended, [click here for more info](https://docs.linuxserver.io/general/docker-compose))
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
---
|
---
|
||||||
version: "2.1"
|
|
||||||
services:
|
services:
|
||||||
transmission:
|
transmission:
|
||||||
image: lscr.io/linuxserver/transmission:latest
|
image: lscr.io/linuxserver/transmission:latest
|
||||||
@@ -108,9 +114,9 @@ services:
|
|||||||
- PEERPORT= #optional
|
- PEERPORT= #optional
|
||||||
- HOST_WHITELIST= #optional
|
- HOST_WHITELIST= #optional
|
||||||
volumes:
|
volumes:
|
||||||
- /path/to/data:/config
|
- /path/to/transmission/data:/config
|
||||||
- /path/to/downloads:/downloads
|
- /path/to/downloads:/downloads #optional
|
||||||
- /path/to/watch/folder:/watch
|
- /path/to/watch/folder:/watch #optional
|
||||||
ports:
|
ports:
|
||||||
- 9091:9091
|
- 9091:9091
|
||||||
- 51413:51413
|
- 51413:51413
|
||||||
@@ -135,23 +141,22 @@ docker run -d \
|
|||||||
-p 9091:9091 \
|
-p 9091:9091 \
|
||||||
-p 51413:51413 \
|
-p 51413:51413 \
|
||||||
-p 51413:51413/udp \
|
-p 51413:51413/udp \
|
||||||
-v /path/to/data:/config \
|
-v /path/to/transmission/data:/config \
|
||||||
-v /path/to/downloads:/downloads \
|
-v /path/to/downloads:/downloads `#optional` \
|
||||||
-v /path/to/watch/folder:/watch \
|
-v /path/to/watch/folder:/watch `#optional` \
|
||||||
--restart unless-stopped \
|
--restart unless-stopped \
|
||||||
lscr.io/linuxserver/transmission:latest
|
lscr.io/linuxserver/transmission:latest
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Parameters
|
## Parameters
|
||||||
|
|
||||||
Container images are configured using parameters passed at runtime (such as those above). These parameters are separated by a colon and indicate `<external>:<internal>` respectively. For example, `-p 8080:80` would expose port `80` from inside the container to be accessible from the host's IP on port `8080` outside the container.
|
Containers are configured using parameters passed at runtime (such as those above). These parameters are separated by a colon and indicate `<external>:<internal>` respectively. For example, `-p 8080:80` would expose port `80` from inside the container to be accessible from the host's IP on port `8080` outside the container.
|
||||||
|
|
||||||
| Parameter | Function |
|
| Parameter | Function |
|
||||||
| :----: | --- |
|
| :----: | --- |
|
||||||
| `-p 9091` | WebUI |
|
| `-p 9091:9091` | WebUI |
|
||||||
| `-p 51413` | Torrent Port TCP |
|
| `-p 51413:51413` | Torrent Port TCP |
|
||||||
| `-p 51413/udp` | Torrent Port UDP |
|
| `-p 51413:51413/udp` | Torrent Port UDP |
|
||||||
| `-e PUID=1000` | for UserID - see below for explanation |
|
| `-e PUID=1000` | for UserID - see below for explanation |
|
||||||
| `-e PGID=1000` | for GroupID - see below for explanation |
|
| `-e PGID=1000` | for GroupID - see below for explanation |
|
||||||
| `-e TZ=Etc/UTC` | specify a timezone to use, see this [list](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List). |
|
| `-e TZ=Etc/UTC` | specify a timezone to use, see this [list](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List). |
|
||||||
@@ -164,6 +169,8 @@ Container images are configured using parameters passed at runtime (such as thos
|
|||||||
| `-v /config` | Where transmission should store config files and logs. |
|
| `-v /config` | Where transmission should store config files and logs. |
|
||||||
| `-v /downloads` | Local path for downloads. |
|
| `-v /downloads` | Local path for downloads. |
|
||||||
| `-v /watch` | Watch folder for torrent files. |
|
| `-v /watch` | Watch folder for torrent files. |
|
||||||
|
| `--read-only=true` | Run container with a read-only filesystem. Please [read the docs](https://docs.linuxserver.io/misc/read-only/). |
|
||||||
|
| `--user=1000:1000` | Run container with a non-root user. Please [read the docs](https://docs.linuxserver.io/misc/non-root/). |
|
||||||
|
|
||||||
## Environment variables from files (Docker secrets)
|
## Environment variables from files (Docker secrets)
|
||||||
|
|
||||||
@@ -172,10 +179,10 @@ You can set any environment variable from a file by using a special prepend `FIL
|
|||||||
As an example:
|
As an example:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
-e FILE__PASSWORD=/run/secrets/mysecretpassword
|
-e FILE__MYVAR=/run/secrets/mysecretvariable
|
||||||
```
|
```
|
||||||
|
|
||||||
Will set the environment variable `PASSWORD` based on the contents of the `/run/secrets/mysecretpassword` file.
|
Will set the environment variable `MYVAR` based on the contents of the `/run/secrets/mysecretvariable` file.
|
||||||
|
|
||||||
## Umask for running applications
|
## Umask for running applications
|
||||||
|
|
||||||
@@ -184,15 +191,20 @@ Keep in mind umask is not chmod it subtracts from permissions based on it's valu
|
|||||||
|
|
||||||
## User / Group Identifiers
|
## User / Group Identifiers
|
||||||
|
|
||||||
When using volumes (`-v` flags) permissions issues can arise between the host OS and the container, we avoid this issue by allowing you to specify the user `PUID` and group `PGID`.
|
When using volumes (`-v` flags), permissions issues can arise between the host OS and the container, we avoid this issue by allowing you to specify the user `PUID` and group `PGID`.
|
||||||
|
|
||||||
Ensure any volume directories on the host are owned by the same user you specify and any permissions issues will vanish like magic.
|
Ensure any volume directories on the host are owned by the same user you specify and any permissions issues will vanish like magic.
|
||||||
|
|
||||||
In this instance `PUID=1000` and `PGID=1000`, to find yours use `id user` as below:
|
In this instance `PUID=1000` and `PGID=1000`, to find yours use `id your_user` as below:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ id username
|
id your_user
|
||||||
uid=1000(dockeruser) gid=1000(dockergroup) groups=1000(dockergroup)
|
```
|
||||||
|
|
||||||
|
Example output:
|
||||||
|
|
||||||
|
```text
|
||||||
|
uid=1000(your_user) gid=1000(your_user) groups=1000(your_user)
|
||||||
```
|
```
|
||||||
|
|
||||||
## Docker Mods
|
## Docker Mods
|
||||||
@@ -203,53 +215,101 @@ We publish various [Docker Mods](https://github.com/linuxserver/docker-mods) to
|
|||||||
|
|
||||||
## Support Info
|
## Support Info
|
||||||
|
|
||||||
* Shell access whilst the container is running: `docker exec -it transmission /bin/bash`
|
* Shell access whilst the container is running:
|
||||||
* To monitor the logs of the container in realtime: `docker logs -f transmission`
|
|
||||||
* container version number
|
```bash
|
||||||
* `docker inspect -f '{{ index .Config.Labels "build_version" }}' transmission`
|
docker exec -it transmission /bin/bash
|
||||||
* image version number
|
```
|
||||||
* `docker inspect -f '{{ index .Config.Labels "build_version" }}' lscr.io/linuxserver/transmission:latest`
|
|
||||||
|
* To monitor the logs of the container in realtime:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker logs -f transmission
|
||||||
|
```
|
||||||
|
|
||||||
|
* Container version number:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker inspect -f '{{ index .Config.Labels "build_version" }}' transmission
|
||||||
|
```
|
||||||
|
|
||||||
|
* Image version number:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker inspect -f '{{ index .Config.Labels "build_version" }}' lscr.io/linuxserver/transmission:latest
|
||||||
|
```
|
||||||
|
|
||||||
## Updating Info
|
## Updating Info
|
||||||
|
|
||||||
Most of our images are static, versioned, and require an image update and container recreation to update the app inside. With some exceptions (ie. nextcloud, plex), we do not recommend or support updating apps inside the container. Please consult the [Application Setup](#application-setup) section above to see if it is recommended for the image.
|
Most of our images are static, versioned, and require an image update and container recreation to update the app inside. With some exceptions (noted in the relevant readme.md), we do not recommend or support updating apps inside the container. Please consult the [Application Setup](#application-setup) section above to see if it is recommended for the image.
|
||||||
|
|
||||||
Below are the instructions for updating containers:
|
Below are the instructions for updating containers:
|
||||||
|
|
||||||
### Via Docker Compose
|
### Via Docker Compose
|
||||||
|
|
||||||
* Update all images: `docker-compose pull`
|
* Update images:
|
||||||
* or update a single image: `docker-compose pull transmission`
|
* All images:
|
||||||
* Let compose update all containers as necessary: `docker-compose up -d`
|
|
||||||
* or update a single container: `docker-compose up -d transmission`
|
```bash
|
||||||
* You can also remove the old dangling images: `docker image prune`
|
docker-compose pull
|
||||||
|
```
|
||||||
|
|
||||||
|
* Single image:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker-compose pull transmission
|
||||||
|
```
|
||||||
|
|
||||||
|
* Update containers:
|
||||||
|
* All containers:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker-compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
* Single container:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker-compose up -d transmission
|
||||||
|
```
|
||||||
|
|
||||||
|
* You can also remove the old dangling images:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker image prune
|
||||||
|
```
|
||||||
|
|
||||||
### Via Docker Run
|
### Via Docker Run
|
||||||
|
|
||||||
* Update the image: `docker pull lscr.io/linuxserver/transmission:latest`
|
* Update the image:
|
||||||
* Stop the running container: `docker stop transmission`
|
|
||||||
* Delete the container: `docker rm transmission`
|
```bash
|
||||||
|
docker pull lscr.io/linuxserver/transmission:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
* Stop the running container:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker stop transmission
|
||||||
|
```
|
||||||
|
|
||||||
|
* Delete the container:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker rm transmission
|
||||||
|
```
|
||||||
|
|
||||||
* Recreate a new container with the same docker run parameters as instructed above (if mapped correctly to a host folder, your `/config` folder and settings will be preserved)
|
* Recreate a new container with the same docker run parameters as instructed above (if mapped correctly to a host folder, your `/config` folder and settings will be preserved)
|
||||||
* You can also remove the old dangling images: `docker image prune`
|
* You can also remove the old dangling images:
|
||||||
|
|
||||||
### Via Watchtower auto-updater (only use if you don't remember the original parameters)
|
```bash
|
||||||
|
docker image prune
|
||||||
* Pull the latest image at its tag and replace it with the same env variables in one run:
|
```
|
||||||
|
|
||||||
```bash
|
|
||||||
docker run --rm \
|
|
||||||
-v /var/run/docker.sock:/var/run/docker.sock \
|
|
||||||
containrrr/watchtower \
|
|
||||||
--run-once transmission
|
|
||||||
```
|
|
||||||
|
|
||||||
* You can also remove the old dangling images: `docker image prune`
|
|
||||||
|
|
||||||
**Note:** We do not endorse the use of Watchtower as a solution to automated updates of existing Docker containers. In fact we generally discourage automated updates. However, this is a useful tool for one-time manual updates of containers where you have forgotten the original parameters. In the long term, we highly recommend using [Docker Compose](https://docs.linuxserver.io/general/docker-compose).
|
|
||||||
|
|
||||||
### Image Update Notifications - Diun (Docker Image Update Notifier)
|
### Image Update Notifications - Diun (Docker Image Update Notifier)
|
||||||
|
|
||||||
* We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported.
|
>[!TIP]
|
||||||
|
>We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported.
|
||||||
|
|
||||||
## Building locally
|
## Building locally
|
||||||
|
|
||||||
@@ -264,16 +324,19 @@ docker build \
|
|||||||
-t lscr.io/linuxserver/transmission:latest .
|
-t lscr.io/linuxserver/transmission:latest .
|
||||||
```
|
```
|
||||||
|
|
||||||
The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static`
|
The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static`
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run --rm --privileged multiarch/qemu-user-static:register --reset
|
docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset
|
||||||
```
|
```
|
||||||
|
|
||||||
Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`.
|
Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`.
|
||||||
|
|
||||||
## Versions
|
## Versions
|
||||||
|
|
||||||
|
* **29.11.24:** - Fix PEERPORT setting.
|
||||||
|
* **07.10.23:** - Install unrar from [linuxserver repo](https://github.com/linuxserver/docker-unrar).
|
||||||
|
* **10.08.23:** - Bump unrar to 6.2.10.
|
||||||
* **10.06.23:** - Bump unrar to 6.2.8, install transmission-extra.
|
* **10.06.23:** - Bump unrar to 6.2.8, install transmission-extra.
|
||||||
* **25.05.23:** - Deprecate armhf.
|
* **25.05.23:** - Deprecate armhf.
|
||||||
* **14.05.23:** - Explicitly install transmission-remote.
|
* **14.05.23:** - Explicitly install transmission-remote.
|
||||||
@@ -6,7 +6,6 @@ external_type: alpine_repo
 release_type: stable
 release_tag: latest
 ls_branch: master
-build_armhf: false
 repo_vars:
   - BUILD_VERSION_ARG = 'TRANSMISSION_VERSION'
   - LS_USER = 'linuxserver'
@@ -25,6 +24,6 @@ repo_vars:
   - CI_PORT='9091'
   - CI_SSL='false'
   - CI_DELAY='120'
-  - CI_DOCKERENV='TZ=US/Pacific'
+  - CI_DOCKERENV=''
-  - CI_AUTH='user:password'
+  - CI_AUTH=''
   - CI_WEBPATH=''
|
|||||||
@@ -1,73 +1,74 @@
|
|||||||
NAME VERSION TYPE
|
NAME VERSION TYPE
|
||||||
7zip 22.01-r5 apk
|
7zip 25.01-r0 apk
|
||||||
alpine-baselayout 3.4.3-r1 apk
|
acl-libs 2.3.2-r1 apk
|
||||||
alpine-baselayout-data 3.4.3-r1 apk
|
alpine-baselayout 3.7.1-r1 apk
|
||||||
alpine-keys 2.4-r1 apk
|
alpine-baselayout-data 3.7.1-r1 apk
|
||||||
apk-tools 2.14.0-r5 apk
|
alpine-keys 2.6-r0 apk
|
||||||
bash 5.2.15-r5 apk
|
apk-tools 3.0.0_rc6-r0 apk
|
||||||
brotli-libs 1.0.9-r15 apk
|
bash 5.3.3-r1 apk
|
||||||
busybox 1.36.1 binary
|
brotli-libs 1.1.0-r2 apk
|
||||||
busybox 1.36.1-r1 apk
|
busybox 1.37.0-r24 apk
|
||||||
busybox-binsh 1.36.1-r1 apk
|
busybox-binsh 1.37.0-r24 apk
|
||||||
c-ares 1.19.1-r1 apk
|
c-ares 1.34.5-r0 apk
|
||||||
ca-certificates 20230506-r0 apk
|
ca-certificates 20250911-r0 apk
|
||||||
ca-certificates-bundle 20230506-r0 apk
|
ca-certificates-bundle 20250911-r0 apk
|
||||||
coreutils 9.3-r7 apk
|
catatonit 0.2.1-r0 apk
|
||||||
coreutils-env 9.3-r7 apk
|
coreutils 9.8-r1 apk
|
||||||
coreutils-fmt 9.3-r7 apk
|
coreutils-env 9.8-r1 apk
|
||||||
coreutils-sha512sum 9.3-r7 apk
|
coreutils-fmt 9.8-r1 apk
|
||||||
curl 8.1.2-r1 apk
|
coreutils-sha512sum 9.8-r1 apk
|
||||||
findutils 4.9.0-r5 apk
|
curl 8.16.0-r1 apk
|
||||||
gdbm 1.23-r1 apk
|
findutils 4.10.0-r0 apk
|
||||||
jq 1.6-r3 apk
|
gdbm 1.26-r0 apk
|
||||||
libacl 2.3.1-r4 apk
|
jq 1.8.1-r0 apk
|
||||||
libattr 2.5.1-r5 apk
|
libapk 3.0.0_rc6-r0 apk
|
||||||
libbsd 0.11.7-r2 apk
|
libattr 2.5.2-r2 apk
|
||||||
libbz2 1.0.8-r6 apk
|
libbsd 0.12.2-r0 apk
|
||||||
libc-utils 0.7.2-r5 apk
|
libbz2 1.0.8-r6 apk
|
||||||
libcrypto3 3.1.1-r1 apk
|
libcrypto3 3.5.4-r0 apk
|
||||||
libcurl 8.1.2-r1 apk
|
libcurl 8.16.0-r1 apk
|
||||||
libdeflate 1.18-r1 apk
|
libdeflate 1.24-r0 apk
|
||||||
libevent 2.1.12-r7 apk
|
libevent 2.1.12-r8 apk
|
||||||
libexpat 2.5.0-r2 apk
|
libexpat 2.7.3-r0 apk
|
||||||
libffi 3.4.4-r3 apk
|
libffi 3.5.2-r0 apk
|
||||||
libgcc 13.1.1_git20230617-r0 apk
|
libgcc 15.2.0-r0 apk
|
||||||
libidn2 2.3.4-r2 apk
|
libidn2 2.3.8-r0 apk
|
||||||
libintl 0.22-r0 apk
|
libintl 0.24.1-r1 apk
|
||||||
libmd 1.1.0-r0 apk
|
libmd 1.1.0-r0 apk
|
||||||
libncursesw 6.4_p20230625-r0 apk
|
libncursesw 6.5_p20250816-r0 apk
|
||||||
libpanelw 6.4_p20230625-r0 apk
|
libpanelw 6.5_p20250816-r0 apk
|
||||||
libproc2 4.0.3-r2 apk
|
libproc2 4.0.4-r3 apk
|
||||||
libpsl 0.21.2-r1 apk
|
libpsl 0.21.5-r3 apk
|
||||||
libssl3 3.1.1-r1 apk
|
libssl3 3.5.4-r0 apk
|
||||||
libstdc++ 13.1.1_git20230617-r0 apk
|
libstdc++ 15.2.0-r0 apk
|
||||||
libunistring 1.1-r2 apk
|
libunistring 1.3-r0 apk
|
||||||
linux-pam 1.5.3-r2 apk
|
linux-pam 1.7.0-r4 apk
|
||||||
miniupnpc 2.2.5-r0 apk
|
miniupnpc 2.3.2-r0 apk
|
||||||
mpdecimal 2.5.1-r2 apk
|
mpdecimal 4.0.1-r0 apk
|
||||||
musl 1.2.4-r1 apk
|
musl 1.2.5-r21 apk
|
||||||
musl-utils 1.2.4-r1 apk
|
musl-utils 1.2.5-r20 apk
|
||||||
ncurses-terminfo-base 6.4_p20230625-r0 apk
|
ncurses-terminfo-base 6.5_p20250816-r0 apk
|
||||||
netcat-openbsd 1.225-r0 apk
|
netcat-openbsd 1.229.1-r0 apk
|
||||||
nghttp2-libs 1.54.0-r0 apk
|
nghttp2-libs 1.67.1-r0 apk
|
||||||
oniguruma 6.9.8-r1 apk
|
nghttp3 1.11.0-r0 apk
|
||||||
procps-ng 4.0.3-r2 apk
|
oniguruma 6.9.10-r0 apk
|
||||||
pyc 0.1-r0 apk
|
procps-ng 4.0.4-r3 apk
|
||||||
python 3.11.4 binary
|
pyc 3.12.12-r0 apk
|
||||||
python3 3.11.4-r0 apk
|
python3 3.12.12-r0 apk
|
||||||
python3-pyc 3.11.4-r0 apk
|
python3-pyc 3.12.12-r0 apk
|
||||||
python3-pycache-pyc0 3.11.4-r0 apk
|
python3-pycache-pyc0 3.12.12-r0 apk
|
||||||
readline 8.2.1-r2 apk
|
readline 8.3.1-r0 apk
|
||||||
scanelf 1.3.7-r2 apk
|
scanelf 1.3.8-r2 apk
|
||||||
shadow 4.13-r6 apk
|
shadow 4.18.0-r0 apk
|
||||||
skalibs 2.13.1.1-r2 apk
|
skalibs-libs 2.14.4.0-r0 apk
|
||||||
sqlite-libs 3.42.0-r2 apk
|
sqlite-libs 3.50.4-r1 apk
|
||||||
ssl_client 1.36.1-r1 apk
|
ssl_client 1.37.0-r24 apk
|
||||||
transmission-cli 4.0.3-r3 apk
|
transmission-cli 4.0.6-r4 apk
|
||||||
transmission-daemon 4.0.3-r3 apk
|
transmission-daemon 4.0.6-r4 apk
|
||||||
transmission-extra 4.0.3-r3 apk
|
transmission-extra 4.0.6-r4 apk
|
||||||
transmission-remote 4.0.3-r3 apk
|
transmission-remote 4.0.6-r4 apk
|
||||||
tzdata 2023c-r1 apk
|
tzdata 2025b-r0 apk
|
||||||
utmps-libs 0.1.2.1-r2 apk
|
utmps-libs 0.1.3.1-r0 apk
|
||||||
xz-libs 5.4.3-r1 apk
|
xz-libs 5.8.1-r0 apk
|
||||||
zlib 1.2.13-r2 apk
|
zlib 1.3.1-r2 apk
|
||||||
|
zstd-libs 1.5.7-r2 apk
|
||||||
readme-vars.yml: 191 changed lines
@@ -6,51 +6,37 @@ project_url: "https://www.transmissionbt.com/"
|
|||||||
project_logo: "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/transmission.png"
|
project_logo: "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/transmission.png"
|
||||||
project_blurb: "[{{ project_name|capitalize }}]({{ project_url }}) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more."
|
project_blurb: "[{{ project_name|capitalize }}]({{ project_url }}) is designed for easy, powerful use. Transmission has the features you want from a BitTorrent client: encryption, a web interface, peer exchange, magnet links, DHT, µTP, UPnP and NAT-PMP port forwarding, webseed support, watch directories, tracker editing, global and per-torrent speed limits, and more."
|
||||||
project_lsio_github_repo_url: "https://github.com/linuxserver/docker-{{ project_name }}"
|
project_lsio_github_repo_url: "https://github.com/linuxserver/docker-{{ project_name }}"
|
||||||
project_blurb_optional_extras_enabled: false
|
project_categories: "Downloaders"
|
||||||
|
|
||||||
# supported architectures
|
# supported architectures
|
||||||
available_architectures:
|
available_architectures:
|
||||||
- { arch: "{{ arch_x86_64 }}", tag: "amd64-latest"}
|
- {arch: "{{ arch_x86_64 }}", tag: "amd64-latest"}
|
||||||
- { arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"}
|
- {arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"}
|
||||||
|
|
||||||
# development version
|
|
||||||
development_versions: false
|
|
||||||
|
|
||||||
# container parameters
|
# container parameters
|
||||||
common_param_env_vars_enabled: true
|
common_param_env_vars_enabled: true
|
||||||
param_container_name: "{{ project_name }}"
|
param_container_name: "{{ project_name }}"
|
||||||
param_usage_include_net: false
|
|
||||||
param_usage_include_env: true
|
|
||||||
param_env_vars:
|
|
||||||
- { env_var: "TZ", env_value: "Europe/London", desc: "Specify a timezone to use EG Europe/London." }
|
|
||||||
param_usage_include_vols: true
|
param_usage_include_vols: true
|
||||||
param_volumes:
|
param_volumes:
|
||||||
- { vol_path: "/config", vol_host_path: "/path/to/data", desc: "Where transmission should store config files and logs." }
|
- {vol_path: "/config", vol_host_path: "/path/to/{{ project_name }}/data", desc: "Where transmission should store config files and logs."}
|
||||||
- { vol_path: "/downloads", vol_host_path: "/path/to/downloads", desc: "Local path for downloads." }
|
opt_param_usage_include_vols: true
|
||||||
- { vol_path: "/watch", vol_host_path: "/path/to/watch/folder", desc: "Watch folder for torrent files." }
|
opt_param_volumes:
|
||||||
|
- {vol_path: "/downloads", vol_host_path: "/path/to/downloads", desc: "Local path for downloads."}
|
||||||
|
- {vol_path: "/watch", vol_host_path: "/path/to/watch/folder", desc: "Watch folder for torrent files."}
|
||||||
param_usage_include_ports: true
|
param_usage_include_ports: true
|
||||||
param_ports:
|
param_ports:
|
||||||
- { external_port: "9091", internal_port: "9091", port_desc: "WebUI" }
|
- {external_port: "9091", internal_port: "9091", port_desc: "WebUI"}
|
||||||
- { external_port: "51413", internal_port: "51413", port_desc: "Torrent Port TCP" }
|
- {external_port: "51413", internal_port: "51413", port_desc: "Torrent Port TCP"}
|
||||||
- { external_port: "51413", internal_port: "51413/udp", port_desc: "Torrent Port UDP" }
|
- {external_port: "51413", internal_port: "51413/udp", port_desc: "Torrent Port UDP"}
|
||||||
param_device_map: false
|
|
||||||
cap_add_param: false
|
|
||||||
|
|
||||||
# optional container parameters
|
# optional container parameters
|
||||||
opt_param_usage_include_env: true
|
opt_param_usage_include_env: true
|
||||||
opt_param_env_vars:
|
opt_param_env_vars:
|
||||||
- { env_var: "TRANSMISSION_WEB_HOME", env_value: "", desc: "Specify the path to an alternative UI folder." }
|
- {env_var: "TRANSMISSION_WEB_HOME", env_value: "", desc: "Specify the path to an alternative UI folder."}
|
||||||
- { env_var: "USER", env_value: "", desc: "Specify an optional username for the interface" }
|
- {env_var: "USER", env_value: "", desc: "Specify an optional username for the interface"}
|
||||||
- { env_var: "PASS", env_value: "", desc: "Specify an optional password for the interface" }
|
- {env_var: "PASS", env_value: "", desc: "Specify an optional password for the interface"}
|
||||||
- { env_var: "WHITELIST", env_value: "", desc: "Specify an optional list of comma separated ip whitelist. Fills rpc-whitelist setting."}
|
- {env_var: "WHITELIST", env_value: "", desc: "Specify an optional list of comma separated ip whitelist. Fills rpc-whitelist setting."}
|
||||||
- { env_var: "PEERPORT", env_value: "", desc: "Specify an optional port for torrent TCP/UDP connections. Fills peer-port setting."}
|
- {env_var: "PEERPORT", env_value: "", desc: "Specify an optional port for torrent TCP/UDP connections. Fills peer-port setting."}
|
||||||
- { env_var: "HOST_WHITELIST", env_value: "", desc: "Specify an optional list of comma separated dns name whitelist. Fills rpc-host-whitelist setting."}
|
- {env_var: "HOST_WHITELIST", env_value: "", desc: "Specify an optional list of comma separated dns name whitelist. Fills rpc-host-whitelist setting."}
|
||||||
opt_param_usage_include_vols: false
|
readonly_supported: true
|
||||||
opt_param_usage_include_ports: false
|
nonroot_supported: true
|
||||||
opt_param_device_map: false
|
|
||||||
opt_cap_add_param: false
|
|
||||||
optional_block_1: false
|
|
||||||
|
|
||||||
# application setup block
|
# application setup block
|
||||||
app_setup_block_enabled: true
|
app_setup_block_enabled: true
|
||||||
app_setup_block: |
|
app_setup_block: |
|
||||||
@@ -73,54 +59,97 @@ app_setup_block: |
|
|||||||
Use `WHITELIST` to enable a list of ip as whitelist. This enable support for `rpc-whitelist`. When `WHITELIST` is empty support for whitelist is disabled.
|
Use `WHITELIST` to enable a list of ip as whitelist. This enable support for `rpc-whitelist`. When `WHITELIST` is empty support for whitelist is disabled.
|
||||||
|
|
||||||
Use `HOST_WHITELIST` to enable an list of dns names as host-whitelist. This enable support for `rpc-host-whitelist`. When `HOST_WHITELIST` is empty support for host-whitelist is disabled.
|
Use `HOST_WHITELIST` to enable an list of dns names as host-whitelist. This enable support for `rpc-host-whitelist`. When `HOST_WHITELIST` is empty support for host-whitelist is disabled.
|
||||||
|
|
||||||
## Use alternative Transmission torrent ports
|
|
||||||
|
|
||||||
Use `PEERPORT` to specify the port(s) Transmission should listen on. This disables random port selection. This should be the same as the port mapped in your docker configuration.
|
|
||||||
|
|
||||||
|
## Use alternative Transmission torrent ports
|
||||||
|
|
||||||
|
Use `PEERPORT` to specify the port(s) Transmission should listen on. This disables random port selection. This should be the same as the port mapped in your docker configuration.
|
||||||
|
# init diagram
|
||||||
|
init_diagram: |
|
||||||
|
"transmission:latest": {
|
||||||
|
docker-mods
|
||||||
|
base {
|
||||||
|
fix-attr +\nlegacy cont-init
|
||||||
|
}
|
||||||
|
docker-mods -> base
|
||||||
|
legacy-services
|
||||||
|
custom services
|
||||||
|
init-services -> legacy-services
|
||||||
|
init-services -> custom services
|
||||||
|
custom services -> legacy-services
|
||||||
|
legacy-services -> ci-service-check
|
||||||
|
init-migrations -> init-adduser
|
||||||
|
init-os-end -> init-config
|
||||||
|
init-config -> init-config-end
|
||||||
|
init-crontab-config -> init-config-end
|
||||||
|
init-transmission-config -> init-config-end
|
||||||
|
init-config -> init-crontab-config
|
||||||
|
init-mods-end -> init-custom-files
|
||||||
|
init-adduser -> init-device-perms
|
||||||
|
base -> init-envfile
|
||||||
|
base -> init-migrations
|
||||||
|
init-config-end -> init-mods
|
||||||
|
init-mods-package-install -> init-mods-end
|
||||||
|
init-mods -> init-mods-package-install
|
||||||
|
init-adduser -> init-os-end
|
||||||
|
init-device-perms -> init-os-end
|
||||||
|
init-envfile -> init-os-end
|
||||||
|
init-custom-files -> init-services
|
||||||
|
init-config -> init-transmission-config
|
||||||
|
init-services -> svc-cron
|
||||||
|
svc-cron -> legacy-services
|
||||||
|
init-services -> svc-transmission
|
||||||
|
svc-transmission -> legacy-services
|
||||||
|
}
|
||||||
|
Base Images: {
|
||||||
|
"baseimage-alpine:edge"
|
||||||
|
}
|
||||||
|
"transmission:latest" <- Base Images
|
||||||
# changelog
|
# changelog
|
||||||
changelogs:
|
changelogs:
|
||||||
- { date: "10.06.23:", desc: "Bump unrar to 6.2.8, install transmission-extra." }
|
- {date: "29.11.24:", desc: "Fix PEERPORT setting."}
|
||||||
- { date: "25.05.23:", desc: "Deprecate armhf." }
|
- {date: "07.10.23:", desc: "Install unrar from [linuxserver repo](https://github.com/linuxserver/docker-unrar)."}
|
||||||
- { date: "14.05.23:", desc: "Explicitly install transmission-remote." }
|
- {date: "10.08.23:", desc: "Bump unrar to 6.2.10."}
|
||||||
- { date: "02.03.23:", desc: "Add cron init to allow user customizable crontabs." }
|
- {date: "10.06.23:", desc: "Bump unrar to 6.2.8, install transmission-extra."}
|
||||||
- { date: "08.02.23:", desc: "Rebase to Alpine Edge to get access to most up to date builds of Transmission. Remove bundled 3rd party UI packages." }
|
- {date: "25.05.23:", desc: "Deprecate armhf."}
|
||||||
- { date: "05.01.23:", desc: "Rebase to Alpine 3.17, restore GNU findutils package." }
|
- {date: "14.05.23:", desc: "Explicitly install transmission-remote."}
|
||||||
- { date: "02.11.22:", desc: "Rebase to Alpine 3.16, migrate to s6v3." }
|
- {date: "02.03.23:", desc: "Add cron init to allow user customizable crontabs."}
|
||||||
- { date: "12.08.22:", desc: "Bump unrar to 6.1.7." }
|
- {date: "08.02.23:", desc: "Rebase to Alpine Edge to get access to most up to date builds of Transmission. Remove bundled 3rd party UI packages."}
|
||||||
- { date: "03.04.22:", desc: "Add Transmissionic as a UI option." }
|
- {date: "05.01.23:", desc: "Rebase to Alpine 3.17, restore GNU findutils package."}
|
||||||
- { date: "21.02.22:", desc: "Build unrar from source, rebase to Alpine 3.15, add symlinks neeeded for TWC. Credit @alexbelgium" }
|
- {date: "02.11.22:", desc: "Rebase to Alpine 3.16, migrate to s6v3."}
|
||||||
- { date: "09.07.21:", desc: "Wait for the transmission-daemon termination after a caught sigterm." }
|
- {date: "12.08.22:", desc: "Bump unrar to 6.1.7."}
|
||||||
- { date: "06.03.21:", desc: "Add Flood for Transmission as a UI option." }
|
- {date: "03.04.22:", desc: "Add Transmissionic as a UI option."}
|
||||||
- { date: "23.01.21:", desc: "Rebasing to alpine 3.13." }
|
- {date: "21.02.22:", desc: "Build unrar from source, rebase to Alpine 3.15, add symlinks neeeded for TWC. Credit @alexbelgium"}
|
||||||
- { date: "02.11.20:", desc: "Add ca-certificates package to allow connecting to https trackers." }
|
- {date: "09.07.21:", desc: "Wait for the transmission-daemon termination after a caught sigterm."}
|
||||||
- { date: "02.06.20:", desc: "Rebase to alpine 3.12, update to transmission 3.0, remove python2, add python3." }
|
- {date: "06.03.21:", desc: "Add Flood for Transmission as a UI option."}
|
||||||
- { date: "11.05.20:", desc: "Remove unnecessary chmod (remnant of previous change)." }
|
- {date: "23.01.21:", desc: "Rebasing to alpine 3.13."}
|
||||||
- { date: "28.04.20:", desc: "Use transmission-remote to update blocklist." }
|
- {date: "02.11.20:", desc: "Add ca-certificates package to allow connecting to https trackers."}
|
||||||
- { date: "30.03.20:", desc: "Internalize blocklist-update.sh." }
|
- {date: "02.06.20:", desc: "Rebase to alpine 3.12, update to transmission 3.0, remove python2, add python3."}
|
||||||
- { date: "29.03.20:", desc: "Update auth info in readme." }
|
- {date: "11.05.20:", desc: "Remove unnecessary chmod (remnant of previous change)."}
|
||||||
- { date: "19.12.19:", desc: "Rebasing to alpine 3.11." }
|
- {date: "28.04.20:", desc: "Use transmission-remote to update blocklist."}
|
||||||
- { date: "04.10.19:", desc: "Update package label." }
|
- {date: "30.03.20:", desc: "Internalize blocklist-update.sh."}
|
||||||
- { date: "21.08.19:", desc: "Add optional user/pass environment variables, fix transmission shut down if user/pass are set." }
|
- {date: "29.03.20:", desc: "Update auth info in readme."}
|
||||||
- { date: "19.07.19:", desc: "Send SIGTERM in blocklist update to properly close pid." }
|
- {date: "19.12.19:", desc: "Rebasing to alpine 3.11."}
|
||||||
- { date: "28.06.19:", desc: "Rebasing to alpine 3.10." }
|
- {date: "04.10.19:", desc: "Update package label."}
|
||||||
- { date: "23.03.19:", desc: "Switching to new Base images, shift to arm32v7 tag." }
|
- {date: "21.08.19:", desc: "Add optional user/pass environment variables, fix transmission shut down if user/pass are set."}
|
||||||
- { date: "22.02.19:", desc: "Rebase to Alpine 3.9, add themes to baseimage, add python and findutils." }
|
- {date: "19.07.19:", desc: "Send SIGTERM in blocklist update to properly close pid."}
|
||||||
- { date: "22.02.19:", desc: "Catch term and clean exit." }
|
- {date: "28.06.19:", desc: "Rebasing to alpine 3.10."}
|
||||||
- { date: "07.02.19:", desc: "Add pipeline logic and multi arch." }
|
- {date: "23.03.19:", desc: "Switching to new Base images, shift to arm32v7 tag."}
|
||||||
- { date: "15.08.18:", desc: "Rebase to alpine linux 3.8." }
|
- {date: "22.02.19:", desc: "Rebase to Alpine 3.9, add themes to baseimage, add python and findutils."}
|
||||||
- { date: "12.02.18:", desc: "Pull transmission from edge repo." }
|
- {date: "22.02.19:", desc: "Catch term and clean exit."}
|
||||||
- { date: "10.01.18:", desc: "Rebase to alpine linux 3.7." }
|
- {date: "07.02.19:", desc: "Add pipeline logic and multi arch."}
|
||||||
- { date: "25.07.17:", desc: "Add rsync package." }
|
- {date: "15.08.18:", desc: "Rebase to alpine linux 3.8."}
|
||||||
- { date: "27.05.17:", desc: "Rebase to alpine linux 3.6." }
|
- {date: "12.02.18:", desc: "Pull transmission from edge repo."}
|
||||||
- { date: "06.02.17:", desc: "Rebase to alpine linux 3.5." }
|
- {date: "10.01.18:", desc: "Rebase to alpine linux 3.7."}
|
||||||
- { date: "15.01.17:", desc: "Add p7zip, tar, unrar, and unzip packages." }
|
- {date: "25.07.17:", desc: "Add rsync package."}
|
||||||
- { date: "16.10.16:", desc: "Blocklist autoupdate with optional authentication." }
|
- {date: "27.05.17:", desc: "Rebase to alpine linux 3.6."}
|
||||||
- { date: "14.10.16:", desc: "Add version layer informationE." }
|
- {date: "06.02.17:", desc: "Rebase to alpine linux 3.5."}
|
||||||
- { date: "23.09.16:", desc: "Add information about securing the webui to README." }
|
- {date: "15.01.17:", desc: "Add p7zip, tar, unrar, and unzip packages."}
|
||||||
- { date: "21.09.16:", desc: "Add curl package." }
|
- {date: "16.10.16:", desc: "Blocklist autoupdate with optional authentication."}
|
||||||
- { date: "09.09.16:", desc: "Add layer badges to README." }
|
- {date: "14.10.16:", desc: "Add version layer informationE."}
|
||||||
- { date: "28.08.16:", desc: "Add badges to README." }
|
- {date: "23.09.16:", desc: "Add information about securing the webui to README."}
|
||||||
- { date: "09.08.16:", desc: "Rebase to alpine linux." }
|
- {date: "21.09.16:", desc: "Add curl package."}
|
||||||
- { date: "06.12.15:", desc: "Separate mapping for watch folder." }
|
- {date: "09.09.16:", desc: "Add layer badges to README."}
|
||||||
- { date: "16.11.15:", desc: "Initial Release." }
|
- {date: "28.08.16:", desc: "Add badges to README."}
|
||||||
|
- {date: "09.08.16:", desc: "Rebase to alpine linux."}
|
||||||
|
- {date: "06.12.15:", desc: "Separate mapping for watch folder."}
|
||||||
|
- {date: "16.11.15:", desc: "Initial Release."}
|
||||||
@@ -2,11 +2,12 @@
 # shellcheck shell=bash
 
 BLOCKLIST_ENABLED=$(jq -r '.["blocklist-enabled"]' /config/settings.json)
+PORT=$(jq '.["rpc-port"]' /config/settings.json)
 
 if [[ "$BLOCKLIST_ENABLED" == true ]]; then
     if [[ -n "$USER" ]] && [[ -n "$PASS" ]]; then
-        /usr/bin/transmission-remote -n "$USER":"$PASS" --blocklist-update
+        /usr/bin/transmission-remote 127.0.0.1:${PORT:-9091} -n "$USER":"$PASS" --blocklist-update
     else
-        /usr/bin/transmission-remote --blocklist-update
+        /usr/bin/transmission-remote 127.0.0.1:${PORT:-9091} --blocklist-update
     fi
 fi
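A one-off run of this updated script from the host, for illustration; it assumes the container is named `transmission` as in the README examples and that the script is installed at `/app/blocklist-update.sh`, the path referenced by the crontab entry below:

```bash
# Trigger the blocklist update once, instead of waiting for the nightly cron run.
docker exec transmission /app/blocklist-update.sh
```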
root/etc/crontabs/abc: new file, 2 lines
@@ -0,0 +1,2 @@
+# min hour day month weekday command
+0 3 * * * /app/blocklist-update.sh 2>&1

@@ -1,10 +0,0 @@
-# do daily/weekly/monthly maintenance
-# min hour day month weekday command
-*/15 * * * * run-parts /etc/periodic/15min
-0 * * * * run-parts /etc/periodic/hourly
-0 2 * * * run-parts /etc/periodic/daily
-0 3 * * 6 run-parts /etc/periodic/weekly
-0 5 1 * * run-parts /etc/periodic/monthly
-
-# run daily blocklist update
-0 3 * * * /app/blocklist-update.sh 2>&1

@@ -1,22 +0,0 @@
-#!/usr/bin/with-contenv bash
-# shellcheck shell=bash
-
-# make folders
-mkdir -p \
-    /config/crontabs
-
-## root
-# if crontabs do not exist in config
-if [[ ! -f /config/crontabs/root ]]; then
-    # copy crontab from system
-    if crontab -l -u root; then
-        crontab -l -u root >/config/crontabs/root
-    fi
-
-    # if crontabs still do not exist in config (were not copied from system)
-    # copy crontab from included defaults (using -n, do not overwrite an existing file)
-    cp -n /etc/crontabs/root /config/crontabs/
-fi
-# set permissions and import user crontabs
-lsiown root:root /config/crontabs/root
-crontab -u root /config/crontabs/root

@@ -1 +0,0 @@
-oneshot

@@ -1 +0,0 @@
-/etc/s6-overlay/s6-rc.d/init-crontabs-config/run
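To confirm the schedule that now ships in the image, the new crontab can be read straight out of a running container; this assumes the stock container name from the README examples and that `root/etc/crontabs/abc` lands at `/etc/crontabs/abc` in the built image:

```bash
docker exec transmission cat /etc/crontabs/abc
# Expected output, per the new file above:
# # min hour day month weekday command
# 0 3 * * * /app/blocklist-update.sh 2>&1
```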
@@ -1,66 +1,69 @@
 #!/usr/bin/with-contenv bash
 # shellcheck shell=bash
 
-# make folders
+# copy config
-mkdir -p \
-    /downloads/{complete,incomplete} /watch
-
-# copy config
 if [[ ! -f /config/settings.json ]]; then
     cp /defaults/settings.json /config/settings.json
 fi
 
-if [[ -n "$USER" ]] && [[ -n "$PASS" ]]; then
+if [[ -n "${USER}" ]] && [[ -n "${PASS}" ]]; then
-    sed -i '/rpc-authentication-required/c\ "rpc-authentication-required": true,' /config/settings.json
+    echo -E "$(jq -r '.["rpc-authentication-required"] = true' /config/settings.json)" >/config/settings.json
-    sed -i "/rpc-username/c\ \"rpc-username\": \"$USER\"," /config/settings.json
+    echo -E "$(jq -r --arg user "${USER}" '.["rpc-username"] = $user' /config/settings.json)" >/config/settings.json
-    sed -i "/rpc-password/c\ \"rpc-password\": \"$PASS\"," /config/settings.json
+    echo -E "$(jq -r --arg pass "${PASS}" '.["rpc-password"] = $pass' /config/settings.json)" >/config/settings.json
 else
-    sed -i '/rpc-authentication-required/c\ "rpc-authentication-required": false,' /config/settings.json
+    echo -E "$(jq -r '.["rpc-authentication-required"] = false' /config/settings.json)" >/config/settings.json
-    sed -i "/rpc-username/c\ \"rpc-username\": \"$USER\"," /config/settings.json
-    sed -i "/rpc-password/c\ \"rpc-password\": \"$PASS\"," /config/settings.json
 fi
 
-if [[ -n "$WHITELIST" ]]; then
+if [[ -n "${WHITELIST}" ]]; then
-    sed -i '/rpc-whitelist-enabled/c\ "rpc-whitelist-enabled": true,' /config/settings.json
+    echo -E "$(jq -r '.["rpc-whitelist-enabled"] = true' /config/settings.json)" >/config/settings.json
-    sed -i "/\"rpc-whitelist\"/c\ \"rpc-whitelist\": \"$WHITELIST\"," /config/settings.json
+    echo -E "$(jq -r --arg whitelist "${WHITELIST}" '.["rpc-whitelist"] = $whitelist' /config/settings.json)" >/config/settings.json
 else
-    sed -i '/rpc-whitelist-enabled/c\ "rpc-whitelist-enabled": false,' /config/settings.json
+    echo -E "$(jq -r '.["rpc-whitelist-enabled"] = false' /config/settings.json)" >/config/settings.json
-    sed -i "/\"rpc-whitelist\"/c\ \"rpc-whitelist\": \"$WHITELIST\"," /config/settings.json
 fi
 
-if [[ -n "$HOST_WHITELIST" ]]; then
+if [[ -n "${HOST_WHITELIST}" ]]; then
-    sed -i '/rpc-host-whitelist-enabled/c\ "rpc-host-whitelist-enabled": true,' /config/settings.json
+    echo -E "$(jq -r '.["rpc-host-whitelist-enabled"] = true' /config/settings.json)" >/config/settings.json
-    sed -i "/\"rpc-host-whitelist\"/c\ \"rpc-host-whitelist\": \"$HOST_WHITELIST\"," /config/settings.json
+    echo -E "$(jq -r --arg host_whitelist "${HOST_WHITELIST}" '.["rpc-host-whitelist"] = $host_whitelist' /config/settings.json)" >/config/settings.json
 else
-    sed -i '/rpc-host-whitelist-enabled/c\ "rpc-host-whitelist-enabled": false,' /config/settings.json
+    echo -E "$(jq -r '.["rpc-host-whitelist-enabled"] = false' /config/settings.json)" >/config/settings.json
-    sed -i "/\"rpc-host-whitelist\"/c\ \"rpc-host-whitelist\": \"$HOST_WHITELIST\"," /config/settings.json
 fi
 
 if [[ -n "${PEERPORT}" ]]; then
-    sed -i "/\"peer-port\"/c\ \"peer-port\": ${PEERPORT}," /config/settings.json
+    echo -E "$(jq -r --argjson peerport "${PEERPORT}" '.["peer-port"] = $peerport' /config/settings.json)" >/config/settings.json
-    sed -i '/peer-port-random-on-start/c\ "peer-port-random-on-start": false,' /config/settings.json
+    echo -E "$(jq -r '.["peer-port-random-on-start"] = false' /config/settings.json)" >/config/settings.json
 fi
 
-# Handle old theme locations
+if [[ -n "${UMASK}" ]]; then
-mkdir -p {/transmissionic,/combustion-release,/flood-for-transmission,/kettu,/transmission-web-control}
+    echo -E "$(jq -r --arg umask "${UMASK}" '.["umask"] = $umask' /config/settings.json)" >/config/settings.json
-echo /transmissionic /combustion-release /flood-for-transmission /kettu /transmission-web-control | xargs -n1 ln -s /defaults/index.html
-
-# permissions
-lsiown abc:abc \
-    /config/settings.json
-
-if [[ "$(stat -c '%U' /downloads)" != "abc" ]]; then
-    lsiown abc:abc /downloads
 fi
 
-if [[ "$(stat -c '%U' /downloads/complete)" != "abc" ]]; then
+if [[ -z ${LSIO_NON_ROOT_USER} ]] && [[ -z ${LSIO_READ_ONLY_FS} ]]; then
-    lsiown abc:abc /downloads/complete
+    # Handle old theme locations
+    mkdir -p {/transmissionic,/combustion-release,/flood-for-transmission,/kettu,/transmission-web-control}
+    echo /transmissionic /combustion-release /flood-for-transmission /kettu /transmission-web-control | xargs -n1 ln -s /defaults/index.html
 fi
 
-if [[ "$(stat -c '%U' /downloads/incomplete)" != "abc" ]]; then
+if [[ -z ${LSIO_NON_ROOT_USER} ]]; then
-    lsiown abc:abc /downloads/incomplete
+    lsiown -R abc:abc \
-fi
+        /config
 
-if [[ "$(stat -c '%U' /watch)" != "abc" ]]; then
+    if grep -qe ' /downloads ' /proc/mounts; then
-    lsiown abc:abc /watch
+        if [[ "$(stat -c '%U' /downloads)" != "abc" ]]; then
+            lsiown abc:abc /downloads
+        fi
+
+        if [[ "$(stat -c '%U' /downloads/complete)" != "abc" ]]; then
+            lsiown abc:abc /downloads/complete
+        fi
+
+        if [[ "$(stat -c '%U' /downloads/incomplete)" != "abc" ]]; then
+            lsiown abc:abc /downloads/incomplete
+        fi
+    fi
+
+    if grep -qe ' /watch ' /proc/mounts; then
+        if [[ "$(stat -c '%U' /watch)" != "abc" ]]; then
+            lsiown abc:abc /watch
+        fi
+    fi
 fi
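The jq pattern repeated throughout the new init script is worth calling out: the command substitution is expanded before the redirection truncates the target, so the whole file is read first and the classic `jq ... file > file` empty-output pitfall is avoided. A generic sketch of the same pattern, with placeholder key and value and the in-container settings path:

```bash
# Set a single key in settings.json with jq, writing back over the same file.
# key and value here are placeholders for illustration.
key="rpc-whitelist-enabled"
value=true   # any valid JSON literal works with --argjson
echo -E "$(jq -r --arg k "${key}" --argjson v "${value}" '.[$k] = $v' /config/settings.json)" >/config/settings.json
```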
@@ -1,4 +0,0 @@
-#!/usr/bin/with-contenv bash
-# shellcheck shell=bash
-
-exec /usr/sbin/crond -f -S -l 5

@@ -1 +0,0 @@
-longrun
@@ -2,11 +2,12 @@
 # shellcheck shell=bash
 
 pid=$(pidof transmission-daemon)
+PORT=$(jq '.["rpc-port"]' /config/settings.json)
 
 if [[ -n "$USER" ]] && [[ -n "$PASS" ]]; then
-    /usr/bin/transmission-remote -n "$USER":"$PASS" --exit
+    /usr/bin/transmission-remote 127.0.0.1:${PORT:-9091} -n "$USER":"$PASS" --exit
 else
-    /usr/bin/transmission-remote --exit
+    /usr/bin/transmission-remote 127.0.0.1:${PORT:-9091} --exit
 fi
 
 tail --pid=${pid} -f /dev/null
@@ -3,6 +3,12 @@
 
 PORT=$(jq '.["rpc-port"]' /config/settings.json)
 
-s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z localhost ${PORT:-9091}" \
+if [[ -z ${LSIO_NON_ROOT_USER} ]]; then
-    s6-setuidgid abc /usr/bin/transmission-daemon \
+    s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z localhost ${PORT:-9091}" \
-        -g /config -f
+        s6-setuidgid abc /usr/bin/transmission-daemon \
+            -g /config -f
+else
+    s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z localhost ${PORT:-9091}" \
+        /usr/bin/transmission-daemon \
+            -g /config -f
+fi
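The readiness condition that `s6-notifyoncheck` polls in this run script is just a TCP probe of the RPC port. Run by hand inside the container it looks like the sketch below; `nc` is provided by the `netcat-openbsd` package listed above, and the port is read the same way the run script reads it:

```bash
# Poll until the Transmission RPC port accepts TCP connections (default 9091).
PORT=$(jq '.["rpc-port"]' /config/settings.json)
until nc -z localhost "${PORT:-9091}"; do
    sleep 1
done
echo "transmission RPC is reachable on port ${PORT:-9091}"
```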