Compare commits

...

117 commits
3.17 ... master

Author SHA1 Message Date
Viktor Varland 458f79ca4b
ci: fuckin push to the right image
All checks were successful
build / build (push) Successful in 32s
2025-04-14 21:14:51 +02:00
Viktor Varland 6ea8b43b0d
docs: update readme
All checks were successful
build / build (push) Successful in 32s
2025-04-14 18:19:18 +02:00
Viktor Varland 113e0bf43b
ci: main => master
Some checks failed
build / build (push) Failing after 32s
2025-04-14 18:15:48 +02:00
Viktor Varland 6a6256acd4
ci: convert to meatbag 2025-04-14 18:13:03 +02:00
LinuxServer-CI ac17141dc8
Bot Updating Package Versions 2025-04-09 22:08:50 +00:00
Adam 2c1f03e3a7
Merge pull request #258 from linuxserver/master-usermod 2025-04-09 23:05:59 +01:00
thespad 4775b97056
Switch abc home to avoid chown penalty 2025-04-08 20:39:33 +01:00
LinuxServer-CI d9e9c8a664
Bot Updating Package Versions 2025-03-29 13:33:30 +00:00
LinuxServer-CI ec8542f160
Bot Updating Package Versions 2025-03-22 13:32:16 +00:00
LinuxServer-CI e1e80e60c0
Bot Updating Package Versions 2025-02-15 13:34:00 +00:00
LinuxServer-CI 59c62f1ae7
Bot Updating Templated Files 2025-02-15 13:29:35 +00:00
LinuxServer-CI 081f3bf4a3
Bot Updating Package Versions 2025-02-08 13:33:09 +00:00
LinuxServer-CI a34a58bd62
Bot Updating Templated Files 2025-02-01 13:30:31 +00:00
LinuxServer-CI 64ecb6745f
Bot Updating Templated Files 2025-02-01 13:28:48 +00:00
Adam ba27487240
Merge pull request #254 from linuxserver/new-readme 2025-01-19 10:50:10 +00:00
LinuxServer-CI 5e393c25ff
Bot Updating Package Versions 2025-01-18 13:29:41 +00:00
thespad 5a3ccf4cf0
Phrasing 2025-01-17 19:36:07 +00:00
thespad d48d182f0f
Add support lifecycle note 2025-01-17 19:34:04 +00:00
thespad 8beeaeca45
Update readme with new style 2025-01-17 18:08:34 +00:00
LinuxServer-CI 12873dd927
Bot Updating Package Versions 2025-01-10 19:42:27 +00:00
LinuxServer-CI 1730537b24
Bot Updating Package Versions 2025-01-04 13:33:17 +00:00
LinuxServer-CI 7d602a73c4
Bot Updating Package Versions 2024-12-28 11:37:34 +00:00
LinuxServer-CI 2f5c0f11df
Bot Updating Package Versions 2024-12-15 18:37:51 +00:00
LinuxServer-CI 2411390177
Bot Updating Templated Files 2024-12-15 18:33:52 +00:00
LinuxServer-CI 7777d8828d
Bot Updating Templated Files 2024-12-15 18:30:46 +00:00
LinuxServer-CI 795d744a0f
Bot Updating Package Versions 2024-12-05 16:13:31 +00:00
Adam 5dfe7d15c6
Merge pull request #251 from linuxserver/3.21-initial 2024-12-05 16:07:23 +00:00
thespad ff812c0651
Run builder 2024-12-05 14:28:14 +00:00
thespad 70b76baa2f
Enable SBOM/Provenance 2024-12-05 14:24:00 +00:00
thespad 8f35e8139b
Remove contenv symlinks so we don't overwrite them 2024-11-15 13:52:12 +00:00
thespad 6bd31eb477
Initial 3.21 release 2024-11-15 11:38:48 +00:00
LinuxServer-CI 481825d48e
Bot Updating Package Versions 2024-09-28 13:36:15 +00:00
LinuxServer-CI 2926dbe73c
Bot Updating Templated Files 2024-09-28 13:34:09 +00:00
LinuxServer-CI c6b5263eb1
Bot Updating Templated Files 2024-09-28 13:32:49 +00:00
LinuxServer-CI f272148962
Bot Updating Templated Files 2024-09-28 13:31:28 +00:00
LinuxServer-CI bb5a7b16e2
Bot Updating Package Versions 2024-09-21 13:31:58 +00:00
LinuxServer-CI 1983fb36bb
Bot Updating Package Versions 2024-09-14 13:30:55 +00:00
LinuxServer-CI 85bd229846
Bot Updating Package Versions 2024-09-07 13:31:45 +00:00
LinuxServer-CI 94ccace81c
Bot Updating Package Versions 2024-08-31 13:29:57 +00:00
LinuxServer-CI 9ec43eb872
Bot Updating Templated Files 2024-08-24 13:30:04 +00:00
LinuxServer-CI c822962c7a Bot Updating Templated Files 2024-08-24 13:28:45 +00:00
LinuxServer-CI a2b17e520a Bot Updating Package Versions 2024-07-27 13:28:58 +00:00
LinuxServer-CI 6567cde19c Bot Updating Package Versions 2024-07-13 13:31:42 +00:00
LinuxServer-CI 719f49cb0a Bot Updating Templated Files 2024-07-13 13:29:39 +00:00
LinuxServer-CI 5b1eb38ce6 Bot Updating Templated Files 2024-07-13 13:27:43 +00:00
Adam 34006fe7e7
Merge pull request #248 from linuxserver/3.20-ro
RO support hooks (3.20)
2024-07-02 15:31:58 +01:00
LinuxServer-CI af2c6f228e Bot Updating Package Versions 2024-06-29 13:28:00 +00:00
thespad b48c87eb2f
Linting 2024-06-26 23:09:02 +01:00
thespad ef81cb3b33
RO support hooks 2024-06-26 21:07:41 +01:00
LinuxServer-CI 56d8fbb73f Bot Updating Package Versions 2024-06-22 13:31:01 +00:00
LinuxServer-CI b9b51a1b93 Bot Updating Package Versions 2024-06-11 10:24:47 +00:00
Adam 46e3e8d74c
Merge pull request #242 from linuxserver/3.20-lsiown
Copy lsiown from repo (3.20)
2024-06-11 11:22:23 +01:00
thespad 4066c272d7
Copy lsiown from repo 2024-06-10 15:27:34 +01:00
LinuxServer-CI 28e54925ca Bot Updating Package Versions 2024-06-08 13:29:27 +00:00
LinuxServer-CI c4951e7021 Bot Updating Package Versions 2024-05-22 19:20:33 +00:00
LinuxServer-CI f2844953b3 Bot Updating Templated Files 2024-05-22 19:18:30 +00:00
Adam 552137dbb1
Merge pull request #238 from linuxserver/3.20-initial
Bump to 3.20
2024-05-22 20:16:26 +01:00
thespad 3ae6c4fd66
Remove errant indent 2024-05-22 20:09:34 +01:00
thespad cb6b200afc
Remove build_version 2024-05-22 20:07:10 +01:00
thespad 458334e3c1
Add build_version output on init where supported. 2024-05-22 19:05:19 +01:00
thespad d69c527089
Bump to 3.20 2024-05-16 22:32:59 +01:00
Roxedus 3d21020c64
Merge pull request #234 from linuxserver/3.19-remove-arm-warning 2024-05-14 14:25:38 +02:00
thespad 145020adaa
Remove armhf warning 2024-05-12 15:04:33 +01:00
LinuxServer-CI ed4057339b Bot Updating Package Versions 2024-05-08 18:51:54 +00:00
Roxedus 9268218a5f
Merge pull request #232 from linuxserver/master-catatonit 2024-05-08 20:49:30 +02:00
Roxedus cd796b3370
Add Catatonit 2024-05-06 19:54:32 +02:00
LinuxServer-CI 0b5f271f0e Bot Updating Templated Files 2024-05-04 13:27:48 +00:00
LinuxServer-CI 7cd7e1401c Bot Updating Package Versions 2024-04-17 13:47:46 +00:00
aptalca eb3cf22d0d
Merge pull request #227 from linuxserver/3.19-find
3.19: add gnu find
2024-04-17 09:45:26 -04:00
aptalca f47b402a1a
add gnu find 2024-04-13 16:03:18 -04:00
LinuxServer-CI 05763edeb2 Bot Updating Package Versions 2024-04-13 13:26:20 +00:00
LinuxServer-CI f023763fe6 Bot Updating Package Versions 2024-04-06 13:27:59 +00:00
LinuxServer-CI 402e4abbc3 Bot Updating Package Versions 2024-03-30 13:27:25 +00:00
LinuxServer-CI ff87e3bd5a Bot Updating Package Versions 2024-03-23 13:26:34 +00:00
LinuxServer-CI a0702347af Bot Updating Templated Files 2024-03-16 13:25:54 +00:00
LinuxServer-CI 6cfbdd72c6 Bot Updating Package Versions 2024-03-02 13:25:09 +00:00
LinuxServer-CI 2263515ffb Bot Updating Templated Files 2024-02-24 13:29:52 +00:00
LinuxServer-CI 9626561aea Bot Updating Templated Files 2024-02-24 13:27:27 +00:00
LinuxServer-CI 725df02e95 Bot Updating Package Versions 2024-02-03 13:27:13 +00:00
LinuxServer-CI bd4c1887c2 Bot Updating Package Versions 2024-01-27 13:25:17 +00:00
LinuxServer-CI f7a30b5942 Bot Updating Package Versions 2024-01-20 13:29:58 +00:00
LinuxServer-CI e971d0872f Bot Updating Package Versions 2024-01-12 10:21:03 +00:00
LinuxServer-CI a03942aac4 Bot Updating Templated Files 2024-01-12 10:15:59 +00:00
LinuxServer-CI 1bfc5cb7a7 Bot Updating Package Versions 2023-12-30 13:26:51 +00:00
LinuxServer-CI d2d759f136 Bot Updating Package Versions 2023-12-16 13:28:23 +00:00
LinuxServer-CI 443aa44015 Bot Updating Package Versions 2023-12-08 14:53:16 +00:00
Adam 36bfc31d62
Merge pull request #220 from linuxserver/3.19-initial
Release 3.19 to master
2023-12-08 14:50:21 +00:00
TheSpad 0e7a9b6fbd
Release 3.19 to master 2023-12-07 11:35:05 +00:00
LinuxServer-CI 07c4bf4594 Bot Updating Package Versions 2023-12-02 13:28:07 +00:00
Eric Nemchik 3a67103736
Merge pull request #218 from linuxserver/3.18-cron-user-file
Add cron user file
2023-11-20 11:58:06 -06:00
Eric Nemchik 330012ef01
Merge pull request #211 from linuxserver/3.18-etc-cron
Import crontabs from etc
2023-11-20 11:55:53 -06:00
Eric Nemchik 9b393a8668
Add cron user file
Signed-off-by: Eric Nemchik <eric@nemchik.com>
2023-11-20 11:04:54 -06:00
Eric Nemchik dbd643f8c3
Import crontabs from etc
Signed-off-by: Eric Nemchik <eric@nemchik.com>
2023-11-19 14:34:27 -06:00
Eric Nemchik bef0f4cee2
Merge pull request #208 from linuxserver/3.18-silent-cron
silent cron
2023-11-11 16:05:54 -06:00
Adam 2b1fc3f6c8
Merge pull request #198 from linuxserver/3.18-secrets
Warn about newlines in secrets (3.18)
2023-11-11 21:32:20 +00:00
TheSpad 09726961fd
!0 instead of 1 2023-11-11 20:02:44 +00:00
Eric Nemchik 937caa6f19
silent cron
Signed-off-by: Eric Nemchik <eric@nemchik.com>
2023-11-11 11:10:40 -06:00
LinuxServer-CI 3256bb4eeb Bot Updating Package Versions 2023-11-11 13:26:34 +00:00
TheSpad 6be18c0df7
Warn rather than sanitize 2023-11-11 10:35:47 +00:00
Eric Nemchik 89ebe1cc22
Merge pull request #201 from linuxserver/3.18-cron-redirect-stderr
Redirect stderr from cron
2023-11-02 16:12:05 -05:00
Eric Nemchik 8b9dbd0bed
Redirect stderr from cron
Signed-off-by: GitHub <noreply@github.com>
2023-11-02 17:09:46 +00:00
TheSpad 1b98fd9bc7
Fix env enumeration 2023-11-02 16:45:08 +00:00
TheSpad 7c6601c83a
Remove redundant if 2023-11-02 16:41:21 +00:00
Eric Nemchik cf5896ce7c
Merge pull request #191 from linuxserver/standard-cron
standard cron 3.18
2023-11-02 10:53:50 -05:00
TheSpad 6bb5231d17
Default to sanitizing newlines in secrets 2023-10-31 16:54:52 +00:00
LinuxServer-CI 00db510002 Bot Updating Package Versions 2023-10-28 13:29:48 +00:00
LinuxServer-CI 5543e3dd0f Bot Updating Package Versions 2023-10-14 13:27:41 +00:00
LinuxServer-CI a54b440055 Bot Updating Templated Files 2023-10-14 13:25:43 +00:00
LinuxServer-CI d971cf0873 Bot Updating Package Versions 2023-10-07 13:31:36 +00:00
LinuxServer-CI a8bbd50370 Bot Updating Templated Files 2023-10-07 13:28:15 +00:00
LinuxServer-CI 5461255904 Bot Updating Templated Files 2023-10-07 13:26:50 +00:00
LinuxServer-CI c060e20690 Bot Updating Package Versions 2023-09-30 13:26:54 +00:00
LinuxServer-CI fe39d670b7 Bot Updating Package Versions 2023-09-23 13:30:49 +00:00
LinuxServer-CI 1e9e41836b Bot Updating Package Versions 2023-09-02 13:27:12 +00:00
Eric Nemchik 0166b15838
Revert "cron in base"
This reverts commit b8c0295890.
2023-08-18 23:24:36 -05:00
LinuxServer-CI ab9e158526 Bot Updating Package Versions 2023-08-19 04:15:54 +00:00
Eric Nemchik b8c0295890
cron in base
Signed-off-by: Eric Nemchik <eric@nemchik.com>
2023-08-18 23:06:17 -05:00
34 changed files with 574 additions and 884 deletions

View file

@@ -0,0 +1,15 @@
name: build
run-name: ${{ gitea.actor }} is building baseimage-alpine
on:
  push:
    branches:
      - 'master'
jobs:
  build:
    runs-on: podman
    steps:
      - uses: actions/checkout@v3
      - run: podman login --username registry --password ${{ secrets.MEATBAG_REGISTRY_TOKEN }} registry.meatbag.se
      - run: podman build -t meatbag/baseimage-alpine:latest .
      - run: podman push meatbag/baseimage-alpine registry.meatbag.se/meatbag/baseimage-alpine
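For context, a quick sketch of how the image produced by this workflow might be consumed once pushed. The registry host and image name come from the workflow above; the `:latest` tag on the pull is an assumption, since the push command does not name a tag on the destination.

```bash
# Assumed consumption of the pushed image (tag is an assumption).
podman pull registry.meatbag.se/meatbag/baseimage-alpine:latest
podman run --rm -it registry.meatbag.se/meatbag/baseimage-alpine:latest /bin/bash
```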

View file

@@ -1,123 +0,0 @@
# Contributing to baseimage-alpine
## Gotchas
* While contributing, make sure to make all your changes before creating a Pull Request, as our pipeline builds each commit after the PR is opened.
* Read and fill in the Pull Request template.
* If this is a fix for a typo (in code, documentation, or the README) please file an issue and let us sort it out. We do not need a PR.
* If the PR is addressing an existing issue, include `closes #<issue number>` in the body of the PR commit message.
* If you want to discuss changes, you can also bring it up in [#dev-talk](https://discordapp.com/channels/354974912613449730/757585807061155840) in our [Discord server](https://discord.gg/YWrKVTn)
## Common files
| File | Use case |
| :----: | --- |
| `Dockerfile` | Dockerfile used to build amd64 images |
| `Dockerfile.aarch64` | Dockerfile used to build 64bit ARM architectures |
| `Dockerfile.armhf` | Dockerfile used to build 32bit ARM architectures |
| `Jenkinsfile` | This file is a product of our builder and should not be edited directly. This is used to build the image |
| `jenkins-vars.yml` | This file is used to generate the `Jenkinsfile` mentioned above; it only affects the build process |
| `package_versions.txt` | This file is generated as a part of the build-process and should not be edited directly. It lists all the installed packages and their versions |
| `README.md` | This file is a product of our builder and should not be edited directly. This displays the readme for the repository and image registries |
| `readme-vars.yml` | This file is used to generate the `README.md` |
## Readme
If you would like to change our readme, please __**do not**__ directly edit the readme, as it is auto-generated on each commit.
Instead edit the [readme-vars.yml](https://github.com/linuxserver/docker-baseimage-alpine/edit/master/readme-vars.yml).
These variables are used in a template for our [Jenkins Builder](https://github.com/linuxserver/docker-jenkins-builder) as part of an ansible play.
Most of these variables are also carried over to [docs.linuxserver.io](https://docs.linuxserver.io)
### Fixing typos or clarifying the text in the readme
There are variables for multiple parts of the readme; the most common ones are:
| Variable | Description |
| :----: | --- |
| `project_blurb` | This is the short excerpt shown above the project logo. |
| `app_setup_block` | This is the text that shows up under "Application Setup" if enabled |
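As a rough illustration, the corresponding entries in `readme-vars.yml` might look like the sketch below. Only `project_blurb` and `app_setup_block` are taken from the table above; the enable toggle is an assumption about the template's field names.

```yml
project_blurb: "Short excerpt shown above the project logo."
app_setup_block_enabled: true  # assumed toggle; enables the "Application Setup" section
app_setup_block: |
  Notes that appear under "Application Setup" in the generated readme.
```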
### Parameters
The compose and run examples are also generated from these variables.
We have a [reference file](https://github.com/linuxserver/docker-jenkins-builder/blob/master/vars/_container-vars-blank) in our Jenkins Builder.
These are prefixed with `param_` for required parameters, or `opt_param_` for optional parameters, except for `cap_add`.
Remember to enable the parameter if it is currently disabled; this differs between parameters and can be seen in the reference file.
Devices, environment variables, ports, and volumes each expect their variables in a certain format, as shown in the examples below; a combined sketch of how these render follows the examples.
### Devices
```yml
param_devices:
- { device_path: "/dev/dri", device_host_path: "/dev/dri", desc: "For hardware transcoding" }
opt_param_devices:
- { device_path: "/dev/dri", device_host_path: "/dev/dri", desc: "For hardware transcoding" }
```
### Environment variables
```yml
param_env_vars:
- { env_var: "TZ", env_value: "Europe/London", desc: "Specify a timezone to use EG Europe/London." }
opt_param_env_vars:
- { env_var: "VERSION", env_value: "latest", desc: "Supported values are LATEST, PLEXPASS or a specific version number." }
```
### Ports
```yml
param_ports:
- { external_port: "80", internal_port: "80", port_desc: "Application WebUI" }
opt_param_ports:
- { external_port: "80", internal_port: "80", port_desc: "Application WebUI" }
```
### Volumes
```yml
param_volumes:
- { vol_path: "/config", vol_host_path: "</path/to/appdata/config>", desc: "Configuration files." }
opt_param_volumes:
- { vol_path: "/config", vol_host_path: "</path/to/appdata/config>", desc: "Configuration files." }
```
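To tie these together, here is a hedged sketch of how the example parameters above could surface in the generated compose snippet. The exact layout is produced by the Jenkins Builder templates, so treat this as illustrative only.

```yml
services:
  baseimage-alpine:
    image: linuxserver/baseimage-alpine:latest
    environment:
      - TZ=Europe/London                    # from param_env_vars
    volumes:
      - /path/to/appdata/config:/config     # from param_volumes
    ports:
      - 80:80                               # from param_ports
    devices:
      - /dev/dri:/dev/dri                   # from param_devices
```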
### Testing template changes
After you make any changes to the templates, you can use our [Jenkins Builder](https://github.com/linuxserver/docker-jenkins-builder) to have the files updated from the modified templates. Please use the command found under `Running Locally` [on this page](https://github.com/linuxserver/docker-jenkins-builder/blob/master/README.md) to generate them prior to submitting a PR.
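A sketch of what that local run can look like, based on the invocation used in the Jenkinsfile elsewhere in this compare; the bind-mount path and image tag are assumptions, and the builder README remains the canonical reference.

```bash
git clone https://github.com/linuxserver/docker-baseimage-alpine.git
cd docker-baseimage-alpine
# Regenerate templated files (README.md, Jenkinsfile, ...) in place:
docker run --rm \
  -v "$(pwd)":/tmp \
  -e LOCAL=true \
  -e PUID=$(id -u) -e PGID=$(id -g) \
  ghcr.io/linuxserver/jenkins-builder:latest
```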
## Dockerfiles
We use multiple Dockerfiles in our repos because some CPU architectures need different packages to work.
If you are proposing additional packages to be added, ensure that you add the packages to all the Dockerfiles, in alphabetical order.
### Testing your changes
```bash
git clone https://github.com/linuxserver/docker-baseimage-alpine.git
cd docker-baseimage-alpine
docker build \
  --no-cache \
  --pull \
  -t linuxserver/baseimage-alpine:latest .
```
The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static`
```bash
docker run --rm --privileged multiarch/qemu-user-static:register --reset
```
Once registered, you can define which Dockerfile to use with `-f Dockerfile.aarch64`, as in the example below.
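A sketch of an aarch64 build using the same flags as the x86_64 example above; the output tag is illustrative.

```bash
docker build \
  --no-cache \
  --pull \
  -f Dockerfile.aarch64 \
  -t linuxserver/baseimage-alpine:arm64v8-latest .
```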
## Update the changelog
If you are modifying the Dockerfiles or any of the startup scripts in [root](https://github.com/linuxserver/docker-baseimage-alpine/tree/master/root), add an entry to the changelog
```yml
changelogs:
- { date: "DD.MM.YY:", desc: "Added some love to templates" }
```

2
.github/FUNDING.yml vendored
View file

@@ -1,2 +0,0 @@
github: linuxserver
open_collective: linuxserver

View file

@@ -1,13 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: Discord chat support
url: https://discord.gg/YWrKVTn
about: Realtime support / chat with the community and the team.
- name: Discourse discussion forum
url: https://discourse.linuxserver.io
about: Post on our community forum.
- name: Documentation
url: https://docs.linuxserver.io
about: Documentation - information about all of our containers.

View file

@@ -1,68 +0,0 @@
# Based on the issue template
name: Bug report
description: Create a report to help us improve
title: "[BUG] <title>"
labels: [Bug]
body:
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: Please search to see if an issue already exists for the bug you encountered.
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: Current Behavior
description: Tell us what happens instead of the expected behavior.
validations:
required: true
- type: textarea
attributes:
label: Expected Behavior
description: Tell us what should happen.
validations:
required: false
- type: textarea
attributes:
label: Steps To Reproduce
description: Steps to reproduce the behavior.
placeholder: |
1. In this environment...
2. With this config...
3. Run '...'
4. See error...
validations:
required: true
- type: textarea
attributes:
label: Environment
description: |
examples:
- **OS**: Ubuntu 20.04
- **How docker service was installed**: distro's packagemanager
value: |
- OS:
- How docker service was installed:
render: markdown
validations:
required: false
- type: textarea
attributes:
label: Docker creation
description: |
Command used to create docker container
Provide your docker create/run command or compose yaml snippet, or a screenshot of settings if using a gui to create the container
render: bash
validations:
required: true
- type: textarea
attributes:
description: |
Provide a full docker log, output of "docker logs linuxserver.io"
label: Container logs
placeholder: |
Output of `docker logs linuxserver.io`
render: bash
validations:
required: true

View file

@@ -1,31 +0,0 @@
# Based on the issue template
name: Feature request
description: Suggest an idea for this project
title: "[FEAT] <title>"
labels: [enhancement]
body:
- type: checkboxes
attributes:
label: Is this a new feature request?
description: Please search to see if a feature request already exists.
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: Wanted change
description: Tell us what you want to happen.
validations:
required: true
- type: textarea
attributes:
label: Reason for change
description: Justify your request, why do you want it, what is the benefit.
validations:
required: true
- type: textarea
attributes:
label: Proposed code change
description: Do you have a potential code change in mind?
validations:
required: false

View file

@@ -1,43 +0,0 @@
<!--- Provide a general summary of your changes in the Title above -->
[linuxserverurl]: https://linuxserver.io
[![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png)][linuxserverurl]
<!--- Before submitting a pull request please check the following -->
<!--- If this is a fix for a typo (in code, documentation, or the README) please file an issue and let us sort it out. We do not need a PR -->
<!--- Ask yourself if this modification is something the whole userbase will benefit from, if this is a specific change for corner case functionality or plugins please look at making a Docker Mod or local script https://blog.linuxserver.io/2019/09/14/customizing-our-containers/ -->
<!--- That if the PR is addressing an existing issue include, closes #<issue number> , in the body of the PR commit message -->
<!--- You have included links to any files / patches etc your PR may be using in the body of the PR commit message -->
<!--- We maintain a changelog of major revisions to the container at the end of readme-vars.yml in the root of this repository, please add your changes there if appropriate -->
<!--- Coding guidelines: -->
<!--- 1. Installed packages in the Dockerfiles should be in alphabetical order -->
<!--- 2. Changes to Dockerfile should be replicated in Dockerfile.armhf and Dockerfile.aarch64 if applicable -->
<!--- 3. Indentation style (tabs vs 4 spaces vs 1 space) should match the rest of the document -->
<!--- 4. Readme is auto generated from readme-vars.yml, make your changes there -->
------------------------------
- [ ] I have read the [contributing](https://github.com/linuxserver/docker-baseimage-alpine/blob/master/.github/CONTRIBUTING.md) guideline and understand that I have made the correct modifications
------------------------------
<!--- We welcome all PRs though this doesnt guarantee it will be accepted. -->
## Description:
<!--- Describe your changes in detail -->
## Benefits of this PR and context:
<!--- Please explain why we should accept this PR. If this fixes an outstanding bug, please reference the issue # -->
## How Has This Been Tested?
<!--- Please describe in detail how you tested your changes. -->
<!--- Include details of your testing environment, and the tests you ran to -->
<!--- see how your change affects other areas of the code, etc. -->
## Source / References:
<!--- Please include any forum posts/github links relevant to the PR -->

View file

@@ -1,16 +0,0 @@
name: Issue & PR Tracker
on:
issues:
types: [opened,reopened,labeled,unlabeled,closed]
pull_request_target:
types: [opened,reopened,review_requested,review_request_removed,labeled,unlabeled,closed]
pull_request_review:
types: [submitted,edited,dismissed]
jobs:
manage-project:
permissions:
issues: write
uses: linuxserver/github-workflows/.github/workflows/issue-pr-tracker.yml@v1
secrets: inherit

View file

@@ -1,13 +0,0 @@
name: Mark stale issues and pull requests
on:
schedule:
- cron: '9 11 * * *'
workflow_dispatch:
jobs:
stale:
permissions:
issues: write
pull-requests: write
uses: linuxserver/github-workflows/.github/workflows/issues-cron.yml@v1
secrets: inherit

View file

@@ -1,17 +0,0 @@
name: External Trigger Main
on:
workflow_dispatch:
jobs:
external-trigger-master:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3.1.0
- name: External Trigger
if: github.ref == 'refs/heads/master'
run: |
echo "**** No external release, exiting ****"
echo "No external release, exiting" >> $GITHUB_STEP_SUMMARY
exit 0

View file

@@ -1,45 +0,0 @@
name: External Trigger Scheduler
on:
schedule:
- cron: '20 * * * *'
workflow_dispatch:
jobs:
external-trigger-scheduler:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3.1.0
with:
fetch-depth: '0'
- name: External Trigger Scheduler
run: |
echo "**** Branches found: ****"
git for-each-ref --format='%(refname:short)' refs/remotes
for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
do
br=$(echo "$br" | sed 's|origin/||g')
echo "**** Evaluating branch ${br} ****"
ls_jenkins_vars=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-baseimage-alpine/${br}/jenkins-vars.yml)
ls_branch=$(echo "${ls_jenkins_vars}" | yq -r '.ls_branch')
ls_trigger=$(echo "${ls_jenkins_vars}" | yq -r '.external_type')
if [[ "${br}" == "${ls_branch}" ]] && [[ "${ls_trigger}" != "os" ]]; then
echo "**** Branch ${br} appears to be live and trigger is not os; checking workflow. ****"
if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-baseimage-alpine/${br}/.github/workflows/external_trigger.yml > /dev/null 2>&1; then
echo "**** Workflow exists. Triggering external trigger workflow for branch ${br} ****."
echo "Triggering external trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
curl -iX POST \
-H "Authorization: token ${{ secrets.CR_PAT }}" \
-H "Accept: application/vnd.github.v3+json" \
-d "{\"ref\":\"refs/heads/${br}\"}" \
https://api.github.com/repos/linuxserver/docker-baseimage-alpine/actions/workflows/external_trigger.yml/dispatches
else
echo "**** Workflow doesn't exist; skipping trigger. ****"
echo "Skipping branch ${br} due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
fi
else
echo "**** ${br} is either a dev branch, or has no external version; skipping trigger. ****"
echo "Skipping branch ${br} due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
fi
done

View file

@@ -1,13 +0,0 @@
name: Greetings
on: [pull_request_target, issues]
jobs:
greeting:
runs-on: ubuntu-latest
steps:
- uses: actions/first-interaction@v1
with:
issue-message: 'Thanks for opening your first issue here! Be sure to follow the relevant issue templates, or risk having this issue marked as invalid.'
pr-message: 'Thanks for opening this pull request! Be sure to follow the [pull request template](https://github.com/linuxserver/docker-baseimage-alpine/blob/master/.github/PULL_REQUEST_TEMPLATE.md)!'
repo-token: ${{ secrets.GITHUB_TOKEN }}

View file

@@ -1,42 +0,0 @@
name: Package Trigger Main
on:
workflow_dispatch:
jobs:
package-trigger-master:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3.1.0
- name: Package Trigger
if: github.ref == 'refs/heads/master'
run: |
if [ -n "${{ secrets.PAUSE_PACKAGE_TRIGGER_BASEIMAGE_ALPINE_MASTER }}" ]; then
echo "**** Github secret PAUSE_PACKAGE_TRIGGER_BASEIMAGE_ALPINE_MASTER is set; skipping trigger. ****"
echo "Github secret \`PAUSE_PACKAGE_TRIGGER_BASEIMAGE_ALPINE_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
exit 0
fi
if [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-baseimage-alpine/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
echo "**** There already seems to be an active build on Jenkins; skipping package trigger ****"
echo "There already seems to be an active build on Jenkins; skipping package trigger" >> $GITHUB_STEP_SUMMARY
exit 0
fi
echo "**** Package trigger running off of master branch. To disable, set a Github secret named \"PAUSE_PACKAGE_TRIGGER_BASEIMAGE_ALPINE_MASTER\". ****"
echo "Package trigger running off of master branch. To disable, set a Github secret named \`PAUSE_PACKAGE_TRIGGER_BASEIMAGE_ALPINE_MASTER\`" >> $GITHUB_STEP_SUMMARY
response=$(curl -iX POST \
https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-baseimage-alpine/job/master/buildWithParameters?PACKAGE_CHECK=true \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
echo "**** Jenkins job queue url: ${response%$'\r'} ****"
echo "**** Sleeping 10 seconds until job starts ****"
sleep 10
buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
buildurl="${buildurl%$'\r'}"
echo "**** Jenkins job build url: ${buildurl} ****"
echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
echo "**** Attempting to change the Jenkins job description ****"
curl -iX POST \
"${buildurl}submitDescription" \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
--data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--data-urlencode "Submit=Submit"

View file

@@ -1,50 +0,0 @@
name: Package Trigger Scheduler
on:
schedule:
- cron: '15 13 * * 6'
workflow_dispatch:
jobs:
package-trigger-scheduler:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3.1.0
with:
fetch-depth: '0'
- name: Package Trigger Scheduler
run: |
echo "**** Branches found: ****"
git for-each-ref --format='%(refname:short)' refs/remotes
for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
do
br=$(echo "$br" | sed 's|origin/||g')
echo "**** Evaluating branch ${br} ****"
ls_branch=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-baseimage-alpine/${br}/jenkins-vars.yml | yq -r '.ls_branch')
if [ "${br}" == "${ls_branch}" ]; then
echo "**** Branch ${br} appears to be live; checking workflow. ****"
if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-baseimage-alpine/${br}/.github/workflows/package_trigger.yml > /dev/null 2>&1; then
echo "**** Workflow exists. Triggering package trigger workflow for branch ${br}. ****"
echo "Triggering package trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
triggered_branches="${triggered_branches}${br} "
curl -iX POST \
-H "Authorization: token ${{ secrets.CR_PAT }}" \
-H "Accept: application/vnd.github.v3+json" \
-d "{\"ref\":\"refs/heads/${br}\"}" \
https://api.github.com/repos/linuxserver/docker-baseimage-alpine/actions/workflows/package_trigger.yml/dispatches
sleep 30
else
echo "**** Workflow doesn't exist; skipping trigger. ****"
echo "Skipping branch ${br} due to no package trigger workflow present." >> $GITHUB_STEP_SUMMARY
fi
else
echo "**** ${br} appears to be a dev branch; skipping trigger. ****"
echo "Skipping branch ${br} due to being detected as dev branch." >> $GITHUB_STEP_SUMMARY
fi
done
echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****"
echo "**** Notifying Discord ****"
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
"description": "**Package Check Build(s) Triggered for baseimage-alpine** \n**Branch(es):** '"${triggered_branches}"' \n**Build URL:** '"https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-baseimage-alpine/activity/"' \n"}],
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}

View file

@@ -1,10 +0,0 @@
name: Permission check
on:
pull_request_target:
paths:
- '**/run'
- '**/finish'
- '**/check'
jobs:
permission_check:
uses: linuxserver/github-workflows/.github/workflows/init-svc-executable-permissions.yml@v1

View file

@@ -1,10 +1,10 @@
 # syntax=docker/dockerfile:1
-FROM alpine:3.17 as rootfs-stage
+FROM alpine:3.20 AS rootfs-stage
 # environment
 ENV ROOTFS=/root-out
-ENV REL=v3.18
+ENV REL=v3.21
 ENV ARCH=x86_64
 ENV MIRROR=http://dl-cdn.alpinelinux.org/alpine
 ENV PACKAGES=alpine-baselayout,\
@@ -30,7 +30,7 @@ RUN \
 sed -i -e 's/^root::/root:!:/' /root-out/etc/shadow
 # set version for s6 overlay
-ARG S6_OVERLAY_VERSION="3.1.5.0"
+ARG S6_OVERLAY_VERSION="3.2.0.2"
 ARG S6_OVERLAY_ARCH="x86_64"
 # add s6 overlay
@@ -41,7 +41,7 @@ RUN tar -C /root-out -Jxpf /tmp/s6-overlay-${S6_OVERLAY_ARCH}.tar.xz
 # add s6 optional symlinks
 ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-symlinks-noarch.tar.xz /tmp
-RUN tar -C /root-out -Jxpf /tmp/s6-overlay-symlinks-noarch.tar.xz
+RUN tar -C /root-out -Jxpf /tmp/s6-overlay-symlinks-noarch.tar.xz && unlink /root-out/usr/bin/with-contenv
 ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-symlinks-arch.tar.xz /tmp
 RUN tar -C /root-out -Jxpf /tmp/s6-overlay-symlinks-arch.tar.xz
@@ -52,11 +52,15 @@ ARG BUILD_DATE
 ARG VERSION
 ARG MODS_VERSION="v3"
 ARG PKG_INST_VERSION="v1"
+ARG LSIOWN_VERSION="v1"
+ARG WITHCONTENV_VERSION="v1"
 LABEL build_version="Linuxserver.io version:- ${VERSION} Build-date:- ${BUILD_DATE}"
 LABEL maintainer="TheLamer"
-ADD --chmod=744 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/docker-mods.${MODS_VERSION}" "/docker-mods"
-ADD --chmod=744 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/package-install.${PKG_INST_VERSION}" "/etc/s6-overlay/s6-rc.d/init-mods-package-install/run"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/docker-mods.${MODS_VERSION}" "/docker-mods"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/package-install.${PKG_INST_VERSION}" "/etc/s6-overlay/s6-rc.d/init-mods-package-install/run"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/lsiown.${LSIOWN_VERSION}" "/usr/bin/lsiown"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/with-contenv.${WITHCONTENV_VERSION}" "/usr/bin/with-contenv"
 # environment variables
 ENV PS1="$(whoami)@$(hostname):$(pwd)\\$ " \
@@ -74,8 +78,10 @@ RUN \
 alpine-release \
 bash \
 ca-certificates \
+catatonit \
 coreutils \
 curl \
+findutils \
 jq \
 netcat-openbsd \
 procps-ng \

View file

@@ -1,10 +1,10 @@
 # syntax=docker/dockerfile:1
-FROM alpine:3.17 as rootfs-stage
+FROM alpine:3.20 AS rootfs-stage
 # environment
 ENV ROOTFS=/root-out
-ENV REL=v3.18
+ENV REL=v3.21
 ENV ARCH=aarch64
 ENV MIRROR=http://dl-cdn.alpinelinux.org/alpine
 ENV PACKAGES=alpine-baselayout,\
@@ -30,7 +30,7 @@ RUN \
 sed -i -e 's/^root::/root:!:/' /root-out/etc/shadow
 # set version for s6 overlay
-ARG S6_OVERLAY_VERSION="3.1.5.0"
+ARG S6_OVERLAY_VERSION="3.2.0.2"
 ARG S6_OVERLAY_ARCH="aarch64"
 # add s6 overlay
@@ -41,7 +41,7 @@ RUN tar -C /root-out -Jxpf /tmp/s6-overlay-${S6_OVERLAY_ARCH}.tar.xz
 # add s6 optional symlinks
 ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-symlinks-noarch.tar.xz /tmp
-RUN tar -C /root-out -Jxpf /tmp/s6-overlay-symlinks-noarch.tar.xz
+RUN tar -C /root-out -Jxpf /tmp/s6-overlay-symlinks-noarch.tar.xz && unlink /root-out/usr/bin/with-contenv
 ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-symlinks-arch.tar.xz /tmp
 RUN tar -C /root-out -Jxpf /tmp/s6-overlay-symlinks-arch.tar.xz
@@ -52,11 +52,15 @@ ARG BUILD_DATE
 ARG VERSION
 ARG MODS_VERSION="v3"
 ARG PKG_INST_VERSION="v1"
+ARG LSIOWN_VERSION="v1"
+ARG WITHCONTENV_VERSION="v1"
 LABEL build_version="Linuxserver.io version:- ${VERSION} Build-date:- ${BUILD_DATE}"
 LABEL maintainer="TheLamer"
-ADD --chmod=744 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/docker-mods.${MODS_VERSION}" "/docker-mods"
-ADD --chmod=744 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/package-install.${PKG_INST_VERSION}" "/etc/s6-overlay/s6-rc.d/init-mods-package-install/run"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/docker-mods.${MODS_VERSION}" "/docker-mods"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/package-install.${PKG_INST_VERSION}" "/etc/s6-overlay/s6-rc.d/init-mods-package-install/run"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/lsiown.${LSIOWN_VERSION}" "/usr/bin/lsiown"
+ADD --chmod=755 "https://raw.githubusercontent.com/linuxserver/docker-mods/mod-scripts/with-contenv.${WITHCONTENV_VERSION}" "/usr/bin/with-contenv"
 # environment variables
 ENV PS1="$(whoami)@$(hostname):$(pwd)\\$ " \
@@ -74,8 +78,10 @@ RUN \
 alpine-release \
 bash \
 ca-certificates \
+catatonit \
 coreutils \
 curl \
+findutils \
 jq \
 netcat-openbsd \
 procps-ng \

616
Jenkinsfile vendored
View file

@@ -16,7 +16,9 @@ pipeline {
 GITHUB_TOKEN=credentials('498b4638-2d02-4ce5-832d-8a57d01d97ab')
 GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0')
 GITLAB_NAMESPACE=credentials('gitlab-namespace-id')
-SCARF_TOKEN=credentials('scarf_api_key')
+DOCKERHUB_TOKEN=credentials('docker-hub-ci-pat')
+QUAYIO_API_TOKEN=credentials('quayio-repo-api-token')
+GIT_SIGNING_KEY=credentials('484fbca6-9a4f-455e-b9e3-97ac98785f5f')
 BUILD_VERSION_ARG = 'OS'
 LS_USER = 'linuxserver'
 LS_REPO = 'docker-baseimage-alpine'
@@ -31,24 +33,50 @@
 CI_PORT='80'
 CI_SSL='true'
 CI_DELAY='30'
-CI_DOCKERENV='TZ=US/Pacific|LSIO_FIRST_PARTY=true'
-CI_AUTH='user:password'
+CI_DOCKERENV='LSIO_FIRST_PARTY=true'
+CI_AUTH=''
 CI_WEBPATH=''
 }
 stages {
+stage("Set git config"){
+steps{
+sh '''#!/bin/bash
+cat ${GIT_SIGNING_KEY} > /config/.ssh/id_sign
+chmod 600 /config/.ssh/id_sign
+ssh-keygen -y -f /config/.ssh/id_sign > /config/.ssh/id_sign.pub
+echo "Using $(ssh-keygen -lf /config/.ssh/id_sign) to sign commits"
+git config --global gpg.format ssh
+git config --global user.signingkey /config/.ssh/id_sign
+git config --global commit.gpgsign true
+'''
+}
+}
 // Setup all the basic environment variables needed for the build
 stage("Set ENV Variables base"){
 steps{
+echo "Running on node: ${NODE_NAME}"
 sh '''#! /bin/bash
-containers=$(docker ps -aq)
+echo "Pruning builder"
+docker builder prune -f --builder container || :
+containers=$(docker ps -q)
 if [[ -n "${containers}" ]]; then
-docker stop ${containers}
-fi
-docker system prune -af --volumes || : '''
+BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
+for container in ${containers}; do
+if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
+echo "skipping buildx container in docker stop"
+else
+echo "Stopping container ${container}"
+docker stop ${container}
+fi
+done
+fi
+docker system prune -f --volumes || :
+docker image prune -af || :
+'''
 script{
 env.EXIT_STATUS = ''
 env.LS_RELEASE = sh(
-script: '''docker run --rm quay.io/skopeo/stable:v1 inspect docker://ghcr.io/${LS_USER}/${CONTAINER_NAME}:3.18 2>/dev/null | jq -r '.Labels.build_version' | awk '{print $3}' | grep '\\-ls' || : ''',
+script: '''docker run --rm quay.io/skopeo/stable:v1 inspect docker://ghcr.io/${LS_USER}/${CONTAINER_NAME}:3.21 2>/dev/null | jq -r '.Labels.build_version' | awk '{print $3}' | grep '\\-ls' || : ''',
 returnStdout: true).trim()
 env.LS_RELEASE_NOTES = sh(
 script: '''cat readme-vars.yml | awk -F \\" '/date: "[0-9][0-9].[0-9][0-9].[0-9][0-9]:/ {print $4;exit;}' | sed -E ':a;N;$!ba;s/\\r{0,1}\\n/\\\\n/g' ''',
@@ -59,11 +87,16 @@
 env.COMMIT_SHA = sh(
 script: '''git rev-parse HEAD''',
 returnStdout: true).trim()
+env.GH_DEFAULT_BRANCH = sh(
+script: '''git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||' ''',
+returnStdout: true).trim()
 env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/commit/' + env.GIT_COMMIT
 env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DOCKERHUB_IMAGE + '/tags/'
 env.PULL_REQUEST = env.CHANGE_ID
-env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./.github/workflows/package_trigger.yml'
+env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml'
 }
+sh '''#! /bin/bash
+echo "The default github branch detected as ${GH_DEFAULT_BRANCH}" '''
 script{
 env.LS_RELEASE_NUMBER = sh(
 script: '''echo ${LS_RELEASE} |sed 's/^.*-ls//g' ''',
@@ -72,7 +105,7 @@
 script{
 env.LS_TAG_NUMBER = sh(
 script: '''#! /bin/bash
-tagsha=$(git rev-list -n 1 3.18-${LS_RELEASE} 2>/dev/null)
+tagsha=$(git rev-list -n 1 3.21-${LS_RELEASE} 2>/dev/null)
 if [ "${tagsha}" == "${COMMIT_SHA}" ]; then
 echo ${LS_RELEASE_NUMBER}
 elif [ -z "${GIT_COMMIT}" ]; then
@@ -119,7 +152,7 @@
 steps{
 script{
 env.EXT_RELEASE_CLEAN = sh(
-script: '''echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g' ''',
+script: '''echo ${EXT_RELEASE} | sed 's/[~,%@+;:/ ]//g' ''',
 returnStdout: true).trim()
 def semver = env.EXT_RELEASE_CLEAN =~ /(\d+)\.(\d+)\.(\d+)/
@@ -137,7 +170,7 @@
 }
 if (env.SEMVER != null) {
-if (BRANCH_NAME != "master" && BRANCH_NAME != "main") {
+if (BRANCH_NAME != "${env.GH_DEFAULT_BRANCH}") {
 env.SEMVER = "${env.SEMVER}-${BRANCH_NAME}"
 }
 println("SEMVER: ${env.SEMVER}")
@@ -161,13 +194,14 @@
 env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/' + env.CONTAINER_NAME
 env.QUAYIMAGE = 'quay.io/linuxserver.io/' + env.CONTAINER_NAME
 if (env.MULTIARCH == 'true') {
-env.CI_TAGS = 'amd64-3.18-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER + '|arm64v8-3.18-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
+env.CI_TAGS = 'amd64-3.21-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER + '|arm64v8-3.21-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
 } else {
-env.CI_TAGS = '3.18-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
+env.CI_TAGS = '3.21-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
 }
 env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
-env.META_TAG = '3.18-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
-env.EXT_RELEASE_TAG = '3.18-version-' + env.EXT_RELEASE_CLEAN
+env.META_TAG = '3.21-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
+env.EXT_RELEASE_TAG = '3.21-version-' + env.EXT_RELEASE_CLEAN
+env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
 }
 }
 }
@@ -184,14 +218,15 @@
 env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/lsiodev-' + env.CONTAINER_NAME
 env.QUAYIMAGE = 'quay.io/linuxserver.io/lsiodev-' + env.CONTAINER_NAME
 if (env.MULTIARCH == 'true') {
-env.CI_TAGS = 'amd64-3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '|arm64v8-3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
+env.CI_TAGS = 'amd64-3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '|arm64v8-3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
 } else {
-env.CI_TAGS = '3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
+env.CI_TAGS = '3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
 }
 env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
-env.META_TAG = '3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
-env.EXT_RELEASE_TAG = '3.18-version-' + env.EXT_RELEASE_CLEAN
+env.META_TAG = '3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
+env.EXT_RELEASE_TAG = '3.21-version-' + env.EXT_RELEASE_CLEAN
 env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/'
+env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
 }
 }
 }
@@ -207,15 +242,16 @@
 env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/lspipepr-' + env.CONTAINER_NAME
 env.QUAYIMAGE = 'quay.io/linuxserver.io/lspipepr-' + env.CONTAINER_NAME
 if (env.MULTIARCH == 'true') {
-env.CI_TAGS = 'amd64-3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST + '|arm64v8-3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
+env.CI_TAGS = 'amd64-3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST + '|arm64v8-3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
 } else {
-env.CI_TAGS = '3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
+env.CI_TAGS = '3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
 }
 env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
-env.META_TAG = '3.18-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
-env.EXT_RELEASE_TAG = '3.18-version-' + env.EXT_RELEASE_CLEAN
+env.META_TAG = '3.21-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
+env.EXT_RELEASE_TAG = '3.21-version-' + env.EXT_RELEASE_CLEAN
 env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST
 env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/'
+env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
 }
 }
 }
@@ -238,9 +274,11 @@
 -v ${WORKSPACE}:/mnt \
 -e AWS_ACCESS_KEY_ID=\"${S3_KEY}\" \
 -e AWS_SECRET_ACCESS_KEY=\"${S3_SECRET}\" \
-ghcr.io/linuxserver/baseimage-alpine:3.17 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
-apk add --no-cache py3-pip && \
-pip install s3cmd && \
+ghcr.io/linuxserver/baseimage-alpine:3.20 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
+apk add --no-cache python3 && \
+python3 -m venv /lsiopy && \
+pip install --no-cache-dir -U pip && \
+pip install --no-cache-dir s3cmd && \
 s3cmd put --no-preserve --acl-public -m text/xml /mnt/shellcheck-result.xml s3://ci-tests.linuxserver.io/${IMAGE}/${META_TAG}/shellcheck-result.xml" || :'''
 }
 }
@@ -259,8 +297,15 @@
 set -e
 TEMPDIR=$(mktemp -d)
 docker pull ghcr.io/linuxserver/jenkins-builder:latest
-docker run --rm -e CONTAINER_NAME=${CONTAINER_NAME} -e GITHUB_BRANCH=master -v ${TEMPDIR}:/ansible/jenkins ghcr.io/linuxserver/jenkins-builder:latest
-# Stage 1 - Jenkinsfile update
+# Cloned repo paths for templating:
+# ${TEMPDIR}/docker-${CONTAINER_NAME}: Cloned branch master of ${LS_USER}/${LS_REPO} for running the jenkins builder on
+# ${TEMPDIR}/repo/${LS_REPO}: Cloned branch master of ${LS_USER}/${LS_REPO} for commiting various templated file changes and pushing back to Github
+# ${TEMPDIR}/docs/docker-documentation: Cloned docs repo for pushing docs updates to Github
+# ${TEMPDIR}/unraid/docker-templates: Cloned docker-templates repo to check for logos
+# ${TEMPDIR}/unraid/templates: Cloned templates repo for commiting unraid template changes and pushing back to Github
+git clone --branch master --depth 1 https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/docker-${CONTAINER_NAME}
+docker run --rm -v ${TEMPDIR}/docker-${CONTAINER_NAME}:/tmp -e LOCAL=true -e PUID=$(id -u) -e PGID=$(id -g) ghcr.io/linuxserver/jenkins-builder:latest
+echo "Starting Stage 1 - Jenkinsfile update"
 if [[ "$(md5sum Jenkinsfile | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile | awk '{ print $1 }')" ]]; then
 mkdir -p ${TEMPDIR}/repo
 git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
@@ -269,16 +314,17 @@
 cp ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile ${TEMPDIR}/repo/${LS_REPO}/
 git add Jenkinsfile
 git commit -m 'Bot Updating Templated Files'
-git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
+git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
+git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
 echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
-echo "Updating Jenkinsfile"
+echo "Updating Jenkinsfile and exiting build, new one will trigger based on commit"
 rm -Rf ${TEMPDIR}
 exit 0
 else
 echo "Jenkinsfile is up to date."
 fi
-# Stage 2 - Delete old templates
-OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml Dockerfile.armhf"
+echo "Starting Stage 2 - Delete old templates"
+OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml .github/workflows/package_trigger.yml"
 for i in ${OLD_TEMPLATES}; do
 if [[ -f "${i}" ]]; then
 TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}"
@@ -293,15 +339,16 @@
 git rm "${i}"
 done
 git commit -m 'Bot Updating Templated Files'
-git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
+git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
+git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
 echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
-echo "Deleting old and deprecated templates"
+echo "Deleting old/deprecated templates and exiting build, new one will trigger based on commit"
 rm -Rf ${TEMPDIR}
 exit 0
 else
 echo "No templates to delete"
 fi
-# Stage 3 - Update templates
+echo "Starting Stage 3 - Update templates"
 CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
 cd ${TEMPDIR}/docker-${CONTAINER_NAME}
 NEWHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
@ -322,19 +369,62 @@ pipeline {
fi fi
git add readme-vars.yml ${TEMPLATED_FILES} git add readme-vars.yml ${TEMPLATED_FILES}
git commit -m 'Bot Updating Templated Files' git commit -m 'Bot Updating Templated Files'
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER} echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
echo "Updating templates and exiting build, new one will trigger based on commit"
rm -Rf ${TEMPDIR}
exit 0
else else
echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER} echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
echo "No templates to update"
fi fi
- mkdir -p ${TEMPDIR}/gitbook
- git clone https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/gitbook/docker-documentation
- if [[ ("${BRANCH_NAME}" == "master") || ("${BRANCH_NAME}" == "main") ]] && [[ (! -f ${TEMPDIR}/gitbook/docker-documentation/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/gitbook/docker-documentation/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
- cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/gitbook/docker-documentation/images/
- cd ${TEMPDIR}/gitbook/docker-documentation/
- git add images/docker-${CONTAINER_NAME}.md
+ echo "Starting Stage 4 - External repo updates: Docs, Unraid Template and Readme Sync to Docker Hub"
+ mkdir -p ${TEMPDIR}/docs
+ git clone --depth=1 https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/docs/docker-documentation
+ if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
+ cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/docs/docker-documentation/docs/images/
+ cd ${TEMPDIR}/docs/docker-documentation
+ GH_DOCS_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
+ git add docs/images/docker-${CONTAINER_NAME}.md
+ echo "Updating docs repo"
git commit -m 'Bot Updating Documentation'
- git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git --all
+ git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} || \
(MAXWAIT="10" && echo "Push to docs failed, trying again in ${MAXWAIT} seconds" && \
sleep $((RANDOM % MAXWAIT)) && \
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase && \
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH})
else
echo "Docs update not needed, skipping"
fi
if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]]; then
if [[ $(cat ${TEMPDIR}/docker-${CONTAINER_NAME}/README.md | wc -m) -gt 25000 ]]; then
echo "Readme is longer than 25,000 characters. Syncing the lite version to Docker Hub"
DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/README.lite"
else
echo "Syncing readme to Docker Hub"
DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/README.md"
fi
if curl -s https://hub.docker.com/v2/namespaces/${DOCKERHUB_IMAGE%%/*}/repositories/${DOCKERHUB_IMAGE##*/}/tags | jq -r '.message' | grep -q 404; then
echo "Docker Hub endpoint doesn't exist. Creating endpoint first."
DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
curl -s \
-H "Authorization: JWT ${DH_TOKEN}" \
-H "Content-Type: application/json" \
-X POST \
-d '{"name":"'${DOCKERHUB_IMAGE##*/}'", "namespace":"'${DOCKERHUB_IMAGE%%/*}'"}' \
https://hub.docker.com/v2/repositories/ || :
fi
DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
curl -s \
-H "Authorization: JWT ${DH_TOKEN}" \
-H "Content-Type: application/json" \
-X PATCH \
-d "{\\"full_description\\":$(jq -Rsa . ${DH_README_SYNC_PATH})}" \
https://hub.docker.com/v2/repositories/${DOCKERHUB_IMAGE} || :
else
echo "Not the default Github branch. Skipping readme sync to Docker Hub."
fi
rm -Rf ${TEMPDIR}'''
script{
@@ -381,10 +471,10 @@ pipeline {
}
}
/* #######################
- GitLab Mirroring
+ GitLab Mirroring and Quay.io Repo Visibility
####################### */
- // Ping into Gitlab to mirror this repo and have a registry endpoint
+ // Ping into Gitlab to mirror this repo and have a registry endpoint & mark this repo on Quay.io as public
- stage("GitLab Mirror"){
+ stage("GitLab Mirror and Quay.io Visibility"){
when {
environment name: 'EXIT_STATUS', value: ''
}
@@ -398,35 +488,10 @@ pipeline {
"merge_requests_access_level":"disabled",\
"repository_access_level":"enabled",\
"visibility":"public"}' '''
- }
- }
- /* #######################
- Scarf.sh package registry
- ####################### */
- // Add package to Scarf.sh and set permissions
- stage("Scarf.sh package registry"){
- when {
- branch "master"
- environment name: 'EXIT_STATUS', value: ''
- }
- steps{
- sh '''#! /bin/bash
- PACKAGE_UUID=$(curl -X GET -H "Authorization: Bearer ${SCARF_TOKEN}" https://scarf.sh/api/v1/organizations/linuxserver-ci/packages | jq -r '.[] | select(.name=="linuxserver/baseimage-alpine") | .uuid' || :)
- if [ -z "${PACKAGE_UUID}" ]; then
- echo "Adding package to Scarf.sh"
- curl -sX POST https://scarf.sh/api/v1/organizations/linuxserver-ci/packages \
- -H "Authorization: Bearer ${SCARF_TOKEN}" \
- -H "Content-Type: application/json" \
- -d '{"name":"linuxserver/baseimage-alpine",\
- "shortDescription":"example description",\
- "libraryType":"docker",\
- "website":"https://github.com/linuxserver/docker-baseimage-alpine",\
- "backendUrl":"https://ghcr.io/linuxserver/baseimage-alpine",\
- "publicUrl":"https://lscr.io/linuxserver/baseimage-alpine"}' || :
- else
- echo "Package already exists on Scarf.sh"
- fi
- '''
+ sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \
+ -d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" '''
+ sh '''curl -H "Content-Type: application/json" -H "Authorization: Bearer ${QUAYIO_API_TOKEN}" -X POST "https://quay.io/api/v1/repository${QUAYIMAGE/quay.io/}/changevisibility" \
+ -d '{"visibility":"public"}' ||: '''
}
}
/* ###############
@@ -456,7 +521,42 @@ pipeline {
--label \"org.opencontainers.image.title=Baseimage-alpine\" \
--label \"org.opencontainers.image.description=baseimage-alpine image by linuxserver.io\" \
--no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \
+ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh '''#! /bin/bash
set -e
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
}
}
// Build MultiArch Docker containers for push to LS Repo
@@ -486,7 +586,42 @@ pipeline {
--label \"org.opencontainers.image.title=Baseimage-alpine\" \
--label \"org.opencontainers.image.description=baseimage-alpine image by linuxserver.io\" \
--no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \
+ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh '''#! /bin/bash
set -e
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:amd64-${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
}
}
stage('Build ARM64') {
@@ -495,10 +630,6 @@ pipeline {
}
steps {
echo "Running on node: ${NODE_NAME}"
- echo 'Logging into Github'
- sh '''#! /bin/bash
- echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
- '''
sh "docker buildx build \
--label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
--label \"org.opencontainers.image.authors=linuxserver.io\" \
@@ -513,17 +644,50 @@ pipeline {
--label \"org.opencontainers.image.title=Baseimage-alpine\" \
--label \"org.opencontainers.image.description=baseimage-alpine image by linuxserver.io\" \
--no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \
+ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
- sh "docker tag ${IMAGE}:arm64v8-${META_TAG} ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}"
- retry(5) {
- sh "docker push ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}"
+ sh '''#! /bin/bash
+ set -e
+ IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:arm64v8-${META_TAG} ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
sh '''#! /bin/bash
containers=$(docker ps -aq)
if [[ -n "${containers}" ]]; then
docker stop ${containers}
fi
- docker system prune -af --volumes || : '''
+ docker system prune -f --volumes || :
+ docker image prune -af || :
+ '''
}
}
}
@@ -539,7 +703,7 @@ pipeline {
sh '''#! /bin/bash
set -e
TEMPDIR=$(mktemp -d)
- if [ "${MULTIARCH}" == "true" ] && [ "${PACKAGE_CHECK}" == "false" ]; then
+ if [ "${MULTIARCH}" == "true" ] && [ "${PACKAGE_CHECK}" != "true" ]; then
LOCAL_CONTAINER=${IMAGE}:amd64-${META_TAG}
else
LOCAL_CONTAINER=${IMAGE}:${META_TAG}
@@ -560,7 +724,8 @@ pipeline {
wait
git add package_versions.txt
git commit -m 'Bot Updating Package Versions'
- git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
+ git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
+ git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
echo "true" > /tmp/packages-${COMMIT_SHA}-${BUILD_NUMBER}
echo "Package tag updated, stopping build process"
else
@@ -626,27 +791,37 @@ pipeline {
}
sh '''#! /bin/bash
set -e
if grep -q 'docker-baseimage' <<< "${LS_REPO}"; then
echo "Detected baseimage, setting LSIO_FIRST_PARTY=true"
if [ -n "${CI_DOCKERENV}" ]; then
CI_DOCKERENV="LSIO_FIRST_PARTY=true|${CI_DOCKERENV}"
else
CI_DOCKERENV="LSIO_FIRST_PARTY=true"
fi
fi
docker pull ghcr.io/linuxserver/ci:latest
if [ "${MULTIARCH}" == "true" ]; then
- docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
+ docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
fi
docker run --rm \
--shm-size=1gb \
-v /var/run/docker.sock:/var/run/docker.sock \
-e IMAGE=\"${IMAGE}\" \
- -e DELAY_START=\"${CI_DELAY}\" \
+ -e DOCKER_LOGS_TIMEOUT=\"${CI_DELAY}\" \
-e TAGS=\"${CI_TAGS}\" \
-e META_TAG=\"${META_TAG}\" \
+ -e RELEASE_TAG=\"3.21\" \
-e PORT=\"${CI_PORT}\" \
-e SSL=\"${CI_SSL}\" \
-e BASE=\"${DIST_IMAGE}\" \
-e SECRET_KEY=\"${S3_SECRET}\" \
-e ACCESS_KEY=\"${S3_KEY}\" \
- -e DOCKER_ENV=\"LSIO_FIRST_PARTY=true|${CI_DOCKERENV}\" \
+ -e DOCKER_ENV=\"${CI_DOCKERENV}\" \
-e WEB_SCREENSHOT=\"${CI_WEB}\" \
-e WEB_AUTH=\"${CI_AUTH}\" \
-e WEB_PATH=\"${CI_WEBPATH}\" \
+ -e NODE_NAME=\"${NODE_NAME}\" \
-t ghcr.io/linuxserver/ci:latest \
python3 test_build.py'''
}
@@ -662,46 +837,26 @@ pipeline {
environment name: 'EXIT_STATUS', value: ''
}
steps {
- withCredentials([
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
- usernameVariable: 'DOCKERUSER',
- passwordVariable: 'DOCKERPASS'
- ],
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: 'Quay.io-Robot',
- usernameVariable: 'QUAYUSER',
- passwordVariable: 'QUAYPASS'
- ]
- ]) {
- retry(5) {
+ retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
- echo $DOCKERPASS | docker login -u $DOCKERUSER --password-stdin
- echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
- echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
- echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
- for PUSHIMAGE in "${GITHUBIMAGE}" "${GITLABIMAGE}" "${QUAYIMAGE}" "${IMAGE}"; do
- docker tag ${IMAGE}:${META_TAG} ${PUSHIMAGE}:${META_TAG}
- docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:3.18
- docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${EXT_RELEASE_TAG}
- if [ -n "${SEMVER}" ]; then
- docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${SEMVER}
- fi
- docker push ${PUSHIMAGE}:3.18
- docker push ${PUSHIMAGE}:${META_TAG}
- docker push ${PUSHIMAGE}:${EXT_RELEASE_TAG}
- if [ -n "${SEMVER}" ]; then
- docker push ${PUSHIMAGE}:${SEMVER}
- fi
- done
+ for PUSHIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
+ [[ ${PUSHIMAGE%%/*} =~ \\. ]] && PUSHIMAGEPLUS="${PUSHIMAGE}" || PUSHIMAGEPLUS="docker.io/${PUSHIMAGE}"
+ IFS=',' read -ra CACHE <<< "$BUILDCACHE"
+ for i in "${CACHE[@]}"; do
+ if [[ "${PUSHIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
+ CACHEIMAGE=${i}
+ fi
+ done
+ docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${META_TAG} -t ${PUSHIMAGE}:3.21 -t ${PUSHIMAGE}:${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+ if [ -n "${SEMVER}" ]; then
+ docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+ fi
+ done
'''
}
}
}
- }
// If this is a multi arch release push all images and define the manifest
stage('Docker-Push-Multi') {
when {
@@ -709,88 +864,37 @@ pipeline {
environment name: 'EXIT_STATUS', value: ''
}
steps {
- withCredentials([
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
- usernameVariable: 'DOCKERUSER',
- passwordVariable: 'DOCKERPASS'
- ],
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: 'Quay.io-Robot',
- usernameVariable: 'QUAYUSER',
- passwordVariable: 'QUAYPASS'
- ]
- ]) {
- retry(5) {
+ retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
- echo $DOCKERPASS | docker login -u $DOCKERUSER --password-stdin
- echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
- echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
- echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
- if [ "${CI}" == "false" ]; then
- docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
- docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
- fi
for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
- docker tag ${IMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG}
- docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-3.18
- docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
- docker tag ${IMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
- docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-3.18
- docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
- if [ -n "${SEMVER}" ]; then
- docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${SEMVER}
- docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
- fi
- docker push ${MANIFESTIMAGE}:amd64-${META_TAG}
- docker push ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
- docker push ${MANIFESTIMAGE}:amd64-3.18
- docker push ${MANIFESTIMAGE}:arm64v8-${META_TAG}
- docker push ${MANIFESTIMAGE}:arm64v8-3.18
- docker push ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
- if [ -n "${SEMVER}" ]; then
- docker push ${MANIFESTIMAGE}:amd64-${SEMVER}
- docker push ${MANIFESTIMAGE}:arm64v8-${SEMVER}
- fi
- docker manifest push --purge ${MANIFESTIMAGE}:3.18 || :
- docker manifest create ${MANIFESTIMAGE}:3.18 ${MANIFESTIMAGE}:amd64-3.18 ${MANIFESTIMAGE}:arm64v8-3.18
- docker manifest annotate ${MANIFESTIMAGE}:3.18 ${MANIFESTIMAGE}:arm64v8-3.18 --os linux --arch arm64 --variant v8
- docker manifest push --purge ${MANIFESTIMAGE}:${META_TAG} || :
- docker manifest create ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
- docker manifest annotate ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG} --os linux --arch arm64 --variant v8
- docker manifest push --purge ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} || :
- docker manifest create ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
- docker manifest annotate ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} --os linux --arch arm64 --variant v8
- if [ -n "${SEMVER}" ]; then
- docker manifest push --purge ${MANIFESTIMAGE}:${SEMVER} || :
- docker manifest create ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
- docker manifest annotate ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER} --os linux --arch arm64 --variant v8
- fi
- token=$(curl -sX GET "https://ghcr.io/token?scope=repository%3Alinuxserver%2F${CONTAINER_NAME}%3Apull" | jq -r '.token')
- digest=$(curl -s \
- --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
- --header "Authorization: Bearer ${token}" \
- "https://ghcr.io/v2/linuxserver/${CONTAINER_NAME}/manifests/arm32v7-3.18")
- if [[ $(echo "$digest" | jq -r '.layers') != "null" ]]; then
- docker manifest push --purge ${MANIFESTIMAGE}:arm32v7-3.18 || :
- docker manifest create ${MANIFESTIMAGE}:arm32v7-3.18 ${MANIFESTIMAGE}:amd64-3.18
- docker manifest push --purge ${MANIFESTIMAGE}:arm32v7-3.18
- fi
- docker manifest push --purge ${MANIFESTIMAGE}:3.18
- docker manifest push --purge ${MANIFESTIMAGE}:${META_TAG}
- docker manifest push --purge ${MANIFESTIMAGE}:${EXT_RELEASE_TAG}
- if [ -n "${SEMVER}" ]; then
- docker manifest push --purge ${MANIFESTIMAGE}:${SEMVER}
- fi
+ [[ ${MANIFESTIMAGE%%/*} =~ \\. ]] && MANIFESTIMAGEPLUS="${MANIFESTIMAGE}" || MANIFESTIMAGEPLUS="docker.io/${MANIFESTIMAGE}"
+ IFS=',' read -ra CACHE <<< "$BUILDCACHE"
+ for i in "${CACHE[@]}"; do
+ if [[ "${MANIFESTIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
+ CACHEIMAGE=${i}
+ fi
+ done
+ docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${META_TAG} -t ${MANIFESTIMAGE}:amd64-3.21 -t ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+ docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${META_TAG} -t ${MANIFESTIMAGE}:arm64v8-3.21 -t ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
+ if [ -n "${SEMVER}" ]; then
+ docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
+ docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${SEMVER} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
+ fi
+ done
+ for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
+ docker buildx imagetools create -t ${MANIFESTIMAGE}:3.21 ${MANIFESTIMAGE}:amd64-3.21 ${MANIFESTIMAGE}:arm64v8-3.21
+ docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
+ docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
+ if [ -n "${SEMVER}" ]; then
+ docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
+ fi
+ done
'''
}
}
}
- }
// If this is a public release tag it in the LS Github
stage('Github-Tag-Push-Release') {
when {
@@ -808,50 +912,52 @@ pipeline {
"object": "'${COMMIT_SHA}'",\
"message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to master",\
"type": "commit",\
- "tagger": {"name": "LinuxServer Jenkins","email": "jenkins@linuxserver.io","date": "'${GITHUB_DATE}'"}}' '''
+ "tagger": {"name": "LinuxServer-CI","email": "ci@linuxserver.io","date": "'${GITHUB_DATE}'"}}' '''
echo "Pushing New release for Tag"
sh '''#! /bin/bash
echo "Updating base packages to ${PACKAGE_TAG}" > releasebody.json
echo '{"tag_name":"'${META_TAG}'",\
"target_commitish": "master",\
"name": "'${META_TAG}'",\
- "body": "**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**OS Changes:**\\n\\n' > start
+ "body": "**CI Report:**\\n\\n'${CI_URL:-N/A}'\\n\\n**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Remote Changes:**\\n\\n' > start
printf '","draft": false,"prerelease": false}' >> releasebody.json
paste -d'\\0' start releasebody.json > releasebody.json.done
curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done'''
}
}
- // Use helper container to sync the current README on master to the dockerhub endpoint
- stage('Sync-README') {
+ // Add protection to the release branch
+ stage('Github-Release-Branch-Protection') {
when {
- branch "master"
environment name: 'CHANGE_ID', value: ''
environment name: 'EXIT_STATUS', value: ''
}
steps {
- withCredentials([
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
- usernameVariable: 'DOCKERUSER',
- passwordVariable: 'DOCKERPASS'
- ]
- ]) {
- sh '''#! /bin/bash
- set -e
- TEMPDIR=$(mktemp -d)
- docker pull ghcr.io/linuxserver/jenkins-builder:latest
- docker run --rm -e CONTAINER_NAME=${CONTAINER_NAME} -e GITHUB_BRANCH="${BRANCH_NAME}" -v ${TEMPDIR}:/ansible/jenkins ghcr.io/linuxserver/jenkins-builder:latest
- docker pull ghcr.io/linuxserver/readme-sync
- docker run --rm=true \
- -e DOCKERHUB_USERNAME=$DOCKERUSER \
- -e DOCKERHUB_PASSWORD=$DOCKERPASS \
- -e GIT_REPOSITORY=${LS_USER}/${LS_REPO} \
- -e DOCKER_REPOSITORY=${IMAGE} \
- -e GIT_BRANCH=master \
- -v ${TEMPDIR}/docker-${CONTAINER_NAME}:/mnt \
- ghcr.io/linuxserver/readme-sync bash -c 'node sync'
- rm -Rf ${TEMPDIR} '''
- }
+ echo "Setting up protection for release branch master"
+ sh '''#! /bin/bash
+ curl -H "Authorization: token ${GITHUB_TOKEN}" -X PUT https://api.github.com/repos/${LS_USER}/${LS_REPO}/branches/master/protection \
+ -d $(jq -c . << EOF
+ {
+ "required_status_checks": null,
+ "enforce_admins": false,
+ "required_pull_request_reviews": {
+ "dismiss_stale_reviews": false,
+ "require_code_owner_reviews": false,
+ "require_last_push_approval": false,
+ "required_approving_review_count": 1
+ },
+ "restrictions": null,
+ "required_linear_history": false,
+ "allow_force_pushes": false,
+ "allow_deletions": false,
+ "block_creations": false,
+ "required_conversation_resolution": true,
+ "lock_branch": false,
+ "allow_fork_syncing": false,
+ "required_signatures": false
+ }
+ EOF
+ ) '''
}
}
// If this is a Pull request send the CI link as a comment on it
@@ -938,32 +1044,94 @@ pipeline {
###################### */
post {
always {
sh '''#!/bin/bash
rm -rf /config/.ssh/id_sign
rm -rf /config/.ssh/id_sign.pub
git config --global --unset gpg.format
git config --global --unset user.signingkey
git config --global --unset commit.gpgsign
'''
script{
env.JOB_DATE = sh(
script: '''date '+%Y-%m-%dT%H:%M:%S%:z' ''',
returnStdout: true).trim()
if (env.EXIT_STATUS == "ABORTED"){
sh 'echo "build aborted"'
}else{
if (currentBuild.currentResult == "SUCCESS"){
if (env.GITHUBIMAGE =~ /lspipepr/){
env.JOB_WEBHOOK_STATUS='Success'
env.JOB_WEBHOOK_COLOUR=3957028
env.JOB_WEBHOOK_FOOTER='PR Build'
}else if (env.GITHUBIMAGE =~ /lsiodev/){
env.JOB_WEBHOOK_STATUS='Success'
env.JOB_WEBHOOK_COLOUR=3957028
env.JOB_WEBHOOK_FOOTER='Dev Build'
}else{
env.JOB_WEBHOOK_STATUS='Success'
env.JOB_WEBHOOK_COLOUR=1681177
env.JOB_WEBHOOK_FOOTER='Live Build'
}
- else if (currentBuild.currentResult == "SUCCESS"){
- sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 1681177,\
- "description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** Success\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
- "username": "Jenkins"}' ${BUILDS_DISCORD} '''
+ }else{
+ if (env.GITHUBIMAGE =~ /lspipepr/){
+ env.JOB_WEBHOOK_STATUS='Failure'
+ env.JOB_WEBHOOK_COLOUR=12669523
env.JOB_WEBHOOK_FOOTER='PR Build'
}else if (env.GITHUBIMAGE =~ /lsiodev/){
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=12669523
env.JOB_WEBHOOK_FOOTER='Dev Build'
}else{
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=16711680
env.JOB_WEBHOOK_FOOTER='Live Build'
}
- else {
- sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 16711680,\
- "description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** failure\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
+ }
+ sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"'color'": '${JOB_WEBHOOK_COLOUR}',\
+ "footer": {"text" : "'"${JOB_WEBHOOK_FOOTER}"'"},\
"timestamp": "'${JOB_DATE}'",\
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** '${JOB_WEBHOOK_STATUS}'\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
"username": "Jenkins"}' ${BUILDS_DISCORD} ''' "username": "Jenkins"}' ${BUILDS_DISCORD} '''
} }
} }
} }
cleanup {
sh '''#! /bin/bash
- echo "Performing docker system prune!!"
+ echo "Pruning builder!!"
- containers=$(docker ps -aq)
+ docker builder prune -f --builder container || :
+ containers=$(docker ps -q)
if [[ -n "${containers}" ]]; then
- docker stop ${containers}
+ BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
for container in ${containers}; do
if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
echo "skipping buildx container in docker stop"
else
echo "Stopping container ${container}"
docker stop ${container}
fi
- docker system prune -af --volumes || :
+ done
fi
docker system prune -f --volumes || :
docker image prune -af || :
'''
cleanWs()
}
}
}
def retry_backoff(int max_attempts, int power_base, Closure c) {
int n = 0
while (n < max_attempts) {
try {
c()
return
} catch (err) {
if ((n + 1) >= max_attempts) {
throw err
}
sleep(power_base ** n)
n++
}
}
return
}
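The release stages above now assemble multi-arch tags with docker buildx imagetools create instead of the old docker manifest create/annotate/push sequence, stitching an index together from per-arch tags that are already in the registry. A minimal sketch of that pattern, using a purely illustrative repository and tags rather than this pipeline's variables:

# The amd64 and arm64v8 tags are assumed to have been pushed already;
# imagetools builds and pushes the combined index without pulling anything locally.
docker buildx imagetools create \
  -t registry.example.com/demo/image:latest \
  registry.example.com/demo/image:amd64-latest \
  registry.example.com/demo/image:arm64v8-latest
# Optional check that both architectures ended up in the resulting index.
docker buildx imagetools inspect registry.example.com/demo/image:latest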

@@ -1,30 +1 @@
- <!-- DO NOT EDIT THIS FILE MANUALLY -->
- <!-- Please read the https://github.com/linuxserver/docker-baseimage-alpine/blob/master/.github/CONTRIBUTING.md -->
- [linuxserverurl]: https://linuxserver.io
- [forumurl]: https://forum.linuxserver.io
- [ircurl]: https://mibbit.com/?server=irc.libera.chat&channel=%23linuxserver.io
- [appurl]: https://alpinelinux.org
- [![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png?v=4&s=4000)][linuxserverurl]
- ## Contact information:-
- | Type | Address/Details |
- | :---: | --- |
- | Discord | [Discord](https://discord.gg/YWrKVTn) |
- | IRC | libera at `#linuxserver.io` more information at:- [IRC][ircurl]
- | Forum | [LinuxServer.io forum][forumurl] |
- &nbsp;
- &nbsp;
- [![](https://images.microbadger.com/badges/image/lsiobase/alpine.svg)](https://microbadger.com/images/lsiobase/alpine "Get your own image badge on microbadger.com")
- [![](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/Dockerfile-Link-green.png)](https://github.com/linuxserver/docker-baseimage-alpine/blob/master/Dockerfile)
- A custom base image built with [Alpine linux][appurl] and [S6 overlay](https://github.com/just-containers/s6-overlay)..
- The following line is only in this repo for loop testing:
- - { date: "01.01.50:", desc: "I am the release message for this internal repo." }
+ A custom base image built with [Alpine Linux](https://alpinelinux.org) and [s6-overlay](https://github.com/just-containers/s6-overlay).

@@ -4,9 +4,10 @@
project_name: docker-baseimage-alpine
external_type: os
release_type: stable
- release_tag: "3.18"
+ release_tag: "3.21"
ls_branch: master
- build_armhf: false
+ image_sbom: true
+ image_provenance: true
repo_vars:
- BUILD_VERSION_ARG = 'OS'
- LS_USER = 'linuxserver'
@@ -22,6 +23,6 @@ repo_vars:
- CI_PORT='80'
- CI_SSL='true'
- CI_DELAY='30'
- - CI_DOCKERENV='TZ=US/Pacific|LSIO_FIRST_PARTY=true'
+ - CI_DOCKERENV='LSIO_FIRST_PARTY=true'
- - CI_AUTH='user:password'
+ - CI_AUTH=''
- CI_WEBPATH=''

@@ -1,45 +1,51 @@
NAME VERSION TYPE
- alpine-baselayout 3.4.3-r1 apk
- alpine-baselayout-data 3.4.3-r1 apk
- alpine-keys 2.4-r1 apk
- alpine-release 3.18.3-r0 apk
- apk-tools 2.14.0-r2 apk
- bash 5.2.15-r5 apk
- brotli-libs 1.0.9-r14 apk
- busybox 1.36.1 binary
- busybox 1.36.1-r2 apk
- busybox-binsh 1.36.1-r2 apk
- ca-certificates 20230506-r0 apk
- ca-certificates-bundle 20230506-r0 apk
- coreutils 9.3-r1 apk
- curl 8.2.1-r0 apk
- jq 1.6-r3 apk
- libacl 2.3.1-r3 apk
- libattr 2.5.1-r4 apk
- libbsd 0.11.7-r1 apk
- libc-utils 0.7.2-r5 apk
- libcrypto3 3.1.2-r0 apk
- libcurl 8.2.1-r0 apk
- libidn2 2.3.4-r1 apk
- libintl 0.21.1-r7 apk
- libmd 1.0.4-r2 apk
- libncursesw 6.4_p20230506-r0 apk
- libproc2 4.0.3-r1 apk
- libssl3 3.1.2-r0 apk
- libunistring 1.1-r1 apk
- linux-pam 1.5.2-r10 apk
- musl 1.2.4-r1 apk
- musl-utils 1.2.4-r1 apk
- ncurses-terminfo-base 6.4_p20230506-r0 apk
- netcat-openbsd 1.219-r1 apk
- nghttp2-libs 1.55.1-r0 apk
- oniguruma 6.9.8-r1 apk
- procps-ng 4.0.3-r1 apk
- readline 8.2.1-r1 apk
- scanelf 1.3.7-r1 apk
- shadow 4.13-r4 apk
- skalibs 2.13.1.1-r1 apk
- ssl_client 1.36.1-r2 apk
- tzdata 2023c-r1 apk
- utmps-libs 0.1.2.1-r1 apk
- zlib 1.2.13-r1 apk
+ acl-libs 2.3.2-r1 apk
+ alpine-baselayout 3.6.8-r1 apk
+ alpine-baselayout-data 3.6.8-r1 apk
+ alpine-keys 2.5-r0 apk
+ alpine-release 3.21.3-r0 apk
+ apk-tools 2.14.6-r3 apk
+ bash 5.2.37-r0 apk
+ brotli-libs 1.1.0-r2 apk
+ busybox 1.37.0-r12 apk
+ busybox-binsh 1.37.0-r12 apk
+ c-ares 1.34.5-r0 apk
+ ca-certificates 20241121-r1 apk
+ ca-certificates-bundle 20241121-r1 apk
+ catatonit 0.2.0-r0 apk
+ coreutils 9.5-r2 apk
+ coreutils-env 9.5-r2 apk
+ coreutils-fmt 9.5-r2 apk
+ coreutils-sha512sum 9.5-r2 apk
+ curl 8.12.1-r1 apk
+ findutils 4.10.0-r0 apk
+ jq 1.7.1-r0 apk
+ libattr 2.5.2-r2 apk
+ libbsd 0.12.2-r0 apk
+ libcrypto3 3.3.3-r0 apk
+ libcurl 8.12.1-r1 apk
+ libidn2 2.3.7-r0 apk
+ libintl 0.22.5-r0 apk
+ libmd 1.1.0-r0 apk
+ libncursesw 6.5_p20241006-r3 apk
+ libproc2 4.0.4-r2 apk
+ libpsl 0.21.5-r3 apk
+ libssl3 3.3.3-r0 apk
+ libunistring 1.2-r0 apk
+ linux-pam 1.6.1-r1 apk
+ musl 1.2.5-r9 apk
+ musl-utils 1.2.5-r9 apk
+ ncurses-terminfo-base 6.5_p20241006-r3 apk
+ netcat-openbsd 1.226.1.1-r0 apk
+ nghttp2-libs 1.64.0-r0 apk
+ oniguruma 6.9.9-r0 apk
+ procps-ng 4.0.4-r2 apk
+ readline 8.2.13-r0 apk
+ scanelf 1.3.8-r1 apk
+ shadow 4.16.0-r1 apk
+ skalibs-libs 2.14.3.0-r0 apk
+ ssl_client 1.37.0-r12 apk
+ tzdata 2025b-r0 apk
+ utmps-libs 0.1.2.3-r2 apk
+ zlib 1.3.1-r2 apk
+ zstd-libs 1.5.6-r2 apk

@@ -4,29 +4,22 @@
project_name: baseimage-alpine
full_custom_readme: |
{% raw -%}
- [linuxserverurl]: https://linuxserver.io
- [forumurl]: https://forum.linuxserver.io
- [ircurl]: https://mibbit.com/?server=irc.libera.chat&channel=%23linuxserver.io
- [appurl]: https://alpinelinux.org
- [![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png?v=4&s=4000)][linuxserverurl]
+ [![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png)](https://linuxserver.io)
## Contact information:-
| Type | Address/Details |
| :---: | --- |
- | Discord | [Discord](https://discord.gg/YWrKVTn) |
+ | Discord | [Discord](https://linuxserver.io/discord) |
- | IRC | libera at `#linuxserver.io` more information at:- [IRC][ircurl]
+ | IRC | `#linuxserver.io` on irc.libera.chat |
- | Forum | [LinuxServer.io forum][forumurl] |
+ | Forum | [Discourse](https://discourse.linuxserver.io/) |
- &nbsp;
- &nbsp;
- [![](https://images.microbadger.com/badges/image/lsiobase/alpine.svg)](https://microbadger.com/images/lsiobase/alpine "Get your own image badge on microbadger.com")
- [![](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/Dockerfile-Link-green.png)](https://github.com/linuxserver/docker-baseimage-alpine/blob/master/Dockerfile)
- A custom base image built with [Alpine linux][appurl] and [S6 overlay](https://github.com/just-containers/s6-overlay)..
+ A custom base image built with [Alpine Linux](https://alpinelinux.org) and [s6-overlay](https://github.com/just-containers/s6-overlay).
+ - Support for using our base images in your own projects is provided on a Reasonable Endeavours basis, please see our [Support Policy](https://www.linuxserver.io/supportpolicy) for details.
+ - There is no `latest` tag for any of our base images, by design. We often make breaking changes between versions, and we don't publish release notes like we do for the downstream images.
+ - If you're intending to distribute an image using one of our bases, please read our [docs on container branding](https://docs.linuxserver.io/general/container-branding/) first.
+ - Alpine releases are supported for 2 years, after which we will stop building new base images for that version.
The following line is only in this repo for loop testing:

@@ -1,12 +1,3 @@
- ───────────────────────────────────────
- _____ __ __ _____ _____ _____ _____
- | | | | __|_ _| | |
- | --| | |__ | | | | | | | | |
- |_____|_____|_____| |_| |_____|_|_|_|
- _____ __ __ _ __ ____
- | __ | | | | | | \
- | __ -| | | | |__| | |
- |_____|_____|_|_____|____/
- Based on images from linuxserver.io
- ───────────────────────────────────────
+ ───────
+ meatbag
+ ───────

@@ -4,10 +4,21 @@
PUID=${PUID:-911}
PGID=${PGID:-911}
- groupmod -o -g "$PGID" abc
- usermod -o -u "$PUID" abc
- usermod -d "/root" abc
- cat /etc/s6-overlay/s6-rc.d/init-adduser/branding
+ if [[ -z ${LSIO_READ_ONLY_FS} ]] && [[ -z ${LSIO_NON_ROOT_USER} ]]; then
+ USERHOME=$(grep abc /etc/passwd | cut -d ":" -f6)
+ groupmod -o -g "${PGID}" abc
+ usermod -o -u "${PUID}" abc
+ usermod -d "${USERHOME}" abc
+ fi
+ if { [[ -z ${LSIO_READ_ONLY_FS} ]] && [[ -z ${LSIO_NON_ROOT_USER} ]]; } || [[ ! ${LSIO_FIRST_PARTY} = "true" ]]; then
+ cat /etc/s6-overlay/s6-rc.d/init-adduser/branding
+ else
+ cat /run/branding
+ fi
if [[ -f /donate.txt ]]; then
echo '
@@ -21,26 +32,26 @@ https://www.linuxserver.io/donate/
───────────────────────────────────────
GID/UID
───────────────────────────────────────'
+ if [[ -z ${LSIO_NON_ROOT_USER} ]]; then
echo "
User UID: $(id -u abc)
User GID: $(id -g abc)
- ───────────────────────────────────────
- "
+ ───────────────────────────────────────"
+ else
+ echo "
+ User UID: $(stat /run -c %u)
+ User GID: $(stat /run -c %g)
+ ───────────────────────────────────────"
+ fi
- if [[ "$(uname -m)" == "armv7l" ]] || [[ "$(uname -m)" == "armhf" ]]; then
+ if [[ -f /build_version ]]; then
+ cat /build_version
echo '
- ╔═════════════════════════════════════════════════════╗
- ║ ║
- ║ The 32-bit Arm version of this image will ║
- ║ no longer be supported after 2023-07-01 ║
- ║ ║
- ║ See: https://linuxserver.io/armhf ║
- ║ for more details ║
- ║ ║
- ╚═════════════════════════════════════════════════════╝
- '
+ ───────────────────────────────────────
+ '
fi
- lsiown abc:abc /app
- lsiown abc:abc /config
- lsiown abc:abc /defaults
+ if [[ -z ${LSIO_READ_ONLY_FS} ]] && [[ -z ${LSIO_NON_ROOT_USER} ]]; then
+ lsiown abc:abc /app
+ lsiown abc:abc /config
+ lsiown abc:abc /defaults
+ fi
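The reworked init-adduser step only remaps the abc account when neither LSIO_READ_ONLY_FS nor LSIO_NON_ROOT_USER is set, taking the target IDs from PUID and PGID as before. A minimal usage sketch (the image tag and the 1000:1000 IDs are just examples, not values taken from this diff):

# On start-up, init-adduser remaps abc to UID/GID 1000:1000 and prints the
# User UID / User GID banner shown in the script above.
docker run --rm \
  -e PUID=1000 \
  -e PGID=1000 \
  ghcr.io/linuxserver/baseimage-alpine:3.21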

@@ -2,6 +2,13 @@
# shellcheck shell=bash
for cron_user in abc root; do
+ if [[ -z ${LSIO_READ_ONLY_FS} ]] && [[ -z ${LSIO_NON_ROOT_USER} ]]; then
+ if [[ -f "/etc/crontabs/${cron_user}" ]]; then
+ lsiown "${cron_user}":"${cron_user}" "/etc/crontabs/${cron_user}"
+ crontab -u "${cron_user}" "/etc/crontabs/${cron_user}"
+ fi
+ fi
if [[ -f "/defaults/crontabs/${cron_user}" ]]; then
# make folders
mkdir -p \
@@ -10,7 +17,7 @@ for cron_user in abc root; do
# if crontabs do not exist in config
if [[ ! -f "/config/crontabs/${cron_user}" ]]; then
# copy crontab from system
- if crontab -l -u "${cron_user}" >/dev/null; then
+ if crontab -l -u "${cron_user}" >/dev/null 2>&1; then
crontab -l -u "${cron_user}" >"/config/crontabs/${cron_user}"
fi

@@ -1,17 +1,19 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
- if find /run/s6/container_environment/*"FILE__"* -maxdepth 1 > /dev/null 2>&1; then
+ if find /run/s6/container_environment/FILE__* -maxdepth 1 > /dev/null 2>&1; then
- for FILENAME in /run/s6/container_environment/*; do
+ for FILENAME in /run/s6/container_environment/FILE__*; do
- if [[ "${FILENAME##*/}" == "FILE__"* ]]; then
SECRETFILE=$(cat "${FILENAME}")
if [[ -f ${SECRETFILE} ]]; then
FILESTRIP=${FILENAME//FILE__/}
+ if [[ $(tail -n1 "${SECRETFILE}" | wc -l) != 0 ]]; then
+ echo "[env-init] Your secret: ${FILENAME##*/}"
+ echo " contains a trailing newline and may not work as expected"
+ fi
cat "${SECRETFILE}" >"${FILESTRIP}"
echo "[env-init] ${FILESTRIP##*/} set from ${FILENAME##*/}"
else
echo "[env-init] cannot find secret in ${FILENAME##*/}"
fi
- fi
done
fi
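The env-file init script now only walks entries whose names start with FILE__ and warns when the referenced secret ends in a trailing newline; the file's contents become the value of the variable with the FILE__ prefix stripped. A minimal usage sketch, with a hypothetical variable name and mount path:

# FILE__MYSECRET points at a mounted file; at start-up init-envfile writes its
# contents into MYSECRET, warning if the file ends with a newline.
docker run --rm \
  -e FILE__MYSECRET=/run/secrets/mysecret \
  -v /path/on/host/mysecret:/run/secrets/mysecret:ro \
  ghcr.io/linuxserver/baseimage-alpine:3.21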

@@ -18,7 +18,6 @@ for MIGRATION in $(find ${MIGRATIONS_DIR}/* | sort -n); do
continue
fi
echo "[migrations] ${NAME}: executing..."
- chmod +x "${MIGRATION}"
# Execute migration script in a subshell to prevent it from modifying the current environment
("${MIGRATION}")
EXIT_CODE=$?

@@ -1,8 +1,8 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
- if builtin command -v crontab >/dev/null && [[ -n "$(crontab -l -u abc)" || -n "$(crontab -l -u root)" ]]; then
+ if builtin command -v crontab >/dev/null 2>&1 && [[ -n "$(crontab -l -u abc 2>/dev/null || true)" || -n "$(crontab -l -u root 2>/dev/null || true)" ]]; then
- if builtin command -v busybox >/dev/null && [[ $(busybox) =~ [[:space:]](crond)([,]|$) ]]; then
+ if builtin command -v busybox >/dev/null 2>&1 && [[ $(busybox || true) =~ [[:space:]](crond)([,]|$) ]]; then
exec busybox crond -f -S -l 5
elif [[ -f /usr/bin/apt ]] && [[ -f /usr/sbin/cron ]]; then
exec /usr/sbin/cron -f -L 5