Mirror of https://github.com/zabbix/zabbix-docker.git, synced 2025-02-17 18:30:50 +01:00
Merge branch '5.0' into workflow_5.0
commit 957e461336
.github/scripts/rhel_description.py (vendored, new file, 48 lines added)
@@ -0,0 +1,48 @@
import sys
import requests
import json
import markdown
import os

repository_description = None

if ("DESCRIPTION_FILE" not in os.environ or len(os.environ["DESCRIPTION_FILE"]) == 0):
    print("::error::Description file environment variable is not specified")
    sys.exit(1)
if ("PYXIS_API_TOKEN" not in os.environ or len(os.environ["PYXIS_API_TOKEN"]) == 0):
    print("::error::API token environment variable is not specified")
    sys.exit(1)
if ("API_URL" not in os.environ or len(os.environ["API_URL"]) == 0):
    print("::error::API URL environment variable is not specified")
    sys.exit(1)
if ("PROJECT_ID" not in os.environ or len(os.environ["PROJECT_ID"]) == 0):
    print("RedHat project ID environment variable is not specified")
    sys.exit(1)

if (os.path.isfile(os.environ["DESCRIPTION_FILE"] + '.md')):
    file = open(os.environ["DESCRIPTION_FILE"] + '.md', mode='r')
    markdown_data = file.read()
    file.close()
    repository_description = markdown.markdown(markdown_data)
elif (os.path.isfile(os.environ["DESCRIPTION_FILE"] + '.html')):
    file = open(os.environ["DESCRIPTION_FILE"] + '.html', mode='r')
    repository_description = file.read()
    file.close()

if (repository_description is None or len(repository_description) == 0):
    print("::error::No description file found")
    sys.exit(1)

data = dict()
data['container'] = dict()
data['container']['repository_description'] = repository_description[:32768]

headers = {'accept' : 'application/json', 'X-API-KEY' : os.environ["PYXIS_API_TOKEN"], 'Content-Type' : 'application/json'}
result = requests.patch(os.environ["API_URL"] + os.environ["PROJECT_ID"],
                        headers = headers,
                        data = json.dumps(data))

print("::group::Result")
print("Response code: " + str(result.status_code))
print("Last update date: " + json.loads(result.content)['last_update_date'])
print("::endgroup::")
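
For illustration, the script could be exercised locally along the following lines. This is only a sketch: every value below is a placeholder assumption rather than configuration taken from this commit, and DESCRIPTION_FILE is passed without an extension because the script appends '.md' or '.html' itself.

# Hypothetical local test run of .github/scripts/rhel_description.py.
# All values below are placeholders, not the real workflow configuration.
import os
import subprocess

env = dict(os.environ)
env.update({
    "DESCRIPTION_FILE": "Dockerfiles/agent2/rhel/README",   # script appends '.md' or '.html'
    "PYXIS_API_TOKEN": "<api-token>",                        # placeholder secret
    "API_URL": "https://example.invalid/projects/",          # placeholder; PROJECT_ID is appended to it
    "PROJECT_ID": "<certification-project-id>",              # placeholder
})

subprocess.run(["python3", ".github/scripts/rhel_description.py"], env=env, check=True)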
.github/workflows/dependency-review.yml (vendored, 3 changed lines)
@@ -11,6 +11,7 @@ on: [pull_request]
 
 permissions:
   contents: read
+  pull-requests: write
 
 jobs:
   dependency-review:
@@ -28,4 +29,4 @@ jobs:
       - name: 'Checkout Repository'
         uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
       - name: 'Dependency Review'
-        uses: actions/dependency-review-action@80f10bf419f34980065523f5efca7ebed17576aa # v4.1.0
+        uses: actions/dependency-review-action@be8bc500ee15e96754d2a6f2d34be14e945a46f3 # v4.1.2
.github/workflows/images_build.yml (vendored, 445 changed lines)
@@ -28,7 +28,7 @@ permissions:
 
 env:
   TRUNK_ONLY_EVENT: ${{ contains(fromJSON('["schedule"]'), github.event_name) }}
-  AUTO_PUSH_IMAGES: ${{ vars.AUTO_PUSH_IMAGES }}
+  AUTO_PUSH_IMAGES: ${{ ! contains(fromJSON('["workflow_dispatch"]'), github.event_name) && vars.AUTO_PUSH_IMAGES }}
 
   DOCKER_REPOSITORY: ${{ vars.DOCKER_REPOSITORY }}
   LATEST_BRANCH: ${{ github.event.repository.default_branch }}
@@ -36,12 +36,17 @@ env:
   IMAGES_PREFIX: "zabbix-"
 
   BASE_BUILD_NAME: "build-base"
+  BASE_CACHE_FILE_NAME: "base_image_metadata.json"
+  BUILD_CACHE_FILE_NAME: "base_build_image_metadata.json"
 
   MATRIX_FILE: "build.json"
   DOCKERFILES_DIRECTORY: "./Dockerfiles"
 
   OIDC_ISSUER: "https://token.actions.githubusercontent.com"
-  IDENITY_REGEX: "https://github.com/zabbix/zabbix-docker/.github/"
+  IDENTITY_REGEX: "https://github.com/zabbix/zabbix-docker/.github/"
 
+  DOCKER_REGISTRY_TEST: "ghcr.io"
+  DOCKER_REPOSITORY_TEST: "zabbix"
+
 jobs:
   init_build:
@@ -63,7 +68,9 @@ jobs:
           disable-sudo: true
           egress-policy: block
           allowed-endpoints: >
+            api.github.com:443
             github.com:443
+            objects.githubusercontent.com:443
 
       - name: Checkout repository
         uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -163,6 +170,7 @@ jobs:
     permissions:
       contents: read
       id-token: write
+      packages: write
     steps:
      - name: Block egress traffic
        uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
@ -177,23 +185,32 @@ jobs:
|
|||||||
atl.mirrors.knownhost.com:443
|
atl.mirrors.knownhost.com:443
|
||||||
atl.mirrors.knownhost.com:80
|
atl.mirrors.knownhost.com:80
|
||||||
auth.docker.io:443
|
auth.docker.io:443
|
||||||
|
bungi.mm.fcix.net:443
|
||||||
|
bungi.mm.fcix.net:80
|
||||||
cdn03.quay.io:443
|
cdn03.quay.io:443
|
||||||
centos-distro.1gservers.com:80
|
centos-distro.1gservers.com:80
|
||||||
|
centos-distro.cavecreek.net:80
|
||||||
centos-stream-distro.1gservers.com:443
|
centos-stream-distro.1gservers.com:443
|
||||||
centos-stream-distro.1gservers.com:80
|
centos-stream-distro.1gservers.com:80
|
||||||
centos.hivelocity.net:80
|
centos.hivelocity.net:80
|
||||||
centos.mirror.constant.com:80
|
centos.mirror.constant.com:80
|
||||||
centos.mirror.shastacoe.net:80
|
centos.mirror.shastacoe.net:80
|
||||||
|
coresite.mm.fcix.net:80
|
||||||
|
d2lzkl7pfhq30w.cloudfront.net:443
|
||||||
dfw.mirror.rackspace.com:443
|
dfw.mirror.rackspace.com:443
|
||||||
dfw.mirror.rackspace.com:80
|
dfw.mirror.rackspace.com:80
|
||||||
distro.ibiblio.org:80
|
distro.ibiblio.org:80
|
||||||
dl-cdn.alpinelinux.org:443
|
dl-cdn.alpinelinux.org:443
|
||||||
dl.google.com:443
|
|
||||||
download.cf.centos.org:443
|
download.cf.centos.org:443
|
||||||
download.cf.centos.org:80
|
download.cf.centos.org:80
|
||||||
epel.gb.ssimn.org:443
|
epel.gb.ssimn.org:443
|
||||||
epel.mirror.constant.com:443
|
epel.mirror.constant.com:443
|
||||||
epel.mirror.constant.com:80
|
epel.mirror.constant.com:80
|
||||||
|
epel.stl.us.ssimn.org:443
|
||||||
|
epel.stl.us.ssimn.org:80
|
||||||
|
fedora.nyherji.is:443
|
||||||
|
fedora.nyherji.is:80
|
||||||
|
forksystems.mm.fcix.net:443
|
||||||
forksystems.mm.fcix.net:80
|
forksystems.mm.fcix.net:80
|
||||||
ftp-nyc.osuosl.org:443
|
ftp-nyc.osuosl.org:443
|
||||||
ftp-nyc.osuosl.org:80
|
ftp-nyc.osuosl.org:80
|
||||||
@ -202,31 +219,44 @@ jobs:
|
|||||||
ftp.agdsn.de:443
|
ftp.agdsn.de:443
|
||||||
ftp.agdsn.de:80
|
ftp.agdsn.de:80
|
||||||
ftp.fau.de:443
|
ftp.fau.de:443
|
||||||
|
ftp.fau.de:80
|
||||||
ftp.halifax.rwth-aachen.de:443
|
ftp.halifax.rwth-aachen.de:443
|
||||||
ftp.halifax.rwth-aachen.de:80
|
ftp.halifax.rwth-aachen.de:80
|
||||||
ftp.osuosl.org:80
|
ftp.osuosl.org:80
|
||||||
ftp.plusline.net:443
|
ftp.plusline.net:443
|
||||||
ftp.plusline.net:80
|
ftp.plusline.net:80
|
||||||
|
ftp.uni-stuttgart.de:80
|
||||||
ftpmirror.your.org:80
|
ftpmirror.your.org:80
|
||||||
fulcio.sigstore.dev:443
|
fulcio.sigstore.dev:443
|
||||||
|
ghcr.io:443
|
||||||
github.com:443
|
github.com:443
|
||||||
iad.mirror.rackspace.com:443
|
iad.mirror.rackspace.com:443
|
||||||
iad.mirror.rackspace.com:80
|
iad.mirror.rackspace.com:80
|
||||||
|
ima.mm.fcix.net:80
|
||||||
index.docker.io:443
|
index.docker.io:443
|
||||||
|
ix-denver.mm.fcix.net:443
|
||||||
ix-denver.mm.fcix.net:80
|
ix-denver.mm.fcix.net:80
|
||||||
keyserver.ubuntu.com:11371
|
|
||||||
la.mirrors.clouvider.net:80
|
la.mirrors.clouvider.net:80
|
||||||
lesnet.mm.fcix.net:443
|
lesnet.mm.fcix.net:443
|
||||||
lesnet.mm.fcix.net:80
|
lesnet.mm.fcix.net:80
|
||||||
|
level66.mm.fcix.net:80
|
||||||
linux-mirrors.fnal.gov:80
|
linux-mirrors.fnal.gov:80
|
||||||
|
linux.cc.lehigh.edu:80
|
||||||
|
linux.mirrors.es.net:80
|
||||||
mirror-mci.yuki.net.uk:443
|
mirror-mci.yuki.net.uk:443
|
||||||
mirror-mci.yuki.net.uk:80
|
mirror-mci.yuki.net.uk:80
|
||||||
mirror.23m.com:443
|
mirror.23m.com:443
|
||||||
|
mirror.23m.com:80
|
||||||
mirror.arizona.edu:443
|
mirror.arizona.edu:443
|
||||||
mirror.arizona.edu:80
|
mirror.arizona.edu:80
|
||||||
mirror.ash.fastserv.com:80
|
mirror.ash.fastserv.com:80
|
||||||
|
mirror.centos.iad1.serverforge.org:80
|
||||||
mirror.chpc.utah.edu:80
|
mirror.chpc.utah.edu:80
|
||||||
mirror.clarkson.edu:80
|
mirror.clarkson.edu:80
|
||||||
|
mirror.cogentco.com:80
|
||||||
|
mirror.dal.nexril.net:443
|
||||||
|
mirror.dal.nexril.net:80
|
||||||
|
mirror.datto.com:80
|
||||||
mirror.de.leaseweb.net:443
|
mirror.de.leaseweb.net:443
|
||||||
mirror.de.leaseweb.net:80
|
mirror.de.leaseweb.net:80
|
||||||
mirror.dogado.de:443
|
mirror.dogado.de:443
|
||||||
@ -235,13 +265,19 @@ jobs:
|
|||||||
mirror.facebook.net:443
|
mirror.facebook.net:443
|
||||||
mirror.facebook.net:80
|
mirror.facebook.net:80
|
||||||
mirror.fcix.net:443
|
mirror.fcix.net:443
|
||||||
|
mirror.fcix.net:80
|
||||||
mirror.genesishosting.com:80
|
mirror.genesishosting.com:80
|
||||||
mirror.grid.uchicago.edu:80
|
mirror.grid.uchicago.edu:80
|
||||||
mirror.hoobly.com:443
|
mirror.hoobly.com:443
|
||||||
mirror.hoobly.com:80
|
mirror.hoobly.com:80
|
||||||
|
mirror.imt-systems.com:443
|
||||||
|
mirror.imt-systems.com:80
|
||||||
mirror.keystealth.org:80
|
mirror.keystealth.org:80
|
||||||
|
mirror.lstn.net:443
|
||||||
|
mirror.lstn.net:80
|
||||||
mirror.math.princeton.edu:443
|
mirror.math.princeton.edu:443
|
||||||
mirror.math.princeton.edu:80
|
mirror.math.princeton.edu:80
|
||||||
|
mirror.metrocast.net:443
|
||||||
mirror.metrocast.net:80
|
mirror.metrocast.net:80
|
||||||
mirror.netcologne.de:443
|
mirror.netcologne.de:443
|
||||||
mirror.netcologne.de:80
|
mirror.netcologne.de:80
|
||||||
@ -256,6 +292,7 @@ jobs:
|
|||||||
mirror.scaleuptech.com:80
|
mirror.scaleuptech.com:80
|
||||||
mirror.servaxnet.com:443
|
mirror.servaxnet.com:443
|
||||||
mirror.servaxnet.com:80
|
mirror.servaxnet.com:80
|
||||||
|
mirror.sfo12.us.leaseweb.net:443
|
||||||
mirror.sfo12.us.leaseweb.net:80
|
mirror.sfo12.us.leaseweb.net:80
|
||||||
mirror.siena.edu:80
|
mirror.siena.edu:80
|
||||||
mirror.steadfastnet.com:80
|
mirror.steadfastnet.com:80
|
||||||
@ -270,6 +307,7 @@ jobs:
|
|||||||
mirror.vacares.com:80
|
mirror.vacares.com:80
|
||||||
mirror.vtti.vt.edu:80
|
mirror.vtti.vt.edu:80
|
||||||
mirror.wdc2.us.leaseweb.net:80
|
mirror.wdc2.us.leaseweb.net:80
|
||||||
|
mirror.web-ster.com:80
|
||||||
mirror1.hs-esslingen.de:443
|
mirror1.hs-esslingen.de:443
|
||||||
mirror1.hs-esslingen.de:80
|
mirror1.hs-esslingen.de:80
|
||||||
mirrorlist.centos.org:80
|
mirrorlist.centos.org:80
|
||||||
@ -296,29 +334,39 @@ jobs:
|
|||||||
mirrors.syringanetworks.net:80
|
mirrors.syringanetworks.net:80
|
||||||
mirrors.tscak.com:80
|
mirrors.tscak.com:80
|
||||||
mirrors.unifiedlayer.com:80
|
mirrors.unifiedlayer.com:80
|
||||||
|
mirrors.vcea.wsu.edu:80
|
||||||
mirrors.wcupa.edu:443
|
mirrors.wcupa.edu:443
|
||||||
mirrors.wcupa.edu:80
|
mirrors.wcupa.edu:80
|
||||||
|
mirrors.xmission.com:80
|
||||||
mirrors.xtom.com:80
|
mirrors.xtom.com:80
|
||||||
mirrors.xtom.de:443
|
mirrors.xtom.de:443
|
||||||
mirrors.xtom.de:80
|
mirrors.xtom.de:80
|
||||||
mnvoip.mm.fcix.net:80
|
mnvoip.mm.fcix.net:80
|
||||||
na.edge.kernel.org:443
|
na.edge.kernel.org:443
|
||||||
nc-centos-mirror.iwebfusion.net:80
|
nc-centos-mirror.iwebfusion.net:80
|
||||||
|
nnenix.mm.fcix.net:443
|
||||||
nnenix.mm.fcix.net:80
|
nnenix.mm.fcix.net:80
|
||||||
nocix.mm.fcix.net:443
|
nocix.mm.fcix.net:443
|
||||||
nocix.mm.fcix.net:80
|
nocix.mm.fcix.net:80
|
||||||
|
nyc.mirrors.clouvider.net:80
|
||||||
oauth2.sigstore.dev:443
|
oauth2.sigstore.dev:443
|
||||||
objects.githubusercontent.com:443
|
objects.githubusercontent.com:443
|
||||||
|
ohioix.mm.fcix.net:443
|
||||||
ohioix.mm.fcix.net:80
|
ohioix.mm.fcix.net:80
|
||||||
opencolo.mm.fcix.net:443
|
opencolo.mm.fcix.net:443
|
||||||
opencolo.mm.fcix.net:80
|
opencolo.mm.fcix.net:80
|
||||||
or-mirror.iwebfusion.net:80
|
or-mirror.iwebfusion.net:80
|
||||||
packages.oit.ncsu.edu:80
|
packages.oit.ncsu.edu:80
|
||||||
paducahix.mm.fcix.net:80
|
paducahix.mm.fcix.net:80
|
||||||
|
pkg-containers.githubusercontent.com:443
|
||||||
ports.ubuntu.com:443
|
ports.ubuntu.com:443
|
||||||
ports.ubuntu.com:80
|
ports.ubuntu.com:80
|
||||||
production.cloudflare.docker.com:443
|
production.cloudflare.docker.com:443
|
||||||
|
pubmirror1.math.uh.edu:443
|
||||||
|
pubmirror1.math.uh.edu:80
|
||||||
pubmirror2.math.uh.edu:80
|
pubmirror2.math.uh.edu:80
|
||||||
|
pubmirror3.math.uh.edu:443
|
||||||
|
pubmirror3.math.uh.edu:80
|
||||||
quay.io:443
|
quay.io:443
|
||||||
registry-1.docker.io:443
|
registry-1.docker.io:443
|
||||||
rekor.sigstore.dev:443
|
rekor.sigstore.dev:443
|
||||||
@ -333,12 +381,14 @@ jobs:
|
|||||||
scientificlinux.physik.uni-muenchen.de:80
|
scientificlinux.physik.uni-muenchen.de:80
|
||||||
security.ubuntu.com:443
|
security.ubuntu.com:443
|
||||||
security.ubuntu.com:80
|
security.ubuntu.com:80
|
||||||
|
southfront.mm.fcix.net:443
|
||||||
southfront.mm.fcix.net:80
|
southfront.mm.fcix.net:80
|
||||||
tuf-repo-cdn.sigstore.dev:443
|
tuf-repo-cdn.sigstore.dev:443
|
||||||
tx-mirror.tier.net:80
|
tx-mirror.tier.net:80
|
||||||
us.mirrors.virtono.com:80
|
us.mirrors.virtono.com:80
|
||||||
uvermont.mm.fcix.net:443
|
uvermont.mm.fcix.net:443
|
||||||
uvermont.mm.fcix.net:80
|
uvermont.mm.fcix.net:80
|
||||||
|
volico.mm.fcix.net:443
|
||||||
volico.mm.fcix.net:80
|
volico.mm.fcix.net:80
|
||||||
www.gtlib.gatech.edu:80
|
www.gtlib.gatech.edu:80
|
||||||
yum.oracle.com:443
|
yum.oracle.com:443
|
||||||
@@ -352,11 +402,13 @@ jobs:
           fetch-depth: 1
 
       - name: Install cosign
+        if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
         uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4
         with:
           cosign-release: 'v2.2.3'
 
       - name: Check cosign version
+        if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
         run: cosign version
 
       - name: Set up QEMU
@@ -370,12 +422,6 @@ jobs:
         with:
           driver-opts: image=moby/buildkit:master
 
-      - name: Login to DockerHub
-        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-
       - name: Prepare Platform list
         id: platform
         env:
@@ -395,37 +441,85 @@ jobs:
         id: meta
         uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
         with:
-          images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_BUILD_NAME }}
+          images: |
+            ${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY_TEST, env.DOCKER_REPOSITORY_TEST, env.IMAGES_PREFIX, env.BASE_BUILD_NAME ) }},enable=${{ env.AUTO_PUSH_IMAGES != 'true' }}
+            ${{ format('{0}/{1}{2}', env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, env.BASE_BUILD_NAME ) }},enable=${{ env.AUTO_PUSH_IMAGES == 'true' }}
           context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
           tags: |
            type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ matrix.os }}-
            type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},suffix=-${{ matrix.os }}
-            type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,prefix=${{ matrix.os }}-,suffix=-latest
+            type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' && !contains(fromJSON('["workflow_dispatch"]'), github.event_name) }},event=branch,prefix=${{ matrix.os }}-,suffix=-latest
-            type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,suffix=-${{ matrix.os }}-latest
+            type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' && !contains(fromJSON('["workflow_dispatch"]'), github.event_name) }},event=branch,suffix=-${{ matrix.os }}-latest
            type=raw,enable=${{ (needs.init_build.outputs.current_branch != 'trunk') && (needs.init_build.outputs.is_default_branch == 'true') }},value=${{matrix.os}}-latest
            type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,prefix=${{ matrix.os }}-
-            type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,suffix=-${{ matrix.os }}
+            type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' || contains(fromJSON('["workflow_dispatch"]'), github.event_name) }},event=branch,suffix=-${{ matrix.os }}
           flavor: |
-            latest=${{ (needs.init_build.outputs.current_branch != 'trunk') && (matrix.os == 'alpine') && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
+            latest=${{ (matrix.os == 'alpine') && (!contains(fromJSON('["workflow_dispatch"]'), github.event_name)) && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
 
+      - name: Prepare cache data
+        id: cache_data
+        env:
+          IMAGE_TAG: ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
+          PUBLISH_IMAGES: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
+        run: |
+          cache_from=()
+          cache_to=()
+
+          cache_from+=("type=gha,scope=${IMAGE_TAG}")
+          #cache_from+=("type=registry,ref=${IMAGE_TAG}")
+
+          cache_to+=("type=gha,mode=max,scope=${IMAGE_TAG}")
+
+          echo "::group::Cache from data"
+          echo "${cache_from[*]}"
+          echo "::endgroup::"
+
+          echo "::group::Cache to data"
+          echo "${cache_to[*]}"
+          echo "::endgroup::"
+
+          cache_from=$(printf '%s\n' "${cache_from[@]}")
+          cache_to=$(printf '%s\n' "${cache_to[@]}")
+
+          echo 'cache_from<<EOF' >> "$GITHUB_OUTPUT"
+          echo "$cache_from" >> "$GITHUB_OUTPUT"
+          echo 'EOF' >> "$GITHUB_OUTPUT"
+          echo 'cache_to<<EOF' >> "$GITHUB_OUTPUT"
+          echo "$cache_to" >> "$GITHUB_OUTPUT"
+          echo 'EOF' >> "$GITHUB_OUTPUT"
+
+      - name: Login to DockerHub
+        if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
+        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Login to ${{ env.DOCKER_REGISTRY_TEST }}
+        if: ${{ env.AUTO_PUSH_IMAGES != 'true' }}
+        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
+        with:
+          registry: ${{ env.DOCKER_REGISTRY_TEST }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
       - name: Build and publish image
         id: docker_build
         uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
         with:
-          context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ env.BASE_BUILD_NAME }}/${{ matrix.os }}
+          context: ${{ format('{0}/{1}/{2}', env.DOCKERFILES_DIRECTORY, env.BASE_BUILD_NAME, matrix.os) }}
-          file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ env.BASE_BUILD_NAME }}/${{ matrix.os }}/Dockerfile
+          file: ${{ format('{0}/{1}/{2}/Dockerfile', env.DOCKERFILES_DIRECTORY, env.BASE_BUILD_NAME, matrix.os) }}
           platforms: ${{ steps.platform.outputs.list }}
-          push: ${{ env.AUTO_PUSH_IMAGES }}
+          push: true
           tags: ${{ steps.meta.outputs.tags }}
           labels: |
            org.opencontainers.image.revision=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
            org.opencontainers.image.created=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
-          cache-from: |
-            type=gha,scope=${{ fromJSON(steps.meta.outputs.json).tags[0] }}
-            type=registry,ref=docker.io/${{ fromJSON(steps.meta.outputs.json).tags[0] }}
-          cache-to: type=gha,mode=max,scope=${{ fromJSON(steps.meta.outputs.json).tags[0] }}
+          cache-from: ${{ steps.cache_data.outputs.cache_from }}
+          cache-to: ${{ steps.cache_data.outputs.cache_to }}
 
       - name: Sign the images with GitHub OIDC Token
+        if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
         env:
           DIGEST: ${{ steps.docker_build.outputs.digest }}
           TAGS: ${{ steps.meta.outputs.tags }}
@@ -444,24 +538,24 @@ jobs:
          cosign sign --yes ${images}
          echo "::endgroup::"
 
-      - name: Image digest
+      - name: Image metadata
        env:
-          DIGEST: ${{ steps.docker_build.outputs.digest }}
-          CACHE_FILE_NAME: ${{ env.BASE_BUILD_NAME }}_${{ matrix.os }}
+          CACHE_FILE_NAME: ${{ env.BASE_CACHE_FILE_NAME }}
+          METADATA: ${{ steps.docker_build.outputs.metadata }}
        run: |
-          echo "::group::Image digest"
-          echo "$DIGEST"
+          echo "::group::Image metadata"
+          echo "${METADATA}"
          echo "::endgroup::"
          echo "::group::Cache file name"
-          echo "$CACHE_FILE_NAME"
+          echo "${CACHE_FILE_NAME}"
          echo "::endgroup::"
 
-          echo "$DIGEST" > "$CACHE_FILE_NAME"
+          echo "${METADATA}" > "$CACHE_FILE_NAME"
 
-      - name: Cache image digest
+      - name: Cache image metadata
        uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
        with:
-          path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.os }}
+          path: ${{ env.BASE_CACHE_FILE_NAME }}
          key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.os }}-${{ github.run_id }}
 
  build_base_database:
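
As an aside, the metadata file cached above is what later jobs in this diff read back with jq ('."containerimage.digest"' and '."image.name"') to pin the base image by digest. A rough Python equivalent of that lookup is sketched below; the file name and sample values are illustrative assumptions, not part of the commit.

# Sketch: resolving the pinned base-image reference from the cached metadata file.
# Keys mirror the jq calls used later in this workflow; values are illustrative.
import json

with open("base_image_metadata.json") as f:
    metadata = json.load(f)

digest = metadata["containerimage.digest"]        # e.g. "sha256:..."
name = metadata["image.name"].split(":", 1)[0]    # mirrors `cut -d: -f1`: drop the tag, keep the repository

base_build_image = f"{name}@{digest}"
print(base_build_image)                           # repository pinned by digest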
@ -473,11 +567,11 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
build: ${{ fromJson(needs.init_build.outputs.database) }}
|
build: ${{ fromJson(needs.init_build.outputs.database) }}
|
||||||
os: ${{ fromJson(needs.init_build.outputs.os) }}
|
os: ${{ fromJson(needs.init_build.outputs.os) }}
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
id-token: write
|
id-token: write
|
||||||
|
packages: write
|
||||||
steps:
|
steps:
|
||||||
- name: Block egress traffic
|
- name: Block egress traffic
|
||||||
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
|
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
|
||||||
@ -494,6 +588,7 @@ jobs:
|
|||||||
golang.org:443
|
golang.org:443
|
||||||
google.golang.org:443
|
google.golang.org:443
|
||||||
gopkg.in:443
|
gopkg.in:443
|
||||||
|
ghcr.io:443
|
||||||
index.docker.io:443
|
index.docker.io:443
|
||||||
noto-website.storage.googleapis.com:443
|
noto-website.storage.googleapis.com:443
|
||||||
production.cloudflare.docker.com:443
|
production.cloudflare.docker.com:443
|
||||||
@ -505,6 +600,7 @@ jobs:
|
|||||||
objects.githubusercontent.com:443
|
objects.githubusercontent.com:443
|
||||||
tuf-repo-cdn.sigstore.dev:443
|
tuf-repo-cdn.sigstore.dev:443
|
||||||
rekor.sigstore.dev:443
|
rekor.sigstore.dev:443
|
||||||
|
pkg-containers.githubusercontent.com:443
|
||||||
|
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||||
@ -513,11 +609,13 @@ jobs:
|
|||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
|
|
||||||
- name: Install cosign
|
- name: Install cosign
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4
|
uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4
|
||||||
with:
|
with:
|
||||||
cosign-release: 'v2.2.3'
|
cosign-release: 'v2.2.3'
|
||||||
|
|
||||||
- name: Check cosign version
|
- name: Check cosign version
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
run: cosign version
|
run: cosign version
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
@ -531,12 +629,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
driver-opts: image=moby/buildkit:master
|
driver-opts: image=moby/buildkit:master
|
||||||
|
|
||||||
- name: Login to DockerHub
|
|
||||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Prepare Platform list
|
- name: Prepare Platform list
|
||||||
id: platform
|
id: platform
|
||||||
env:
|
env:
|
||||||
@ -556,82 +648,127 @@ jobs:
|
|||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
|
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
|
||||||
with:
|
with:
|
||||||
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.build }}
|
images: |
|
||||||
|
${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY_TEST, env.DOCKER_REPOSITORY_TEST, env.IMAGES_PREFIX, matrix.build ) }},enable=${{ env.AUTO_PUSH_IMAGES != 'true' }}
|
||||||
|
${{ format('{0}/{1}{2}', env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, matrix.build ) }},enable=${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
|
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
|
||||||
tags: |
|
tags: |
|
||||||
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ matrix.os }}-
|
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ matrix.os }}-
|
||||||
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},suffix=-${{ matrix.os }}
|
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},suffix=-${{ matrix.os }}
|
||||||
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,prefix=${{ matrix.os }}-,suffix=-latest
|
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' && (!contains(fromJSON('["workflow_dispatch"]'), github.event_name)) }},event=branch,prefix=${{ matrix.os }}-,suffix=-latest
|
||||||
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,suffix=-${{ matrix.os }}-latest
|
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' && (!contains(fromJSON('["workflow_dispatch"]'), github.event_name)) }},event=branch,suffix=-${{ matrix.os }}-latest
|
||||||
type=raw,enable=${{ (needs.init_build.outputs.current_branch != 'trunk') && (needs.init_build.outputs.is_default_branch == 'true') }},value=${{matrix.os}}-latest
|
type=raw,enable=${{ (needs.init_build.outputs.current_branch != 'trunk') && (needs.init_build.outputs.is_default_branch == 'true') }},value=${{matrix.os}}-latest
|
||||||
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,prefix=${{ matrix.os }}-
|
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,prefix=${{ matrix.os }}-
|
||||||
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,suffix=-${{ matrix.os }}
|
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' || contains(fromJSON('["workflow_dispatch"]'), github.event_name) }},event=branch,suffix=-${{ matrix.os }}
|
||||||
flavor: |
|
flavor: |
|
||||||
latest=${{ (needs.init_build.outputs.current_branch != 'trunk') && (matrix.os == 'alpine') && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
|
latest=${{ (matrix.os == 'alpine') && (!contains(fromJSON('["workflow_dispatch"]'), github.event_name)) && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
|
||||||
|
|
||||||
- name: Download SHA256 tag of ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }}
|
- name: Download metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }}
|
||||||
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
|
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
|
||||||
with:
|
with:
|
||||||
path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.os }}
|
path: ${{ env.BASE_CACHE_FILE_NAME }}
|
||||||
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.os }}-${{ github.run_id }}
|
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.os }}-${{ github.run_id }}
|
||||||
|
|
||||||
- name: Retrieve ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }} SHA256 tag
|
- name: Process ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }} image metadata
|
||||||
id: base_build
|
id: base_build
|
||||||
env:
|
env:
|
||||||
MATRIX_OS: ${{ matrix.os }}
|
CACHE_FILE_NAME: ${{ env.BASE_CACHE_FILE_NAME }}
|
||||||
DOCKER_REPOSITORY: ${{ env.DOCKER_REPOSITORY }}
|
|
||||||
BASE_IMAGE: ${{ env.BASE_BUILD_NAME }}
|
|
||||||
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
|
|
||||||
run: |
|
run: |
|
||||||
BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_OS}")
|
echo "::group::Base image metadata"
|
||||||
BUILD_BASE_IMAGE="${DOCKER_REPOSITORY}/${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
|
cat "${CACHE_FILE_NAME}"
|
||||||
|
|
||||||
echo "::group::Base build image information"
|
|
||||||
echo "base_tag=${BASE_TAG}"
|
|
||||||
echo "base_build_image=${BUILD_BASE_IMAGE}"
|
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
|
||||||
echo "base_tag=${BASE_TAG}" >> $GITHUB_OUTPUT
|
IMAGE_DIGEST=$(jq -r '."containerimage.digest"' "${CACHE_FILE_NAME}")
|
||||||
echo "base_build_image=${BUILD_BASE_IMAGE}" >> $GITHUB_OUTPUT
|
IMAGE_NAME=$(jq -r '."image.name"' "${CACHE_FILE_NAME}" | cut -d: -f1)
|
||||||
|
|
||||||
|
echo "base_build_image=${IMAGE_NAME}@${IMAGE_DIGEST}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Verify ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }} cosign
|
- name: Verify ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }} cosign
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
env:
|
env:
|
||||||
BASE_IMAGE: ${{ steps.base_build.outputs.base_build_image }}
|
BASE_IMAGE: ${{ steps.base_build.outputs.base_build_image }}
|
||||||
OIDC_ISSUER: ${{ env.OIDC_ISSUER }}
|
OIDC_ISSUER: ${{ env.OIDC_ISSUER }}
|
||||||
IDENITY_REGEX: ${{ env.IDENITY_REGEX }}
|
IDENTITY_REGEX: ${{ env.IDENTITY_REGEX }}
|
||||||
run: |
|
run: |
|
||||||
echo "::group::Image sign data"
|
echo "::group::Image sign data"
|
||||||
echo "OIDC issuer=$OIDC_ISSUER"
|
echo "OIDC issuer=$OIDC_ISSUER"
|
||||||
echo "Identity=$IDENITY_REGEX"
|
echo "Identity=$IDENTITY_REGEX"
|
||||||
echo "Image to verify=$BASE_IMAGE"
|
echo "Image to verify=$BASE_IMAGE"
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
|
||||||
echo "::group::Verify signature"
|
echo "::group::Verify signature"
|
||||||
cosign verify \
|
cosign verify \
|
||||||
--certificate-oidc-issuer-regexp "$OIDC_ISSUER" \
|
--certificate-oidc-issuer-regexp "$OIDC_ISSUER" \
|
||||||
--certificate-identity-regexp "$IDENITY_REGEX" \
|
--certificate-identity-regexp "$IDENTITY_REGEX" \
|
||||||
"$BASE_IMAGE"
|
"$BASE_IMAGE"
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Prepare cache data
|
||||||
|
id: cache_data
|
||||||
|
env:
|
||||||
|
BASE_IMAGE_TAG: ${{ steps.base_build.outputs.base_build_image }}
|
||||||
|
IMAGE_TAG: ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
|
||||||
|
PUBLISH_IMAGES: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
|
run: |
|
||||||
|
cache_from=()
|
||||||
|
cache_to=()
|
||||||
|
|
||||||
|
cache_from+=("type=gha,scope=${BASE_IMAGE_TAG}")
|
||||||
|
cache_from+=("type=registry,ref=${BASE_IMAGE_TAG}")
|
||||||
|
cache_from+=("type=gha,scope=${IMAGE_TAG}")
|
||||||
|
cache_from+=("type=registry,ref=${IMAGE_TAG}")
|
||||||
|
|
||||||
|
cache_to+=("type=gha,mode=max,scope=${IMAGE_TAG}")
|
||||||
|
|
||||||
|
echo "::group::Cache from data"
|
||||||
|
echo "${cache_from[*]}"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
echo "::group::Cache to data"
|
||||||
|
echo "${cache_to[*]}"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
cache_from=$(printf '%s\n' "${cache_from[@]}")
|
||||||
|
cache_to=$(printf '%s\n' "${cache_to[@]}")
|
||||||
|
|
||||||
|
echo 'cache_from<<EOF' >> "$GITHUB_OUTPUT"
|
||||||
|
echo "$cache_from" >> "$GITHUB_OUTPUT"
|
||||||
|
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||||
|
echo 'cache_to<<EOF' >> "$GITHUB_OUTPUT"
|
||||||
|
echo "$cache_to" >> "$GITHUB_OUTPUT"
|
||||||
|
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Login to DockerHub
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
|
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Login to ${{ env.DOCKER_REGISTRY_TEST }}
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES != 'true' }}
|
||||||
|
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
|
||||||
|
with:
|
||||||
|
registry: ${{ env.DOCKER_REGISTRY_TEST }}
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Build ${{ matrix.build }}/${{ matrix.os }} and push
|
- name: Build ${{ matrix.build }}/${{ matrix.os }} and push
|
||||||
id: docker_build
|
id: docker_build
|
||||||
uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
|
uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
|
||||||
with:
|
with:
|
||||||
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}
|
context: ${{ format('{0}/{1}/{2}/', env.DOCKERFILES_DIRECTORY, matrix.build, matrix.os) }}
|
||||||
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}/Dockerfile
|
file: ${{ format('{0}/{1}/{2}/Dockerfile', env.DOCKERFILES_DIRECTORY, matrix.build, matrix.os) }}
|
||||||
platforms: ${{ steps.platform.outputs.list }}
|
platforms: ${{ steps.platform.outputs.list }}
|
||||||
push: ${{ env.AUTO_PUSH_IMAGES }}
|
push: true
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
|
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
|
||||||
labels: |
|
labels: |
|
||||||
org.opencontainers.image.revision=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
|
org.opencontainers.image.revision=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
|
||||||
org.opencontainers.image.created=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
|
org.opencontainers.image.created=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
|
||||||
cache-from: |
|
|
||||||
type=gha,scope=${{ steps.base_build.outputs.base_build_image }}
|
|
||||||
type=registry,ref=${{ steps.base_build.outputs.base_build_image }}
|
|
||||||
cache-to: type=gha,mode=max,scope=${{ fromJSON(steps.meta.outputs.json).tags[0] }}
|
|
||||||
|
|
||||||
- name: Sign the images with GitHub OIDC Token
|
- name: Sign the images with GitHub OIDC Token
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
env:
|
env:
|
||||||
DIGEST: ${{ steps.docker_build.outputs.digest }}
|
DIGEST: ${{ steps.docker_build.outputs.digest }}
|
||||||
TAGS: ${{ steps.meta.outputs.tags }}
|
TAGS: ${{ steps.meta.outputs.tags }}
|
||||||
@ -650,23 +787,24 @@ jobs:
|
|||||||
cosign sign --yes ${images}
|
cosign sign --yes ${images}
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
|
||||||
- name: Image digest
|
- name: Image metadata
|
||||||
env:
|
env:
|
||||||
DIGEST: ${{ steps.docker_build.outputs.digest }}
|
CACHE_FILE_NAME: ${{ env.BUILD_CACHE_FILE_NAME }}
|
||||||
CACHE_FILE_NAME: ${{ matrix.build }}_${{ matrix.os }}
|
METADATA: ${{ steps.docker_build.outputs.metadata }}
|
||||||
run: |
|
run: |
|
||||||
echo "::group::Image digest"
|
echo "::group::Image metadata"
|
||||||
echo "$DIGEST"
|
echo "${METADATA}"
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
echo "::group::Cache file name"
|
echo "::group::Cache file name"
|
||||||
echo "$CACHE_FILE_NAME"
|
echo "${CACHE_FILE_NAME}"
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
echo "$DIGEST" > $CACHE_FILE_NAME
|
|
||||||
|
|
||||||
- name: Caching SHA256 tag of the image
|
echo "${METADATA}" > "$CACHE_FILE_NAME"
|
||||||
|
|
||||||
|
- name: Cache image metadata
|
||||||
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
|
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
|
||||||
with:
|
with:
|
||||||
path: ${{ matrix.build }}_${{ matrix.os }}
|
path: ${{ env.BUILD_CACHE_FILE_NAME }}
|
||||||
key: ${{ matrix.build }}-${{ matrix.os }}-${{ github.run_id }}
|
key: ${{ matrix.build }}-${{ matrix.os }}-${{ github.run_id }}
|
||||||
|
|
||||||
build_images:
|
build_images:
|
||||||
@ -683,6 +821,7 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
id-token: write
|
id-token: write
|
||||||
|
packages: write
|
||||||
steps:
|
steps:
|
||||||
- name: Block egress traffic
|
- name: Block egress traffic
|
||||||
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
|
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
|
||||||
@ -697,23 +836,33 @@ jobs:
|
|||||||
atl.mirrors.knownhost.com:443
|
atl.mirrors.knownhost.com:443
|
||||||
atl.mirrors.knownhost.com:80
|
atl.mirrors.knownhost.com:80
|
||||||
auth.docker.io:443
|
auth.docker.io:443
|
||||||
|
bungi.mm.fcix.net:443
|
||||||
|
bungi.mm.fcix.net:80
|
||||||
cdn03.quay.io:443
|
cdn03.quay.io:443
|
||||||
centos-distro.1gservers.com:80
|
centos-distro.1gservers.com:80
|
||||||
|
centos-distro.cavecreek.net:80
|
||||||
centos-stream-distro.1gservers.com:443
|
centos-stream-distro.1gservers.com:443
|
||||||
centos-stream-distro.1gservers.com:80
|
centos-stream-distro.1gservers.com:80
|
||||||
centos.hivelocity.net:80
|
centos.hivelocity.net:80
|
||||||
centos.mirror.constant.com:80
|
centos.mirror.constant.com:80
|
||||||
centos.mirror.shastacoe.net:80
|
centos.mirror.shastacoe.net:80
|
||||||
|
coresite.mm.fcix.net:80
|
||||||
|
d2lzkl7pfhq30w.cloudfront.net:443
|
||||||
|
deb.debian.org:80
|
||||||
dfw.mirror.rackspace.com:443
|
dfw.mirror.rackspace.com:443
|
||||||
dfw.mirror.rackspace.com:80
|
dfw.mirror.rackspace.com:80
|
||||||
distro.ibiblio.org:80
|
distro.ibiblio.org:80
|
||||||
dl-cdn.alpinelinux.org:443
|
dl-cdn.alpinelinux.org:443
|
||||||
dl.google.com:443
|
|
||||||
download.cf.centos.org:443
|
download.cf.centos.org:443
|
||||||
download.cf.centos.org:80
|
download.cf.centos.org:80
|
||||||
epel.gb.ssimn.org:443
|
epel.gb.ssimn.org:443
|
||||||
epel.mirror.constant.com:443
|
epel.mirror.constant.com:443
|
||||||
epel.mirror.constant.com:80
|
epel.mirror.constant.com:80
|
||||||
|
epel.stl.us.ssimn.org:443
|
||||||
|
epel.stl.us.ssimn.org:80
|
||||||
|
fedora.nyherji.is:443
|
||||||
|
fedora.nyherji.is:80
|
||||||
|
forksystems.mm.fcix.net:443
|
||||||
forksystems.mm.fcix.net:80
|
forksystems.mm.fcix.net:80
|
||||||
ftp-nyc.osuosl.org:443
|
ftp-nyc.osuosl.org:443
|
||||||
ftp-nyc.osuosl.org:80
|
ftp-nyc.osuosl.org:80
|
||||||
@ -722,31 +871,45 @@ jobs:
|
|||||||
ftp.agdsn.de:443
|
ftp.agdsn.de:443
|
||||||
ftp.agdsn.de:80
|
ftp.agdsn.de:80
|
||||||
ftp.fau.de:443
|
ftp.fau.de:443
|
||||||
|
ftp.fau.de:80
|
||||||
ftp.halifax.rwth-aachen.de:443
|
ftp.halifax.rwth-aachen.de:443
|
||||||
ftp.halifax.rwth-aachen.de:80
|
ftp.halifax.rwth-aachen.de:80
|
||||||
ftp.osuosl.org:80
|
ftp.osuosl.org:80
|
||||||
ftp.plusline.net:443
|
ftp.plusline.net:443
|
||||||
ftp.plusline.net:80
|
ftp.plusline.net:80
|
||||||
|
ftp.uni-stuttgart.de:80
|
||||||
ftpmirror.your.org:80
|
ftpmirror.your.org:80
|
||||||
fulcio.sigstore.dev:443
|
fulcio.sigstore.dev:443
|
||||||
|
ghcr.io:443
|
||||||
github.com:443
|
github.com:443
|
||||||
iad.mirror.rackspace.com:443
|
iad.mirror.rackspace.com:443
|
||||||
iad.mirror.rackspace.com:80
|
iad.mirror.rackspace.com:80
|
||||||
|
ima.mm.fcix.net:80
|
||||||
index.docker.io:443
|
index.docker.io:443
|
||||||
|
ix-denver.mm.fcix.net:443
|
||||||
ix-denver.mm.fcix.net:80
|
ix-denver.mm.fcix.net:80
|
||||||
keyserver.ubuntu.com:11371
|
keyserver.ubuntu.com:11371
|
||||||
la.mirrors.clouvider.net:80
|
la.mirrors.clouvider.net:80
|
||||||
lesnet.mm.fcix.net:443
|
lesnet.mm.fcix.net:443
|
||||||
lesnet.mm.fcix.net:80
|
lesnet.mm.fcix.net:80
|
||||||
|
level66.mm.fcix.net:80
|
||||||
linux-mirrors.fnal.gov:80
|
linux-mirrors.fnal.gov:80
|
||||||
|
linux.cc.lehigh.edu:80
|
||||||
|
linux.mirrors.es.net:80
|
||||||
mirror-mci.yuki.net.uk:443
|
mirror-mci.yuki.net.uk:443
|
||||||
mirror-mci.yuki.net.uk:80
|
mirror-mci.yuki.net.uk:80
|
||||||
mirror.23m.com:443
|
mirror.23m.com:443
|
||||||
|
mirror.23m.com:80
|
||||||
mirror.arizona.edu:443
|
mirror.arizona.edu:443
|
||||||
mirror.arizona.edu:80
|
mirror.arizona.edu:80
|
||||||
mirror.ash.fastserv.com:80
|
mirror.ash.fastserv.com:80
|
||||||
|
mirror.centos.iad1.serverforge.org:80
|
||||||
mirror.chpc.utah.edu:80
|
mirror.chpc.utah.edu:80
|
||||||
mirror.clarkson.edu:80
|
mirror.clarkson.edu:80
|
||||||
|
mirror.cogentco.com:80
|
||||||
|
mirror.dal.nexril.net:443
|
||||||
|
mirror.dal.nexril.net:80
|
||||||
|
mirror.datto.com:80
|
||||||
mirror.de.leaseweb.net:443
|
mirror.de.leaseweb.net:443
|
||||||
mirror.de.leaseweb.net:80
|
mirror.de.leaseweb.net:80
|
||||||
mirror.dogado.de:443
|
mirror.dogado.de:443
|
||||||
@ -755,13 +918,19 @@ jobs:
|
|||||||
mirror.facebook.net:443
|
mirror.facebook.net:443
|
||||||
mirror.facebook.net:80
|
mirror.facebook.net:80
|
||||||
mirror.fcix.net:443
|
mirror.fcix.net:443
|
||||||
|
mirror.fcix.net:80
|
||||||
mirror.genesishosting.com:80
|
mirror.genesishosting.com:80
|
||||||
mirror.grid.uchicago.edu:80
|
mirror.grid.uchicago.edu:80
|
||||||
mirror.hoobly.com:443
|
mirror.hoobly.com:443
|
||||||
mirror.hoobly.com:80
|
mirror.hoobly.com:80
|
||||||
|
mirror.imt-systems.com:443
|
||||||
|
mirror.imt-systems.com:80
|
||||||
mirror.keystealth.org:80
|
mirror.keystealth.org:80
|
||||||
|
mirror.lstn.net:443
|
||||||
|
mirror.lstn.net:80
|
||||||
mirror.math.princeton.edu:443
|
mirror.math.princeton.edu:443
|
||||||
mirror.math.princeton.edu:80
|
mirror.math.princeton.edu:80
|
||||||
|
mirror.metrocast.net:443
|
||||||
mirror.metrocast.net:80
|
mirror.metrocast.net:80
|
||||||
mirror.netcologne.de:443
|
mirror.netcologne.de:443
|
||||||
mirror.netcologne.de:80
|
mirror.netcologne.de:80
|
||||||
@ -776,6 +945,7 @@ jobs:
|
|||||||
mirror.scaleuptech.com:80
|
mirror.scaleuptech.com:80
|
||||||
mirror.servaxnet.com:443
|
mirror.servaxnet.com:443
|
||||||
mirror.servaxnet.com:80
|
mirror.servaxnet.com:80
|
||||||
|
mirror.sfo12.us.leaseweb.net:443
|
||||||
mirror.sfo12.us.leaseweb.net:80
|
mirror.sfo12.us.leaseweb.net:80
|
||||||
mirror.siena.edu:80
|
mirror.siena.edu:80
|
||||||
mirror.steadfastnet.com:80
|
mirror.steadfastnet.com:80
|
||||||
@ -790,6 +960,7 @@ jobs:
|
|||||||
mirror.vacares.com:80
|
mirror.vacares.com:80
|
||||||
mirror.vtti.vt.edu:80
|
mirror.vtti.vt.edu:80
|
||||||
mirror.wdc2.us.leaseweb.net:80
|
mirror.wdc2.us.leaseweb.net:80
|
||||||
|
mirror.web-ster.com:80
|
||||||
mirror1.hs-esslingen.de:443
|
mirror1.hs-esslingen.de:443
|
||||||
mirror1.hs-esslingen.de:80
|
mirror1.hs-esslingen.de:80
|
||||||
mirrorlist.centos.org:80
|
mirrorlist.centos.org:80
|
||||||
@ -816,29 +987,41 @@ jobs:
|
|||||||
mirrors.syringanetworks.net:80
|
mirrors.syringanetworks.net:80
|
||||||
mirrors.tscak.com:80
|
mirrors.tscak.com:80
|
||||||
mirrors.unifiedlayer.com:80
|
mirrors.unifiedlayer.com:80
|
||||||
|
mirrors.vcea.wsu.edu:80
|
||||||
mirrors.wcupa.edu:443
|
mirrors.wcupa.edu:443
|
||||||
mirrors.wcupa.edu:80
|
mirrors.wcupa.edu:80
|
||||||
|
mirrors.xmission.com:80
|
||||||
mirrors.xtom.com:80
|
mirrors.xtom.com:80
|
||||||
mirrors.xtom.de:443
|
mirrors.xtom.de:443
|
||||||
mirrors.xtom.de:80
|
mirrors.xtom.de:80
|
||||||
mnvoip.mm.fcix.net:80
|
mnvoip.mm.fcix.net:80
|
||||||
na.edge.kernel.org:443
|
na.edge.kernel.org:443
|
||||||
nc-centos-mirror.iwebfusion.net:80
|
nc-centos-mirror.iwebfusion.net:80
|
||||||
|
nginx.org:443
|
||||||
|
nginx.org:80
|
||||||
|
nnenix.mm.fcix.net:443
|
||||||
nnenix.mm.fcix.net:80
|
nnenix.mm.fcix.net:80
|
||||||
nocix.mm.fcix.net:443
|
nocix.mm.fcix.net:443
|
||||||
nocix.mm.fcix.net:80
|
nocix.mm.fcix.net:80
|
||||||
|
nyc.mirrors.clouvider.net:80
|
||||||
oauth2.sigstore.dev:443
|
oauth2.sigstore.dev:443
|
||||||
objects.githubusercontent.com:443
|
objects.githubusercontent.com:443
|
||||||
|
ohioix.mm.fcix.net:443
|
||||||
ohioix.mm.fcix.net:80
|
ohioix.mm.fcix.net:80
|
||||||
opencolo.mm.fcix.net:443
|
opencolo.mm.fcix.net:443
|
||||||
opencolo.mm.fcix.net:80
|
opencolo.mm.fcix.net:80
|
||||||
or-mirror.iwebfusion.net:80
|
or-mirror.iwebfusion.net:80
|
||||||
packages.oit.ncsu.edu:80
|
packages.oit.ncsu.edu:80
|
||||||
paducahix.mm.fcix.net:80
|
paducahix.mm.fcix.net:80
|
||||||
|
pkg-containers.githubusercontent.com:443
|
||||||
ports.ubuntu.com:443
|
ports.ubuntu.com:443
|
||||||
ports.ubuntu.com:80
|
ports.ubuntu.com:80
|
||||||
production.cloudflare.docker.com:443
|
production.cloudflare.docker.com:443
|
||||||
|
pubmirror1.math.uh.edu:443
|
||||||
|
pubmirror1.math.uh.edu:80
|
||||||
pubmirror2.math.uh.edu:80
|
pubmirror2.math.uh.edu:80
|
||||||
|
pubmirror3.math.uh.edu:443
|
||||||
|
pubmirror3.math.uh.edu:80
|
||||||
quay.io:443
|
quay.io:443
|
||||||
registry-1.docker.io:443
|
registry-1.docker.io:443
|
||||||
rekor.sigstore.dev:443
|
rekor.sigstore.dev:443
|
||||||
@ -853,12 +1036,14 @@ jobs:
|
|||||||
scientificlinux.physik.uni-muenchen.de:80
|
scientificlinux.physik.uni-muenchen.de:80
|
||||||
security.ubuntu.com:443
|
security.ubuntu.com:443
|
||||||
security.ubuntu.com:80
|
security.ubuntu.com:80
|
||||||
|
southfront.mm.fcix.net:443
|
||||||
southfront.mm.fcix.net:80
|
southfront.mm.fcix.net:80
|
||||||
tuf-repo-cdn.sigstore.dev:443
|
tuf-repo-cdn.sigstore.dev:443
|
||||||
tx-mirror.tier.net:80
|
tx-mirror.tier.net:80
|
||||||
us.mirrors.virtono.com:80
|
us.mirrors.virtono.com:80
|
||||||
uvermont.mm.fcix.net:443
|
uvermont.mm.fcix.net:443
|
||||||
uvermont.mm.fcix.net:80
|
uvermont.mm.fcix.net:80
|
||||||
|
volico.mm.fcix.net:443
|
||||||
volico.mm.fcix.net:80
|
volico.mm.fcix.net:80
|
||||||
www.gtlib.gatech.edu:80
|
www.gtlib.gatech.edu:80
|
||||||
yum.oracle.com:443
|
yum.oracle.com:443
|
||||||
@ -872,11 +1057,13 @@ jobs:
|
|||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
|
|
||||||
- name: Install cosign
|
- name: Install cosign
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4
|
uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4
|
||||||
with:
|
with:
|
||||||
cosign-release: 'v2.2.3'
|
cosign-release: 'v2.2.3'
|
||||||
|
|
||||||
- name: Check cosign version
|
- name: Check cosign version
|
||||||
|
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
run: cosign version
|
run: cosign version
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
@ -890,12 +1077,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
driver-opts: image=moby/buildkit:master
|
driver-opts: image=moby/buildkit:master
|
||||||
|
|
||||||
- name: Login to DockerHub
|
|
||||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Prepare Platform list
|
- name: Prepare Platform list
|
||||||
id: platform
|
id: platform
|
||||||
env:
|
env:
|
||||||
@ -944,98 +1125,107 @@ jobs:
|
|||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
|
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
|
||||||
with:
|
with:
|
||||||
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX}}${{ matrix.build }}
|
images: |
|
||||||
|
${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY_TEST, env.DOCKER_REPOSITORY_TEST, env.IMAGES_PREFIX, matrix.build ) }},enable=${{ env.AUTO_PUSH_IMAGES != 'true' }}
|
||||||
|
${{ format('{0}/{1}{2}', env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, matrix.build ) }},enable=${{ env.AUTO_PUSH_IMAGES == 'true' }}
|
||||||
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
|
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
|
||||||
tags: |
|
tags: |
|
||||||
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ matrix.os }}-
|
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ matrix.os }}-
|
||||||
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},suffix=-${{ matrix.os }}
|
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},suffix=-${{ matrix.os }}
|
||||||
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,prefix=${{ matrix.os }}-,suffix=-latest
|
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' && !contains(fromJSON('["workflow_dispatch"]'), github.event_name) }},event=branch,prefix=${{ matrix.os }}-,suffix=-latest
|
||||||
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,suffix=-${{ matrix.os }}-latest
|
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' && !contains(fromJSON('["workflow_dispatch"]'), github.event_name) }},event=branch,suffix=-${{ matrix.os }}-latest
type=raw,enable=${{ (needs.init_build.outputs.current_branch != 'trunk') && (needs.init_build.outputs.is_default_branch == 'true') }},value=${{matrix.os}}-latest
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,prefix=${{ matrix.os }}-
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,suffix=-${{ matrix.os }}
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' || contains(fromJSON('["workflow_dispatch"]'), github.event_name) }},event=branch,suffix=-${{ matrix.os }}
flavor: |
latest=${{ (needs.init_build.outputs.current_branch != 'trunk') && (matrix.os == 'alpine') && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
latest=${{ (matrix.os == 'alpine') && (!contains(fromJSON('["workflow_dispatch"]'), github.event_name)) && ( needs.init_build.outputs.is_default_branch == 'true' ) }}

- name: Download SHA256 tag of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.os }}
- name: Download metadata of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.os }}
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
if: ${{ matrix.build != 'snmptraps' }}
with:
path: ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.os }}
path: ${{ env.BUILD_CACHE_FILE_NAME }}
key: ${{ steps.build_base_image.outputs.build_base }}-${{ matrix.os }}-${{ github.run_id }}

- name: Retrieve ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.os }} SHA256 tag
- name: Process ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.os }} image metadata
id: base_build
if: ${{ matrix.build != 'snmptraps' }}
env:
BUILD_BASE: ${{ steps.build_base_image.outputs.build_base }}
CACHE_FILE_NAME: ${{ env.BUILD_CACHE_FILE_NAME }}
MATRIX_OS: ${{ matrix.os }}
DOCKER_REPOSITORY: ${{ env.DOCKER_REPOSITORY }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
run: |
BASE_TAG=$(cat "${BUILD_BASE}_${MATRIX_OS}")
echo "::group::Base build image metadata"
BUILD_BASE_IMAGE=${DOCKER_REPOSITORY}/${IMAGES_PREFIX}${BUILD_BASE}@${BASE_TAG}
cat "${CACHE_FILE_NAME}"

echo "::group::Base build image information"
echo "base_tag=${BASE_TAG}"
echo "base_build_image=${BUILD_BASE_IMAGE}"
echo "::endgroup::"

echo "base_tag=${BASE_TAG}" >> $GITHUB_OUTPUT
IMAGE_DIGEST=$(jq -r '."containerimage.digest"' "${CACHE_FILE_NAME}")
echo "base_build_image=${BUILD_BASE_IMAGE}" >> $GITHUB_OUTPUT
IMAGE_NAME=$(jq -r '."image.name"' "${CACHE_FILE_NAME}" | cut -d: -f1)

echo "base_build_image=${IMAGE_NAME}@${IMAGE_DIGEST}" >> $GITHUB_OUTPUT

- name: Verify ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.os }} cosign
if: ${{ matrix.build != 'snmptraps' }}
if: ${{ matrix.build != 'snmptraps' && env.AUTO_PUSH_IMAGES == 'true' }}
env:
BASE_IMAGE: ${{ steps.base_build.outputs.base_build_image }}
OIDC_ISSUER: ${{ env.OIDC_ISSUER }}
IDENITY_REGEX: ${{ env.IDENITY_REGEX }}
IDENTITY_REGEX: ${{ env.IDENTITY_REGEX }}
run: |
echo "::group::Image sign data"
echo "OIDC issuer=$OIDC_ISSUER"
echo "OIDC issuer=${OIDC_ISSUER}"
echo "Identity=$IDENITY_REGEX"
echo "Identity=${IDENTITY_REGEX}"
echo "Image to verify=$BASE_IMAGE"
echo "Image to verify=${BASE_IMAGE}"
echo "::endgroup::"

echo "::group::Verify signature"
cosign verify \
--certificate-oidc-issuer-regexp "$OIDC_ISSUER" \
--certificate-oidc-issuer-regexp "${OIDC_ISSUER}" \
--certificate-identity-regexp "$IDENITY_REGEX" \
--certificate-identity-regexp "${IDENTITY_REGEX}" \
"$BASE_IMAGE"
"${BASE_IMAGE}"
echo "::endgroup::"

- name: Prepare cache data
if: ${{ matrix.build != 'snmptraps' }}
id: cache_data
env:
BASE_IMAGE_TAG: ${{ steps.base_build.outputs.base_build_image }}
run: |
cache_images=""
cache_from=()
if [[ ! -z "$BASE_IMAGE_TAG" ]]; then
cache_to=()
cache_images="type=gha,scope=$BASE_IMAGE_TAG"$'\n'"type=registry,ref=$BASE_IMAGE_TAG"
fi

echo "::group::Base images cache"
cache_from+=("type=registry,ref=${BASE_IMAGE_TAG}")
echo "$cache_images"
echo "::group::Cache from data"
echo "${cache_from[*]}"
echo "::endgroup::"

echo "cache_from=$cache_images" >> $GITHUB_OUTPUT
cache_from=$(printf '%s\n' "${cache_from[@]}")

echo 'cache_from<<EOF' >> "$GITHUB_OUTPUT"
echo "$cache_from" >> "$GITHUB_OUTPUT"
echo 'EOF' >> "$GITHUB_OUTPUT"

- name: Login to DockerHub
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

- name: Build and push image
id: docker_build
uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}
context: ${{ format('{0}/{1}/{2}', env.DOCKERFILES_DIRECTORY, matrix.build, matrix.os) }}
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}/Dockerfile
file: ${{ format('{0}/{1}/{2}/Dockerfile', env.DOCKERFILES_DIRECTORY, matrix.build, matrix.os) }}
platforms: ${{ steps.platform.outputs.list }}
push: ${{ env.AUTO_PUSH_IMAGES }}
push: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
tags: ${{ steps.meta.outputs.tags }}
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
labels: |
org.opencontainers.image.revision=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
org.opencontainers.image.created=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
cache-from: ${{ steps.cache_data.outputs.cache_from }}

- name: Sign the images with GitHub OIDC Token
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
TAGS: ${{ steps.meta.outputs.tags }}
@ -1054,10 +1244,11 @@ jobs:
cosign sign --yes ${images}
echo "::endgroup::"

- name: Image digest
- name: Image metadata
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
METADATA: ${{ steps.docker_build.outputs.metadata }}
run: |
echo "::group::Image digest"
echo "::group::Image metadata"
echo "$DIGEST"
echo "${METADATA}"
echo "::endgroup::"
45
.github/workflows/images_build_rhel.yml
vendored
@ -29,7 +29,8 @@ permissions:
contents: read

env:
AUTO_PUSH_IMAGES: ${{ contains(fromJSON('["workflow_dispatch", "push"]'), github.event_name) && 'false' || vars.AUTO_PUSH_IMAGES }}
TRUNK_ONLY_EVENT: ${{ contains(fromJSON('["schedule"]'), github.event_name) }}
AUTO_PUSH_IMAGES: ${{ ! contains(fromJSON('["workflow_dispatch", "push"]'), github.event_name) && vars.AUTO_PUSH_IMAGES }}

LATEST_BRANCH: ${{ github.event.repository.default_branch }}
TRUNK_GIT_BRANCH: "refs/heads/trunk"
@ -164,7 +165,7 @@ jobs:
echo "secret_prefix=RHEL_${github_ref//.}" >> $GITHUB_OUTPUT
echo "sha_short=$sha_short" >> $GITHUB_OUTPUT

- name: Cleanup cache
- name: Cleanup existing cache
shell: bash
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@ -205,11 +206,13 @@ jobs:
fetch-depth: 1

- name: Install cosign
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4
with:
cosign-release: 'v2.2.3'

- name: Check cosign version
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
run: cosign version

- name: Fix string case
@ -231,7 +234,7 @@ jobs:
id: build_image
uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
context: ${{ format('{0}/{1}/rhel', env.DOCKERFILES_DIRECTORY, matrix.build) }}
layers: false
tags: ${{ steps.meta.outputs.tags }}
containerfiles: |
@ -240,7 +243,7 @@ jobs:
--pull
--iidfile=${{ github.workspace }}/iidfile

- name: Prepare image metadata
- name: Image metadata
id: image_metadata
env:
IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
@ -279,7 +282,6 @@ jobs:
IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
echo "::group::Result"
echo "Image ${IMAGE_TAG} location: \"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
@ -310,13 +312,14 @@ jobs:
if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
env:
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
run: |
echo "::group::Removing orphaned image"
rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
echo "::endgroup::"

build_base_database:
timeout-minutes: 180
timeout-minutes: 90
needs: [ "build_base", "init_build"]
name: Build ${{ matrix.build }} base (${{ matrix.arch }})
strategy:
@ -348,7 +351,7 @@ jobs:
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}

- name: Pull ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} image
- name: Pull ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} image from local storage
id: base_build
env:
MATRIX_ARCH: ${{ matrix.arch }}
@ -389,7 +392,7 @@ jobs:
id: build_image
uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
context: ${{ format('{0}/{1}/rhel', env.DOCKERFILES_DIRECTORY, matrix.build) }}
layers: false
tags: ${{ steps.meta.outputs.tags }}
containerfiles: |
@ -423,7 +426,7 @@ jobs:
echo "image_tag_id=${TAG_ID}" >> $GITHUB_OUTPUT
echo "image_tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT

- name: Cache image digest
- name: Cache image metadata
uses: actions/cache/save@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: |
@ -437,7 +440,6 @@ jobs:
IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
echo "::group::Result"
echo "podman push \"${IMAGE_TAG}\" dir:\"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
@ -468,13 +470,14 @@ jobs:
if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
env:
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
run: |
echo "::group::Removing orphaned image"
rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
echo "::endgroup::"

build_images:
timeout-minutes: 90
timeout-minutes: 30
needs: [ "build_base_database", "init_build"]
name: Build ${{ matrix.build }} image (${{ matrix.arch }})
strategy:
@ -580,7 +583,7 @@ jobs:
id: build_image
uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
context: ${{ format('{0}/{1}/rhel', env.DOCKERFILES_DIRECTORY, matrix.build) }}
layers: false
tags: ${{ steps.meta.outputs.tags }}
labels: |
@ -588,6 +591,9 @@ jobs:
org.opencontainers.image.created=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
containerfiles: |
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
extra-args: |
--pull
--iidfile=${{ github.workspace }}/iidfile
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}

- name: Log in to ${{ env.REGISTRY }}
@ -657,11 +663,24 @@ jobs:
podman rmi -i -f "$PREFLIGHT_IMAGE"
echo "::endgroup::"

- name: Image digest
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
echo "::group::Image digest"
echo "$TAG_ID"
echo "::endgroup::"

- name: Post build image
if: ${{ success() || failure() }}
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
echo "::group::Result"

rm -rf "$GITHUB_WORKSPACE/iidfile"

echo "Removing working containers"
buildah rm -a 2>/dev/null || true
echo "Removing container data in storage not controlled by podman"
@ -674,7 +693,7 @@ jobs:
echo "::endgroup::"

clear_artifacts:
timeout-minutes: 90
timeout-minutes: 10
needs: [ "build_images", "init_build"]
name: Clear ${{ matrix.build }} image cache (${{ matrix.arch }})
strategy:
156
.github/workflows/rhel_registry_description.yml
vendored
Normal file
@ -0,0 +1,156 @@
name: Red Hat Catalog Description

on:
push:
branches:
- '[0-9]+.[0-9]+'
paths:
- 'Dockerfiles/*/*/README.html'
- 'Dockerfiles/*/*/README.md'
- '.github/workflows/rhel_registry_description.yml'
workflow_dispatch:

env:
DOCKERFILES_DIRECTORY: "./Dockerfiles"
API_URL: "https://catalog.redhat.com/api/containers/v1/projects/certification/id/"
MATRIX_FILE: "build.json"

defaults:
run:
shell: bash

permissions:
contents: read

jobs:
init:
name: Initialize workflow
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
components: ${{ steps.components.outputs.list }}
zabbix_release: ${{ steps.branch_info.outputs.zabbix_release }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
with:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
github.com:443

- name: Checkout repository
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
sparse-checkout: ${{ env.MATRIX_FILE }}

- name: Check ${{ env.MATRIX_FILE }} file
id: build_exists
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
if [[ ! -f "$MATRIX_FILE" ]]; then
echo "::error::File $MATRIX_FILE is missing"
exit 1
fi

- name: Get branch info
id: branch_info
shell: bash
env:
github_ref: ${{ github.ref }}
run: |
result=false

github_ref=${github_ref##*/}

echo "::group::Branch metadata"
echo "zabbix_release=${github_ref//.}"
echo "::endgroup::"

echo "zabbix_release=${github_ref//.}" >> $GITHUB_OUTPUT

- name: Prepare Zabbix component list
id: components
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
component_list=$(jq -r '.components | map_values(select(.rhel == true)) | keys | @json' "$MATRIX_FILE")

echo "::group::Zabbix Component List"
echo "$component_list"
echo "::endgroup::"

echo "list=$component_list" >> $GITHUB_OUTPUT

publish:
name: Initialize build
runs-on: ubuntu-latest
needs: init
permissions:
contents: read
strategy:
fail-fast: false
matrix:
component: ${{ fromJson(needs.init.outputs.components) }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
with:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
catalog.redhat.com:443
files.pythonhosted.org:443
github.com:443
pypi.org:443

- name: Checkout repository
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1

- name: Variables formating
id: var_format
env:
MATRIX_BUILD: ${{ matrix.component }}
run: |
MATRIX_BUILD=${MATRIX_BUILD^^}
MATRIX_BUILD=${MATRIX_BUILD//-/_}

echo "::group::Result"
echo "matrix_build=${MATRIX_BUILD}"
echo "::endgroup::"
echo "matrix_build=${MATRIX_BUILD}" >> $GITHUB_OUTPUT

- name: Setup Python 3.x
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with:
python-version: '3.x'

- name: Install Python packages
run: |
python -m pip install --upgrade pip
pip install markdown requests

- name: Update Red Hat project description
env:
DESCRIPTION_FILE: ${{ format('{0}/{1}/rhel/README', env.DOCKERFILES_DIRECTORY, matrix.component) }}
PROJECT_ID: ${{ secrets[format('RHEL_{0}_{1}_PROJECT', needs.init.outputs.zabbix_release, steps.var_format.outputs.matrix_build)] }}
PYXIS_API_TOKEN: ${{ secrets.REDHAT_API_TOKEN }}
API_URL: ${{ env.API_URL }}
run: |
python ./.github/scripts/rhel_description.py

- name: Red Hat Gatalog URL
env:
COMPONENT: ${{ matrix.component }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
PROJECT_ID=$(jq -r ".components.\"$COMPONENT\".rhel_project" "$MATRIX_FILE")

echo "::group::URL"
echo "https://catalog.redhat.com/software/containers/${PROJECT_ID}"
echo "::endgroup::"
2
.github/workflows/scorecard.yml
vendored
@ -73,6 +73,6 @@ jobs:

# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@e675ced7a7522a761fc9c8eb26682c8b27c42b2b # v3.24.1
uses: github/codeql-action/upload-sarif@379614612a29c9e28f31f39a59013eb8012a51f0 # v3.24.3
with:
sarif_file: results.sarif
20
build.json
@ -11,6 +11,10 @@
"linux/arm64",
"linux/ppc64le"
],
"ol": [
"linux/amd64",
"linux/arm64"
],
"rhel": [
"X64"
],
@ -26,17 +30,17 @@
"windows-2022": "ltsc2022"
},
"components": {
"agent": { "base": "build-mysql", "rhel": true },
"agent": { "base": "build-mysql", "rhel": true, "rhel_project": "5f0e061669aea31642b7afc0" },
"agent2": { "base": "build-mysql", "rhel": true },
"agent2": { "base": "build-mysql", "rhel": true, "rhel_project": "5f6e1ccbecb524508951218b" },
"java-gateway": { "base": "build-mysql", "rhel": true },
"java-gateway": { "base": "build-mysql", "rhel": true, "rhel_project": "5f0e0619ac3db9037095d215" },
"proxy-mysql": { "base": "build-mysql", "rhel": true },
"proxy-mysql": { "base": "build-mysql", "rhel": true, "rhel_project": "5f0e0612ecb5245089451308" },
"proxy-sqlite3": { "base": "build-sqlite3", "rhel": true },
"proxy-sqlite3": { "base": "build-sqlite3", "rhel": true, "rhel_project": "5f109e1169aea31642b7b294" },
"server-mysql": { "base": "build-mysql", "rhel": true },
"server-mysql": { "base": "build-mysql", "rhel": true, "rhel_project": "5f109e16ecb52450894515f5" },
"server-pgsql": { "base": "build-pgsql", "rhel": false },
"snmptraps": { "base": "", "rhel": true },
"snmptraps": { "base": "", "rhel": true, "rhel_project": "618567a9843af1624c4e452b" },
"web-apache-mysql": { "base": "build-mysql", "rhel": false },
"web-apache-pgsql": { "base": "build-pgsql", "rhel": false },
"web-nginx-mysql": { "base": "build-mysql", "rhel": true },
"web-nginx-mysql": { "base": "build-mysql", "rhel": true, "rhel_project": "5f109e312937386820360ef9" },
"web-nginx-pgsql": { "base": "build-mysql", "rhel": false }
}
}