forked from extern/podman-compose
Compare commits
56 Commits
SHA1:
bce40c2db3, 78f8cad7c4, 7942a540cd, cb9cf6002f, 06587c1dca, bc9168b039, 57c527c2c9, d1f5ac9edc,
0164c1db56, e5cdce4e7d, 280f1770bf, f75d12af21, 5454c3ad0f, 901adf47d0, bf07e91163, 3890eacf57,
cfd24cc2e8, 79bfad103c, d1509468c3, 9011e9faa1, 517aeba330, 85d5d5dcc9, 1ffd24dcf9, 8c66b1cda7,
a0005db474, 221cf14501, a61945b516, 6b6330c587, 5d279c4948, 5a3bdbf89b, 1eb166445b, 82182b7bc6,
3f4618866b, 91bc6ebdb4, 59a59c1a3a, 620f5d7473, 6f902faed0, ccdf01e9b0, e6b1eabe4c, 75de39c239,
874192568f, 0b853f29f4, 847f01a6c6, e511e6420f, a9723ec1cf, 1cb608d8a7, 252f1d57a5, 13856d2e9c,
8d8df0bc28, bc5f0123d9, 9a08f85ffd, 8625d7a4e8, 016c97fd1e, 2df11674c4, 5eff38e743, 7f5ce26b1b
6  .github/workflows/pylint.yml  (vendored)
@@ -11,8 +11,8 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install psf/black requirements
         run: |
-          apt-get update
-          apt-get install -y python3 python3-venv
+          sudo apt-get update
+          sudo apt-get install -y python3 python3-venv
       - uses: psf/black@stable
         with:
           options: "--check --verbose"
@@ -22,7 +22,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10"]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
@@ -9,6 +9,9 @@ repos:
         # https://pre-commit.com/#top_level-default_language_version
         language_version: python3.10
         types: [python]
+        args: [
+          "--check", # Don't apply changes automatically
+        ]
   - repo: https://github.com/pycqa/flake8
     rev: 6.0.0
     hooks:
@@ -1,7 +1,7 @@
 ---
 - name: Manage AWX Container Images
   block:
-    - name: Export Docker awx image if it isnt local and there isnt a registry defined
+    - name: Export Docker awx image if it isn't local and there isn't a registry defined
       docker_image:
         name: "{{ awx_image }}"
         tag: "{{ awx_version }}"
@@ -7,6 +7,6 @@ RUN pip install --no-cache-dir -r requirements.txt

 COPY . .

-CMD [ "python", "-m", "App.web" ]
+CMD [ "python", "-m", "app.web" ]
 EXPOSE 8080
@@ -1,5 +1,7 @@
+# pylint: disable=import-error
+# pylint: disable=unused-import
 import os
-import asyncio
+import asyncio  # noqa: F401

 import aioredis
 from aiohttp import web
@@ -14,13 +16,13 @@ routes = web.RouteTableDef()


 @routes.get("/")
-async def hello(request):
+async def hello(request):  # pylint: disable=unused-argument
     counter = await redis.incr("mycounter")
     return web.Response(text=f"counter={counter}")


 @routes.get("/hello.json")
-async def hello_json(request):
+async def hello_json(request):  # pylint: disable=unused-argument
     counter = await redis.incr("mycounter")
     data = {"counter": counter}
     return web.json_response(data)
@@ -1,4 +1,4 @@
-#! /usr/bin/python3
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-

 # https://docs.docker.com/compose/compose-file/#service-configuration-reference
@@ -30,7 +30,7 @@ import shlex
 try:
     from shlex import quote as cmd_quote
 except ImportError:
-    from pipes import quote as cmd_quote
+    from pipes import quote as cmd_quote  # pylint: disable=deprecated-module

 # import fnmatch
 # fnmatch.fnmatchcase(env, "*_HOST")
@@ -38,16 +38,32 @@ except ImportError:
 import yaml
 from dotenv import dotenv_values

-__version__ = "1.0.6"
+__version__ = "1.0.7"

 script = os.path.realpath(sys.argv[0])

 # helper functions
-is_str = lambda s: isinstance(s, str)
-is_dict = lambda d: isinstance(d, dict)
-is_list = lambda l: not is_str(l) and not is_dict(l) and hasattr(l, "__iter__")
+def is_str(string_object):
+    return isinstance(string_object, str)
+
+
+def is_dict(dict_object):
+    return isinstance(dict_object, dict)
+
+
+def is_list(list_object):
+    return (
+        not is_str(list_object)
+        and not is_dict(list_object)
+        and hasattr(list_object, "__iter__")
+    )
+
+
 # identity filter
-filteri = lambda a: filter(lambda i: i, a)
+def filteri(a):
+    return filter(lambda i: i, a)


 def try_int(i, fallback=None):
@@ -204,7 +220,7 @@ def fix_mount_dict(compose, mount_dict, proj_name, srv_name):
     vol = (vols.get(source, None) or {}) if source else {}
     name = vol.get("name", None)
     mount_dict["_vol"] = vol
-    # handle anonymouse or implied volume
+    # handle anonymous or implied volume
     if not source:
         # missing source
         vol["name"] = "_".join(
@@ -575,7 +591,7 @@ def get_secret_args(compose, cnt, secret):
         # docker-compose does not support external secrets outside of swarm mode.
         # However accessing these via podman is trivial
         # since these commands are directly translated to
-        # podman-create commands, albiet we can only support a 1:1 mapping
+        # podman-create commands, albeit we can only support a 1:1 mapping
         # at the moment
         if declared_secret.get("external", False) or declared_secret.get("name", None):
             secret_opts += f",uid={uid}" if uid else ""
@@ -602,7 +618,7 @@ def get_secret_args(compose, cnt, secret):
         return ["--secret", "{}{}".format(secret_name, secret_opts)]

     raise ValueError(
-        'ERROR: unparseable secret: "{}", service: "{}"'.format(
+        'ERROR: unparsable secret: "{}", service: "{}"'.format(
            secret_name, cnt["_service"]
        )
    )
@@ -730,7 +746,7 @@ def assert_cnt_nets(compose, cnt):
             "--label",
             f"com.docker.compose.project={proj_name}",
         ]
-        # TODO: add more options here, like driver, internal, ..etc
+        # TODO: add more options here, like dns, ipv6, etc.
         labels = net_desc.get("labels", None) or []
         for item in norm_as_list(labels):
             args.extend(["--label", item])
@@ -742,15 +758,17 @@ def assert_cnt_nets(compose, cnt):
         driver_opts = net_desc.get("driver_opts", None) or {}
         for key, value in driver_opts.items():
             args.extend(("--opt", f"{key}={value}"))
-        ipam_config_ls = (net_desc.get("ipam", None) or {}).get(
-            "config", None
-        ) or []
+        ipam = net_desc.get("ipam", None) or {}
+        ipam_driver = ipam.get("driver", None)
+        if ipam_driver:
+            args.extend(("--ipam-driver", ipam_driver))
+        ipam_config_ls = ipam.get("config", None) or []
         if is_dict(ipam_config_ls):
             ipam_config_ls = [ipam_config_ls]
-        for ipam in ipam_config_ls:
-            subnet = ipam.get("subnet", None)
-            ip_range = ipam.get("ip_range", None)
-            gateway = ipam.get("gateway", None)
+        for ipam_config in ipam_config_ls:
+            subnet = ipam_config.get("subnet", None)
+            ip_range = ipam_config.get("ip_range", None)
+            gateway = ipam_config.get("gateway", None)
             if subnet:
                 args.extend(("--subnet", subnet))
             if ip_range:
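The reworked ipam handling above forwards a network's ipam driver as --ipam-driver and still walks each config entry for subnet, ip_range, and gateway. A minimal sketch of a compose network that would exercise it (hypothetical network name and addresses):

    networks:
      backend:
        ipam:
          driver: default
          config:
            - subnet: 10.89.0.0/24
              gateway: 10.89.0.1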
@@ -777,6 +795,8 @@ def get_net_args(compose, cnt):
         net_args.extend(["--network", net])
     elif net.startswith("slirp4netns:"):
         net_args.extend(["--network", net])
+    elif net.startswith("ns:"):
+        net_args.extend(["--network", net])
     elif net.startswith("service:"):
         other_srv = net.split(":", 1)[1].strip()
         other_cnt = compose.container_names_by_service[other_srv][0]
@@ -797,15 +817,25 @@ def get_net_args(compose, cnt):
     cnt_nets = cnt.get("networks", None)
     aliases = [service_name]
     # NOTE: from podman manpage:
-    # NOTE: A container will only have access to aliases on the first network that it joins. This is a limitation that will be removed in a later release.
+    # NOTE: A container will only have access to aliases on the first network
+    # that it joins. This is a limitation that will be removed in a later
+    # release.
     ip = None
     ip6 = None
+    ip_assignments = 0
+    if cnt.get("_aliases", None):
+        aliases.extend(cnt.get("_aliases", None))
     if cnt_nets and is_dict(cnt_nets):
         prioritized_cnt_nets = []
         # cnt_nets is {net_key: net_value, ...}
         for net_key, net_value in cnt_nets.items():
             net_value = net_value or {}
             aliases.extend(norm_as_list(net_value.get("aliases", None)))
+            if net_value.get("ipv4_address", None) is not None:
+                ip_assignments = ip_assignments + 1
+            if net_value.get("ipv6_address", None) is not None:
+                ip_assignments = ip_assignments + 1
+
             if not ip:
                 ip = net_value.get("ipv4_address", None)
             if not ip6:
@@ -832,12 +862,39 @@ def get_net_args(compose, cnt):
             )
         net_names.append(net_name)
     net_names_str = ",".join(net_names)
-    if is_bridge:
-        net_args.extend(["--net", net_names_str, "--network-alias", ",".join(aliases)])
-    if ip:
-        net_args.append(f"--ip={ip}")
-    if ip6:
-        net_args.append(f"--ip6={ip6}")
+
+    if ip_assignments > 1:
+        multiple_nets = cnt.get("networks", None)
+        multiple_net_names = multiple_nets.keys()
+
+        for net_ in multiple_net_names:
+            net_desc = nets[net_] or {}
+            is_ext = net_desc.get("external", None)
+            ext_desc = is_ext if is_dict(is_ext) else {}
+            default_net_name = net_ if is_ext else f"{proj_name}_{net_}"
+            net_name = (
+                ext_desc.get("name", None)
+                or net_desc.get("name", None)
+                or default_net_name
+            )
+
+            ipv4 = multiple_nets[net_].get("ipv4_address", None)
+            ipv6 = multiple_nets[net_].get("ipv6_address", None)
+            if ipv4 is not None and ipv6 is not None:
+                net_args.extend(["--network", f"{net_name}:ip={ipv4},ip={ipv6}"])
+            elif ipv4 is None and ipv6 is not None:
+                net_args.extend(["--network", f"{net_name}:ip={ipv6}"])
+            elif ipv6 is None and ipv4 is not None:
+                net_args.extend(["--network", f"{net_name}:ip={ipv4}"])
+    else:
+        if is_bridge:
+            net_args.extend(
+                ["--net", net_names_str, "--network-alias", ",".join(aliases)]
+            )
+        if ip:
+            net_args.append(f"--ip={ip}")
+        if ip6:
+            net_args.append(f"--ip6={ip6}")
     return net_args

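With ip_assignments counted per attached network, a container that requests static addresses on more than one network is now given one --network <name>:ip=<addr> argument per network instead of a single --ip/--ip6 pair. A sketch of a service that would take the new branch (hypothetical names and addresses; the networks would also need matching subnets configured):

    services:
      web:
        image: busybox
        networks:
          net0:
            ipv4_address: 192.168.0.10
          net1:
            ipv4_address: 192.168.1.10
    networks:
      net0: {}
      net1: {}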
@@ -973,7 +1030,7 @@ def container_to_args(compose, cnt, detached=True):
     # WIP: healthchecks are still work in progress
     healthcheck = cnt.get("healthcheck", None) or {}
     if not is_dict(healthcheck):
-        raise ValueError("'healthcheck' must be an key-value mapping")
+        raise ValueError("'healthcheck' must be a key-value mapping")
     healthcheck_disable = healthcheck.get("disable", False)
     healthcheck_test = healthcheck.get("test", None)
     if healthcheck_disable:
@@ -1019,6 +1076,14 @@ def container_to_args(compose, cnt, detached=True):
     if "retries" in healthcheck:
         podman_args.extend(["--healthcheck-retries", str(healthcheck["retries"])])

+    # handle podman extension
+    x_podman = cnt.get("x-podman", None)
+    if x_podman is not None:
+        for uidmap in x_podman.get("uidmaps", []):
+            podman_args.extend(["--uidmap", uidmap])
+        for gidmap in x_podman.get("gidmaps", []):
+            podman_args.extend(["--gidmap", gidmap])
+
     podman_args.append(cnt["image"])  # command, ..etc.
     command = cnt.get("command", None)
     if command is not None:
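The x-podman block read above is a podman-only extension: each entry in uidmaps/gidmaps is passed through verbatim as a --uidmap/--gidmap option on the generated container arguments. A sketch of the expected service syntax (hypothetical service name and mappings):

    services:
      app:
        image: localhost/app
        x-podman:
          uidmaps:
            - "0:1000:1"
          gidmaps:
            - "0:1000:1"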
@@ -1075,6 +1140,12 @@ def flat_deps(services, with_extends=False):
         if not is_list(links_ls):
             links_ls = [links_ls]
         deps.update([(c.split(":")[0] if ":" in c else c) for c in links_ls])
+        for c in links_ls:
+            if ":" in c:
+                dep_name, dep_alias = c.split(":")
+                if not "_aliases" in services[dep_name]:
+                    services[dep_name]["_aliases"] = set()
+                services[dep_name]["_aliases"].add(dep_alias)
     for name, srv in services.items():
         rec_deps(services, name)

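The added loop records the alias half of each link (the part after the colon) in the linked service's _aliases set, so it is later published as a network alias for that service. A minimal illustration with hypothetical service names, where db also becomes reachable as database:

    services:
      web:
        image: busybox
        links:
          - db:database
      db:
        image: busybox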
@@ -1127,7 +1198,11 @@ class Podman:
         log(" ".join([str(i) for i in cmd_ls]))
         if self.dry_run:
             return None
-        # subprocess.Popen(args, bufsize = 0, executable = None, stdin = None, stdout = None, stderr = None, preexec_fn = None, close_fds = False, shell = False, cwd = None, env = None, universal_newlines = False, startupinfo = None, creationflags = 0)
+        # subprocess.Popen(
+        #   args, bufsize = 0, executable = None, stdin = None, stdout = None, stderr = None, preexec_fn = None,
+        #   close_fds = False, shell = False, cwd = None, env = None, universal_newlines = False, startupinfo = None,
+        #   creationflags = 0
+        # )
         if log_formatter is not None:
             # Pipe podman process output through log_formatter (which can add colored prefix)
             p = subprocess.Popen(
@@ -1170,12 +1245,13 @@ class Podman:


 def normalize_service(service, sub_dir=""):
-    # make `build.context` relative to sub_dir
-    # TODO: should we make volume and secret relative too?
+    if "build" in service:
+        build = service["build"]
+        if is_str(build):
+            service["build"] = {"context": build}
     if sub_dir and "build" in service:
         build = service["build"]
-        context = build if is_str(build) else build.get("context", None)
-        context = context or ""
+        context = build.get("context", None) or ""
         if context or sub_dir:
             if context.startswith("./"):
                 context = context[2:]
@@ -1184,10 +1260,11 @@ def normalize_service(service, sub_dir=""):
             context = context.rstrip("/")
             if not context:
                 context = "."
-            if is_str(build):
-                service["build"] = context
-            else:
-                service["build"]["context"] = context
+            service["build"]["context"] = context
+    for key in ("command", "entrypoint"):
+        if key in service:
+            if is_str(service[key]):
+                service[key] = shlex.split(service[key])
     for key in ("env_file", "security_opt", "volumes"):
         if key not in service:
             continue
@@ -1220,6 +1297,30 @@ def normalize(compose):
     return compose


+def normalize_service_final(service: dict, project_dir: str) -> dict:
+    if "build" in service:
+        build = service["build"]
+        context = build if is_str(build) else build.get("context", ".")
+        context = os.path.normpath(os.path.join(project_dir, context))
+        dockerfile = (
+            "Dockerfile"
+            if is_str(build)
+            else service["build"].get("dockerfile", "Dockerfile")
+        )
+        if not is_dict(service["build"]):
+            service["build"] = {}
+        service["build"]["dockerfile"] = dockerfile
+        service["build"]["context"] = context
+    return service
+
+
+def normalize_final(compose: dict, project_dir: str) -> dict:
+    services = compose.get("services", None) or {}
+    for service in services.values():
+        normalize_service_final(service, project_dir)
+    return compose
+
+
 def clone(value):
     return value.copy() if is_list(value) or is_dict(value) else value

@@ -1240,23 +1341,23 @@ def rec_merge_one(target, source):
         if key not in source:
             continue
         value2 = source[key]
-        if key == "command":
+        if key in ("command", "entrypoint"):
             target[key] = clone(value2)
             continue
         if not isinstance(value2, type(value)):
             value_type = type(value)
             value2_type = type(value2)
             raise ValueError(
-                f"can't merge value of {key} of type {value_type} and {value2_type}"
+                f"can't merge value of [{key}] of type {value_type} and {value2_type}"
             )
         if is_list(value2):
             if key == "volumes":
                 # clean duplicate mount targets
-                pts = {v.split(":", 1)[1] for v in value2 if ":" in v}
+                pts = {v.split(":", 2)[1] for v in value2 if ":" in v}
                 del_ls = [
                     ix
                     for (ix, v) in enumerate(value)
-                    if ":" in v and v.split(":", 1)[1] in pts
+                    if ":" in v and v.split(":", 2)[1] in pts
                 ]
                 for ix in reversed(del_ls):
                     del value[ix]
@@ -1298,7 +1399,7 @@ def resolve_extends(services, service_names, environ):
             content = content["services"]
             subdirectory = os.path.dirname(filename)
             content = rec_subs(content, environ)
-            from_service = content.get(from_service_name, {})
+            from_service = content.get(from_service_name, {}) or {}
             normalize_service(from_service, subdirectory)
         else:
             from_service = services.get(from_service_name, {}).copy()
@@ -1420,7 +1521,9 @@ class PodmanCompose:
         if compose_required:
             self._parse_compose_file()
         cmd = self.commands[cmd_name]
-        cmd(self, args)
+        retcode = cmd(self, args)
+        if isinstance(retcode, int):
+            sys.exit(retcode)

     def _parse_compose_file(self):
         args = self.global_args
@@ -1459,11 +1562,16 @@ class PodmanCompose:
         dirname = os.path.realpath(os.path.dirname(filename))
         dir_basename = os.path.basename(dirname)
         self.dirname = dirname
+
+        # env-file is relative to the CWD
+        dotenv_dict = {}
+        if args.env_file:
+            dotenv_path = os.path.realpath(args.env_file)
+            dotenv_dict = dotenv_to_dict(dotenv_path)
+
         # TODO: remove next line
         os.chdir(dirname)

-        dotenv_path = os.path.join(dirname, args.env_file)
-        dotenv_dict = dotenv_to_dict(dotenv_path)
         os.environ.update(
             {
                 key: value
@@ -1483,7 +1591,15 @@ class PodmanCompose:
                 }
             )
         compose = {}
-        for filename in files:
+        # Iterate over files primitively to allow appending to files in-loop
+        files_iter = iter(files)
+
+        while True:
+            try:
+                filename = next(files_iter)
+            except StopIteration:
+                break
+
             with open(filename, "r", encoding="utf-8") as f:
                 content = yaml.safe_load(f)
                 # log(filename, json.dumps(content, indent = 2))
@@ -1497,6 +1613,22 @@ class PodmanCompose:
                 # log(filename, json.dumps(content, indent = 2))
                 content = rec_subs(content, self.environ)
                 rec_merge(compose, content)
+                # If `include` is used, append included files to files
+                include = compose.get("include", None)
+                if include:
+                    files.append(*include)
+                    # As compose obj is updated and tested with every loop, not deleting `include`
+                    # from it, results in it being tested again and again, original values for
+                    # `include` be appended to `files`, and, included files be processed for ever.
+                    # Solution is to remove 'include' key from compose obj. This doesn't break
+                    # having `include` present and correctly processed in included files
+                    del compose["include"]
+        resolved_services = self._resolve_profiles(
+            compose.get("services", {}), set(args.profile)
+        )
+        compose["services"] = resolved_services
+        if not getattr(args, "no_normalize", None):
+            compose = normalize_final(compose, self.dirname)
         self.merged_yaml = yaml.safe_dump(compose)
         merged_json_b = json.dumps(compose, separators=(",", ":")).encode("utf-8")
         self.yaml_hash = hashlib.sha256(merged_json_b).hexdigest()
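The include handling above treats a top-level include list as extra compose files: each entry is appended to the list of files being iterated, and the key is removed from the merged object so it is not reprocessed on a later pass. A sketch of the syntax this enables (hypothetical file name):

    include:
      - common-services.yaml
    services:
      web:
        image: busybox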
@@ -1511,7 +1643,8 @@ class PodmanCompose:
         if project_name is None:
             # More strict then actually needed for simplicity: podman requires [a-zA-Z0-9][a-zA-Z0-9_.-]*
             project_name = (
-                os.environ.get("COMPOSE_PROJECT_NAME", None) or dir_basename.lower()
+                self.environ.get("COMPOSE_PROJECT_NAME", None)
+                or dir_basename.lower()
             )
             project_name = norm_re.sub("", project_name)
             if not project_name:
@@ -1526,6 +1659,8 @@ class PodmanCompose:
         if services is None:
             services = {}
             log("WARNING: No services defined")
+        # include services with no profile defined or the selected profiles
+        services = self._resolve_profiles(services, set(args.profile))

         # NOTE: maybe add "extends.service" to _deps at this stage
         flat_deps(services, with_extends=True)
@@ -1640,6 +1775,30 @@ class PodmanCompose:
         self.containers = containers
         self.container_by_name = {c["name"]: c for c in containers}

+    def _resolve_profiles(self, defined_services, requested_profiles=None):
+        """
+        Returns a service dictionary (key = service name, value = service config) compatible with the requested_profiles
+        list.
+
+        The returned service dictionary contains all services which do not include/reference a profile in addition to
+        services that match the requested_profiles.
+
+        :param defined_services: The service dictionary
+        :param requested_profiles: The profiles requested using the --profile arg.
+        """
+        if requested_profiles is None:
+            requested_profiles = set()
+
+        services = {}
+
+        for name, config in defined_services.items():
+            service_profiles = set(config.get("profiles", []))
+            if not service_profiles or requested_profiles.intersection(
+                service_profiles
+            ):
+                services[name] = config
+        return services
+
     def _parse_args(self):
         parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
         self._init_global_parser(parser)
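The new _resolve_profiles helper keeps every service that declares no profiles, plus any service whose profiles intersect the values passed via the new --profile flag (for example, podman-compose --profile debug up). A hypothetical compose file where only web is selected when no profile is requested:

    services:
      web:
        image: busybox
      debug-shell:
        image: busybox
        profiles:
          - debug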
@@ -1667,7 +1826,7 @@ class PodmanCompose:
             help="pod creation",
             metavar="in_pod",
             type=bool,
-            default=False,
+            default=True,
         )
         parser.add_argument(
             "--pod-args",
@@ -1691,6 +1850,13 @@ class PodmanCompose:
             action="append",
             default=[],
         )
+        parser.add_argument(
+            "--profile",
+            help="Specify a profile to enable",
+            metavar="profile",
+            action="append",
+            default=[],
+        )
         parser.add_argument(
             "-p",
             "--project-name",
@@ -1799,7 +1965,7 @@ def is_local(container: dict) -> bool:
     * has a build section and is not prefixed
     """
     return (
-        not "/" in container["image"]
+        "/" not in container["image"]
         if "build" in container
         else container["image"].startswith("localhost/")
     )
@@ -1923,7 +2089,7 @@ def compose_push(compose, args):

 def build_one(compose, args, cnt):
     if "build" not in cnt:
-        return
+        return None
     if getattr(args, "if_not_exists", None):
         try:
             img_id = compose.podman.output(
@@ -1932,7 +2098,7 @@ def build_one(compose, args, cnt):
         except subprocess.CalledProcessError:
             img_id = None
         if img_id:
-            return
+            return None
     build_desc = cnt["build"]
     if not hasattr(build_desc, "items"):
         build_desc = {"context": build_desc}
@@ -1969,6 +2135,10 @@ def build_one(compose, args, cnt):
         build_args.append("--pull-always")
     elif getattr(args, "pull", None):
         build_args.append("--pull")
+    env = dict(cnt.get("environment", {}))
+    for name, value in env.items():
+        build_args += ["--env", f"{name}" if value is None else f"{name}={value}"]
+
     args_list = norm_as_list(build_desc.get("args", {}))
     for build_arg in args_list + args.build_arg:
         build_args.extend(
@@ -1978,20 +2148,37 @@ def build_one(compose, args, cnt):
             )
         )
     build_args.append(ctx)
-    compose.podman.run([], "build", build_args, sleep=0)
+    status = compose.podman.run([], "build", build_args, sleep=0)
+    return status


 @cmd_run(podman_compose, "build", "build stack images")
 def compose_build(compose, args):
+    # keeps the status of the last service/container built
+    status = 0
+
+    def parse_return_code(obj, current_status):
+        if obj and obj.returncode != 0:
+            return obj.returncode
+        return current_status
+
     if args.services:
         container_names_by_service = compose.container_names_by_service
         compose.assert_services(args.services)
         for service in args.services:
             cnt = compose.container_by_name[container_names_by_service[service][0]]
-            build_one(compose, args, cnt)
+            p = build_one(compose, args, cnt)
+            status = parse_return_code(p, status)
+            if status != 0:
+                return status
     else:
         for cnt in compose.containers:
-            build_one(compose, args, cnt)
+            p = build_one(compose, args, cnt)
+            status = parse_return_code(p, status)
+            if status != 0:
+                return status
+
+    return status


 def create_pods(compose, args):  # pylint: disable=unused-argument
@@ -2253,6 +2440,15 @@ def compose_run(compose, args):
             )
         )
         compose.commands["up"](compose, up_args)

+    build_args = argparse.Namespace(
+        services=[args.service],
+        if_not_exists=(not args.build),
+        build_arg=[],
+        **args.__dict__,
+    )
+    compose.commands["build"](compose, build_args)
+
     # adjust one-off container options
     name0 = "{}_{}_tmp{}".format(
         compose.project_name, args.service, random.randrange(0, 65536)
@@ -2495,6 +2691,37 @@ def compose_kill(compose, args):
         compose.podman.run([], "kill", podman_args)


+@cmd_run(
+    podman_compose,
+    "stats",
+    "Display percentage of CPU, memory, network I/O, block I/O and PIDs for services.",
+)
+def compose_stats(compose, args):
+    container_names_by_service = compose.container_names_by_service
+    if not args.services:
+        args.services = container_names_by_service.keys()
+    targets = []
+    podman_args = []
+    if args.interval:
+        podman_args.extend(["--interval", args.interval])
+    if args.format:
+        podman_args.extend(["--format", args.format])
+    if args.no_reset:
+        podman_args.append("--no-reset")
+    if args.no_stream:
+        podman_args.append("--no-stream")
+
+    for service in args.services:
+        targets.extend(container_names_by_service[service])
+    for target in targets:
+        podman_args.append(target)
+
+    try:
+        compose.podman.run([], "stats", podman_args)
+    except KeyboardInterrupt:
+        pass
+
+
 ###################
 # command arguments parsing
 ###################
@@ -2522,7 +2749,8 @@ def compose_up_parse(parser):
         "-d",
         "--detach",
         action="store_true",
-        help="Detached mode: Run container in the background, print new container name. Incompatible with --abort-on-container-exit.",
+        help="Detached mode: Run container in the background, print new container name. \
+            Incompatible with --abort-on-container-exit.",
     )
     parser.add_argument(
         "--no-color", action="store_true", help="Produce monochrome output."
@@ -2573,7 +2801,8 @@ def compose_up_parse(parser):
         "--timeout",
         type=int,
         default=None,
-        help="Use this timeout in seconds for container shutdown when attached or when containers are already running. (default: 10)",
+        help="Use this timeout in seconds for container shutdown when attached or when containers are already running. \
+            (default: 10)",
     )
     parser.add_argument(
         "-V",
@@ -2620,6 +2849,9 @@ def compose_down_parse(parser):

 @cmd_parse(podman_compose, "run")
 def compose_run_parse(parser):
+    parser.add_argument(
+        "--build", action="store_true", help="Build images before starting containers."
+    )
     parser.add_argument(
         "-d",
         "--detach",
@@ -2882,6 +3114,9 @@ def compose_build_parse(parser):

 @cmd_parse(podman_compose, "config")
 def compose_config_parse(parser):
+    parser.add_argument(
+        "--no-normalize", help="Don't normalize compose model.", action="store_true"
+    )
     parser.add_argument(
         "--services", help="Print the service names, one per line.", action="store_true"
     )
@@ -2937,6 +3172,35 @@ def compose_kill_parse(parser):
     )


+@cmd_parse(podman_compose, ["stats"])
+def compose_stats_parse(parser):
+    parser.add_argument(
+        "services", metavar="services", nargs="*", default=None, help="service names"
+    )
+    parser.add_argument(
+        "-i",
+        "--interval",
+        type=int,
+        help="Time in seconds between stats reports (default 5)",
+    )
+    parser.add_argument(
+        "-f",
+        "--format",
+        type=str,
+        help="Pretty-print container statistics to JSON or using a Go template",
+    )
+    parser.add_argument(
+        "--no-reset",
+        help="Disable resetting the screen between intervals",
+        action="store_true",
+    )
+    parser.add_argument(
+        "--no-stream",
+        help="Disable streaming stats and only pull the first result",
+        action="store_true",
+    )
+
+
 def main():
     podman_compose.run()

168  pytests/test_can_merge_build.py  (new file)
@@ -0,0 +1,168 @@
import copy
import os
import argparse
import yaml
from podman_compose import normalize_service, PodmanCompose


test_cases_simple = [
    ({"test": "test"}, {"test": "test"}),
    ({"build": "."}, {"build": {"context": "."}}),
    ({"build": "./dir-1"}, {"build": {"context": "./dir-1"}}),
    ({"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
    (
        {"build": {"dockerfile": "dockerfile-1"}},
        {"build": {"dockerfile": "dockerfile-1"}},
    ),
    (
        {"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
        {"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
    ),
]


def test_normalize_service_simple():
    for test_case, expected in copy.deepcopy(test_cases_simple):
        test_original = copy.deepcopy(test_case)
        test_case = normalize_service(test_case)
        test_result = expected == test_case
        if not test_result:
            print("test: ", test_original)
            print("expected: ", expected)
            print("actual: ", test_case)
        assert test_result


test_cases_sub_dir = [
    ({"test": "test"}, {"test": "test"}),
    ({"build": "."}, {"build": {"context": "./sub_dir/."}}),
    ({"build": "./dir-1"}, {"build": {"context": "./sub_dir/dir-1"}}),
    ({"build": {"context": "./dir-1"}}, {"build": {"context": "./sub_dir/dir-1"}}),
    (
        {"build": {"dockerfile": "dockerfile-1"}},
        {"build": {"context": "./sub_dir", "dockerfile": "dockerfile-1"}},
    ),
    (
        {"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
        {"build": {"context": "./sub_dir/dir-1", "dockerfile": "dockerfile-1"}},
    ),
]


def test_normalize_service_with_sub_dir():
    for test_case, expected in copy.deepcopy(test_cases_sub_dir):
        test_original = copy.deepcopy(test_case)
        test_case = normalize_service(test_case, sub_dir="./sub_dir")
        test_result = expected == test_case
        if not test_result:
            print("test: ", test_original)
            print("expected: ", expected)
            print("actual: ", test_case)
        assert test_result


test_cases_merges = [
    ({}, {}, {}),
    ({}, {"test": "test"}, {"test": "test"}),
    ({"test": "test"}, {}, {"test": "test"}),
    ({"test": "test-1"}, {"test": "test-2"}, {"test": "test-2"}),
    ({}, {"build": "."}, {"build": {"context": "."}}),
    ({"build": "."}, {}, {"build": {"context": "."}}),
    ({"build": "./dir-1"}, {"build": "./dir-2"}, {"build": {"context": "./dir-2"}}),
    ({}, {"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
    ({"build": {"context": "./dir-1"}}, {}, {"build": {"context": "./dir-1"}}),
    (
        {"build": {"context": "./dir-1"}},
        {"build": {"context": "./dir-2"}},
        {"build": {"context": "./dir-2"}},
    ),
    (
        {},
        {"build": {"dockerfile": "dockerfile-1"}},
        {"build": {"dockerfile": "dockerfile-1"}},
    ),
    (
        {"build": {"dockerfile": "dockerfile-1"}},
        {},
        {"build": {"dockerfile": "dockerfile-1"}},
    ),
    (
        {"build": {"dockerfile": "./dockerfile-1"}},
        {"build": {"dockerfile": "./dockerfile-2"}},
        {"build": {"dockerfile": "./dockerfile-2"}},
    ),
    (
        {"build": {"dockerfile": "./dockerfile-1"}},
        {"build": {"context": "./dir-2"}},
        {"build": {"dockerfile": "./dockerfile-1", "context": "./dir-2"}},
    ),
    (
        {"build": {"dockerfile": "./dockerfile-1", "context": "./dir-1"}},
        {"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
        {"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
    ),
    (
        {"build": {"dockerfile": "./dockerfile-1"}},
        {"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
        {"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
    ),
    (
        {"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
        {"build": {"dockerfile": "./dockerfile-1"}},
        {"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1"]}},
    ),
    (
        {"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
        {"build": {"dockerfile": "./dockerfile-1", "args": ["ENV2=2"]}},
        {"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1", "ENV2=2"]}},
    ),
]


def test__parse_compose_file_when_multiple_composes() -> None:
    for test_input, test_override, expected_result in copy.deepcopy(test_cases_merges):
        compose_test_1 = {"services": {"test-service": test_input}}
        compose_test_2 = {"services": {"test-service": test_override}}
        dump_yaml(compose_test_1, "test-compose-1.yaml")
        dump_yaml(compose_test_2, "test-compose-2.yaml")

        podman_compose = PodmanCompose()
        set_args(podman_compose, ["test-compose-1.yaml", "test-compose-2.yaml"])

        podman_compose._parse_compose_file()  # pylint: disable=protected-access

        actual_compose = {}
        if podman_compose.services:
            podman_compose.services["test-service"].pop("_deps")
            actual_compose = podman_compose.services["test-service"]
        if actual_compose != expected_result:
            print("compose: ", test_input)
            print("override: ", test_override)
            print("expected: ", expected_result)
            print("actual: ", actual_compose)

        compose_expected = expected_result

        assert compose_expected == actual_compose


def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:
    podman_compose.global_args = argparse.Namespace()
    podman_compose.global_args.file = file_names
    podman_compose.global_args.project_name = None
    podman_compose.global_args.env_file = None
    podman_compose.global_args.profile = []
    podman_compose.global_args.in_pod = True
    podman_compose.global_args.no_normalize = True


def dump_yaml(compose: dict, name: str) -> None:
    with open(name, "w", encoding="utf-8") as outfile:
        yaml.safe_dump(compose, outfile, default_flow_style=False)


def test_clean_test_yamls() -> None:
    test_files = ["test-compose-1.yaml", "test-compose-2.yaml"]
    for file in test_files:
        if os.path.exists(file):
            os.remove(file)
122  pytests/test_can_merge_cmd_ent.py  (new file)
@@ -0,0 +1,122 @@
|
|||||||
|
import copy
|
||||||
|
import os
|
||||||
|
import argparse
|
||||||
|
import yaml
|
||||||
|
from podman_compose import normalize_service, PodmanCompose
|
||||||
|
|
||||||
|
test_keys = ["command", "entrypoint"]
|
||||||
|
|
||||||
|
test_cases_normalise_pre_merge = [
|
||||||
|
({"$$$": []}, {"$$$": []}),
|
||||||
|
({"$$$": ["sh"]}, {"$$$": ["sh"]}),
|
||||||
|
({"$$$": ["sh", "-c", "date"]}, {"$$$": ["sh", "-c", "date"]}),
|
||||||
|
({"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||||
|
({"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
|
||||||
|
(
|
||||||
|
{"$$$": "bash -c 'sleep infinity'"},
|
||||||
|
{"$$$": ["bash", "-c", "sleep infinity"]},
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
test_cases_merges = [
|
||||||
|
({}, {"$$$": []}, {"$$$": []}),
|
||||||
|
({"$$$": []}, {}, {"$$$": []}),
|
||||||
|
({"$$$": []}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||||
|
({"$$$": "sh-2"}, {"$$$": []}, {"$$$": []}),
|
||||||
|
({}, {"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||||
|
({"$$$": "sh"}, {}, {"$$$": ["sh"]}),
|
||||||
|
({"$$$": "sh-1"}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||||
|
({"$$$": ["sh-1"]}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||||
|
({"$$$": "sh-1"}, {"$$$": ["sh-2"]}, {"$$$": ["sh-2"]}),
|
||||||
|
({"$$$": "sh-1"}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
|
||||||
|
({"$$$": ["sh-1"]}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
|
||||||
|
({"$$$": ["sh-1", "sh-2"]}, {"$$$": ["sh-3", "sh-4"]}, {"$$$": ["sh-3", "sh-4"]}),
|
||||||
|
({}, {"$$$": ["sh-3", "sh 4"]}, {"$$$": ["sh-3", "sh 4"]}),
|
||||||
|
({"$$$": "sleep infinity"}, {"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||||
|
({"$$$": "sh"}, {"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
|
||||||
|
(
|
||||||
|
{},
|
||||||
|
{"$$$": "bash -c 'sleep infinity'"},
|
||||||
|
{"$$$": ["bash", "-c", "sleep infinity"]},
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def template_to_expression(base, override, expected, key):
|
||||||
|
base_copy = copy.deepcopy(base)
|
||||||
|
override_copy = copy.deepcopy(override)
|
||||||
|
expected_copy = copy.deepcopy(expected)
|
||||||
|
|
||||||
|
expected_copy[key] = expected_copy.pop("$$$")
|
||||||
|
if "$$$" in base:
|
||||||
|
base_copy[key] = base_copy.pop("$$$")
|
||||||
|
if "$$$" in override:
|
||||||
|
override_copy[key] = override_copy.pop("$$$")
|
||||||
|
return base_copy, override_copy, expected_copy
|
||||||
|
|
||||||
|
|
||||||
|
def test_normalize_service():
|
||||||
|
for test_input_template, expected_template in test_cases_normalise_pre_merge:
|
||||||
|
for key in test_keys:
|
||||||
|
test_input, _, expected = template_to_expression(
|
||||||
|
test_input_template, {}, expected_template, key
|
||||||
|
)
|
||||||
|
test_input = normalize_service(test_input)
|
||||||
|
test_result = expected == test_input
|
||||||
|
if not test_result:
|
||||||
|
print("base_template: ", test_input_template)
|
||||||
|
print("expected: ", expected)
|
||||||
|
print("actual: ", test_input)
|
||||||
|
assert test_result
|
||||||
|
|
||||||
|
|
||||||
|
def test__parse_compose_file_when_multiple_composes() -> None:
|
||||||
|
for base_template, override_template, expected_template in copy.deepcopy(
|
||||||
|
test_cases_merges
|
||||||
|
):
|
||||||
|
for key in test_keys:
|
||||||
|
base, override, expected = template_to_expression(
|
||||||
|
base_template, override_template, expected_template, key
|
||||||
|
)
|
||||||
|
compose_test_1 = {"services": {"test-service": base}}
|
||||||
|
compose_test_2 = {"services": {"test-service": override}}
|
||||||
|
dump_yaml(compose_test_1, "test-compose-1.yaml")
|
||||||
|
dump_yaml(compose_test_2, "test-compose-2.yaml")
|
||||||
|
|
||||||
|
podman_compose = PodmanCompose()
|
||||||
|
set_args(podman_compose, ["test-compose-1.yaml", "test-compose-2.yaml"])
|
||||||
|
|
||||||
|
podman_compose._parse_compose_file() # pylint: disable=protected-access
|
||||||
|
|
||||||
|
actual = {}
|
||||||
|
if podman_compose.services:
|
||||||
|
podman_compose.services["test-service"].pop("_deps")
|
||||||
|
actual = podman_compose.services["test-service"]
|
||||||
|
if actual != expected:
|
||||||
|
print("compose: ", base)
|
||||||
|
print("override: ", override)
|
||||||
|
print("result: ", expected)
|
||||||
|
|
||||||
|
assert expected == actual
|
||||||
|
|
||||||
|
|
||||||
|
def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:
|
||||||
|
podman_compose.global_args = argparse.Namespace()
|
||||||
|
podman_compose.global_args.file = file_names
|
||||||
|
podman_compose.global_args.project_name = None
|
||||||
|
podman_compose.global_args.env_file = None
|
||||||
|
podman_compose.global_args.profile = []
|
||||||
|
podman_compose.global_args.in_pod = True
|
||||||
|
podman_compose.global_args.no_normalize = None
|
||||||
|
|
||||||
|
|
||||||
|
def dump_yaml(compose: dict, name: str) -> None:
|
||||||
|
with open(name, "w", encoding="utf-8") as outfile:
|
||||||
|
yaml.safe_dump(compose, outfile, default_flow_style=False)
|
||||||
|
|
||||||
|
|
||||||
|
def test_clean_test_yamls() -> None:
|
||||||
|
test_files = ["test-compose-1.yaml", "test-compose-2.yaml"]
|
||||||
|
for file in test_files:
|
||||||
|
if os.path.exists(file):
|
||||||
|
os.remove(file)
|
298  pytests/test_normalize_final_build.py  (new file)
@@ -0,0 +1,298 @@
|
|||||||
|
# pylint: disable=protected-access
|
||||||
|
|
||||||
|
import argparse
|
import copy
import os

import yaml

from podman_compose import (
    normalize_service,
    normalize,
    normalize_final,
    normalize_service_final,
    PodmanCompose,
)

cwd = os.path.abspath(".")

test_cases_simple_normalization = [
    ({"image": "test-image"}, {"image": "test-image"}),
    (
        {"build": "."},
        {
            "build": {"context": cwd, "dockerfile": "Dockerfile"},
        },
    ),
    (
        {"build": "../relative"},
        {
            "build": {
                "context": os.path.normpath(os.path.join(cwd, "../relative")),
                "dockerfile": "Dockerfile",
            },
        },
    ),
    (
        {"build": "./relative"},
        {
            "build": {
                "context": os.path.normpath(os.path.join(cwd, "./relative")),
                "dockerfile": "Dockerfile",
            },
        },
    ),
    (
        {"build": "/workspace/absolute"},
        {
            "build": {
                "context": "/workspace/absolute",
                "dockerfile": "Dockerfile",
            },
        },
    ),
    (
        {
            "build": {
                "dockerfile": "Dockerfile",
            },
        },
        {
            "build": {
                "context": cwd,
                "dockerfile": "Dockerfile",
            },
        },
    ),
    (
        {
            "build": {
                "context": ".",
            },
        },
        {
            "build": {
                "context": cwd,
                "dockerfile": "Dockerfile",
            },
        },
    ),
    (
        {
            "build": {"context": "../", "dockerfile": "test-dockerfile"},
        },
        {
            "build": {
                "context": os.path.normpath(os.path.join(cwd, "../")),
                "dockerfile": "test-dockerfile",
            },
        },
    ),
    (
        {
            "build": {"context": ".", "dockerfile": "./dev/test-dockerfile"},
        },
        {
            "build": {
                "context": cwd,
                "dockerfile": "./dev/test-dockerfile",
            },
        },
    ),
]


#
# [service.build] is normalised after merges
#
def test_normalize_service_final_returns_absolute_path_in_context() -> None:
    project_dir = cwd
    for test_input, expected_service in copy.deepcopy(test_cases_simple_normalization):
        actual_service = normalize_service_final(test_input, project_dir)
        assert expected_service == actual_service


def test_normalize_returns_absolute_path_in_context() -> None:
    project_dir = cwd
    for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
        compose_test = {"services": {"test-service": test_input}}
        compose_expected = {"services": {"test-service": expected_result}}
        actual_compose = normalize_final(compose_test, project_dir)
        assert compose_expected == actual_compose


#
# running full parse over single compose files
#
def test__parse_compose_file_when_single_compose() -> None:
    for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
        compose_test = {"services": {"test-service": test_input}}
        dump_yaml(compose_test, "test-compose.yaml")

        podman_compose = PodmanCompose()
        set_args(podman_compose, ["test-compose.yaml"], no_normalize=None)

        podman_compose._parse_compose_file()

        actual_compose = {}
        if podman_compose.services:
            podman_compose.services["test-service"].pop("_deps")
            actual_compose = podman_compose.services["test-service"]
        if actual_compose != expected_result:
            print("compose: ", test_input)
            print("result: ", expected_result)

        assert expected_result == actual_compose


test_cases_with_merges = [
    (
        {},
        {"build": "."},
        {"build": {"context": cwd, "dockerfile": "Dockerfile"}},
    ),
    (
        {"build": "."},
        {},
        {"build": {"context": cwd, "dockerfile": "Dockerfile"}},
    ),
    (
        {"build": "/workspace/absolute"},
        {"build": "./relative"},
        {
            "build": {
                "context": os.path.normpath(os.path.join(cwd, "./relative")),
                "dockerfile": "Dockerfile",
            }
        },
    ),
    (
        {"build": "./relative"},
        {"build": "/workspace/absolute"},
        {"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
    ),
    (
        {"build": "./relative"},
        {"build": "/workspace/absolute"},
        {"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
    ),
    (
        {"build": {"dockerfile": "test-dockerfile"}},
        {},
        {"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
    ),
    (
        {},
        {"build": {"dockerfile": "test-dockerfile"}},
        {"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
    ),
    (
        {},
        {"build": {"dockerfile": "test-dockerfile"}},
        {"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
    ),
    (
        {"build": {"dockerfile": "test-dockerfile-1"}},
        {"build": {"dockerfile": "test-dockerfile-2"}},
        {"build": {"context": cwd, "dockerfile": "test-dockerfile-2"}},
    ),
    (
        {"build": "/workspace/absolute"},
        {"build": {"dockerfile": "test-dockerfile"}},
        {"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
    ),
    (
        {"build": {"dockerfile": "test-dockerfile"}},
        {"build": "/workspace/absolute"},
        {"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
    ),
    (
        {"build": {"dockerfile": "./test-dockerfile-1"}},
        {"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV1=1"]}},
        {
            "build": {
                "context": cwd,
                "dockerfile": "./test-dockerfile-2",
                "args": ["ENV1=1"],
            }
        },
    ),
    (
        {"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
        {"build": {"dockerfile": "./test-dockerfile-2"}},
        {
            "build": {
                "context": cwd,
                "dockerfile": "./test-dockerfile-2",
                "args": ["ENV1=1"],
            }
        },
    ),
    (
        {"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
        {"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV2=2"]}},
        {
            "build": {
                "context": cwd,
                "dockerfile": "./test-dockerfile-2",
                "args": ["ENV1=1", "ENV2=2"],
            }
        },
    ),
]


#
# running full parse over merged
#
def test__parse_compose_file_when_multiple_composes() -> None:
    for test_input, test_override, expected_result in copy.deepcopy(
        test_cases_with_merges
    ):
        compose_test_1 = {"services": {"test-service": test_input}}
        compose_test_2 = {"services": {"test-service": test_override}}
        dump_yaml(compose_test_1, "test-compose-1.yaml")
        dump_yaml(compose_test_2, "test-compose-2.yaml")

        podman_compose = PodmanCompose()
        set_args(
            podman_compose,
            ["test-compose-1.yaml", "test-compose-2.yaml"],
            no_normalize=None,
        )

        podman_compose._parse_compose_file()

        actual_compose = {}
        if podman_compose.services:
            podman_compose.services["test-service"].pop("_deps")
            actual_compose = podman_compose.services["test-service"]
        if actual_compose != expected_result:
            print("compose: ", test_input)
            print("override: ", test_override)
            print("result: ", expected_result)
        compose_expected = expected_result

        assert compose_expected == actual_compose


def set_args(
    podman_compose: PodmanCompose, file_names: list[str], no_normalize: bool
) -> None:
    podman_compose.global_args = argparse.Namespace()
    podman_compose.global_args.file = file_names
    podman_compose.global_args.project_name = None
    podman_compose.global_args.env_file = None
    podman_compose.global_args.profile = []
    podman_compose.global_args.in_pod = True
    podman_compose.global_args.no_normalize = no_normalize


def dump_yaml(compose: dict, name: str) -> None:
    # Path(Path.cwd()/"subdirectory").mkdir(parents=True, exist_ok=True)
    with open(name, "w", encoding="utf-8") as outfile:
        yaml.safe_dump(compose, outfile, default_flow_style=False)


def test_clean_test_yamls() -> None:
    test_files = ["test-compose-1.yaml", "test-compose-2.yaml", "test-compose.yaml"]
    for file in test_files:
        if os.path.exists(file):
            os.remove(file)
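One of the normalization cases above, written out as a standalone snippet for quick reference; a minimal sketch that keeps the tests' own assumption that the project directory is the current working directory:

```python
# Minimal sketch mirroring one test case above: a relative build context is
# expanded to an absolute path and a default Dockerfile is filled in.
import os

from podman_compose import normalize_service_final

project_dir = os.path.abspath(".")
service = {"build": "./relative"}
resolved = normalize_service_final(service, project_dir)

assert resolved["build"]["context"] == os.path.normpath(
    os.path.join(project_dir, "./relative")
)
assert resolved["build"]["dockerfile"] == "Dockerfile"
```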
@@ -1,3 +1,4 @@
+# pylint: disable=redefined-outer-name
 import pytest
 
 from podman_compose import parse_short_mount
@@ -3,3 +3,7 @@ universal = 1
 [metadata]
 version = attr: podman_compose.__version__
+
+[flake8]
+# The GitHub editor is 127 chars wide
+max-line-length=127
setup.py (13 changed lines)
@@ -2,24 +2,25 @@ import os
 from setuptools import setup
 
 try:
-    readme = open(os.path.join(os.path.dirname(__file__), "README.md")).read()
-except:
-    readme = ""
+    README = open(
+        os.path.join(os.path.dirname(__file__), "README.md"), encoding="utf-8"
+    ).read()
+except:  # noqa: E722 # pylint: disable=bare-except
+    README = ""
 
 setup(
     name="podman-compose",
     description="A script to run docker-compose.yml using podman",
-    long_description=readme,
+    long_description=README,
     long_description_content_type="text/markdown",
     classifiers=[
         "Programming Language :: Python",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.5",
-        "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
         "Intended Audience :: Developers",
         "Operating System :: OS Independent",
         "Development Status :: 3 - Alpha",
tests/build_fail/README.md (new file, 22 lines)
@@ -0,0 +1,22 @@
# Test podman-compose with build (fail scenario)

```shell
podman-compose build || echo $?
```

Expected output would be something like:

```
STEP 1/3: FROM busybox
STEP 2/3: RUN this_command_does_not_exist
/bin/sh: this_command_does_not_exist: not found
Error: building at STEP "RUN this_command_does_not_exist": while running runtime: exit status 127

exit code: 127
```

Expected `podman-compose` exit code:

```shell
echo $?
127
```
tests/build_fail/context/Dockerfile (new file, 3 lines)
@@ -0,0 +1,3 @@
FROM busybox
RUN this_command_does_not_exist
CMD ["sh"]
tests/build_fail/docker-compose.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
version: "3"
services:
  test:
    build: ./context
    image: build-fail-img
tests/conftest.py (new file, 26 lines)
@@ -0,0 +1,26 @@
"""conftest.py

Defines global pytest fixtures available to all tests.
"""
# pylint: disable=redefined-outer-name
from pathlib import Path
import os
import pytest


@pytest.fixture
def base_path():
    """Returns the base path for the project"""
    return Path(__file__).parent.parent


@pytest.fixture
def test_path(base_path):
    """Returns the path to the tests directory"""
    return os.path.join(base_path, "tests")


@pytest.fixture
def podman_compose_path(base_path):
    """Returns the path to the podman compose script"""
    return os.path.join(base_path, "podman_compose.py")
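The fixtures above are consumed simply by naming them as test parameters; pytest injects them from this conftest.py. A minimal sketch of a hypothetical test (not part of this change set) showing that usage:

```python
# Hypothetical test, for illustration only: pytest resolves the fixtures
# defined in tests/conftest.py by matching the parameter names.
import os


def test_paths_exist(podman_compose_path, test_path):
    # podman_compose_path points at podman_compose.py in the repo root,
    # test_path at the tests/ directory, per the fixtures above.
    assert os.path.isfile(podman_compose_path)
    assert os.path.isdir(test_path)
```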
tests/env-file-tests/README.md (new file, 9 lines)
@@ -0,0 +1,9 @@
Running the following commands should always give `podman-rocks-123`:

```
podman-compose -f project/container-compose.yaml --env-file env-files/project-1.env up
```

```
podman-compose -f $(pwd)/project/container-compose.yaml --env-file $(pwd)/env-files/project-1.env up
```
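The compose file below runs `env | grep ZZ` inside the container and `project-1.env` sets `ZZVAR1`, so the value the README promises can be checked mechanically. A hedged sketch of such a check (hypothetical, not part of this change set; assumes `podman-compose` is on PATH and the working directory is `tests/env-file-tests`):

```python
# Hypothetical check: run the compose file with the env file and confirm the
# value from project-1.env shows up in the podman-compose output.
import subprocess

cmd = [
    "podman-compose",
    "-f", "project/container-compose.yaml",
    "--env-file", "env-files/project-1.env",
    "up",
]
result = subprocess.run(cmd, capture_output=True, text=True, timeout=120, check=False)
assert "podman-rocks-123" in result.stdout + result.stderr
```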
tests/env-file-tests/env-files/project-1.env (new file, 1 line)
@@ -0,0 +1 @@
ZZVAR1=podman-rocks-123
tests/env-file-tests/project/container-compose.yaml (new file, 9 lines)
@@ -0,0 +1,9 @@
services:
  app:
    image: busybox
    command: ["/bin/busybox", "sh", "-c", "env | grep ZZ"]
    tmpfs:
      - /run
      - /tmp
    environment:
      ZZVAR1: $ZZVAR1
tests/extends_w_empty_service/common-services.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
services:
  webapp_default:

  webapp_special:
    image: busybox
    volumes:
      - "/data"
tests/extends_w_empty_service/docker-compose.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
version: "3"
services:
  web:
    image: busybox
    extends:
      file: common-services.yml
      service: webapp_default
    environment:
      - DEBUG=1
    cpu_shares: 5
tests/include/docker-compose.base.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
version: '3.6'

services:
  web:
    image: busybox
    command: ["/bin/busybox", "httpd", "-f", "-h", ".", "-p", "8003"]
tests/include/docker-compose.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
version: '3.6'

include:
  - docker-compose.base.yaml
tests/profile/docker-compose.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
version: "3"
services:
  default-service:
    image: busybox
    command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
    tmpfs:
      - /run
      - /tmp
  service-1:
    image: busybox
    command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
    tmpfs:
      - /run
      - /tmp
    profiles:
      - profile-1
  service-2:
    image: busybox
    command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
    tmpfs:
      - /run
      - /tmp
    profiles:
      - profile-2
@@ -46,16 +46,39 @@ def test_podman_compose_extends_w_file_subdir():
         "docker.io/library/busybox",
     ]
 
-    out, err, returncode = capture(command_up)
+    out, _, returncode = capture(command_up)
     assert 0 == returncode
     # check container was created and exists
-    out, err, returncode = capture(command_check_container)
+    out, _, returncode = capture(command_check_container)
     assert 0 == returncode
     assert out == b'"localhost/subdir_test:me"\n'
-    out, err, returncode = capture(command_down)
+    out, _, returncode = capture(command_down)
     # cleanup test image(tags)
     assert 0 == returncode
     # check container did not exists anymore
-    out, err, returncode = capture(command_check_container)
+    out, _, returncode = capture(command_check_container)
     assert 0 == returncode
     assert out == b""
+
+
+def test_podman_compose_extends_w_empty_service():
+    """
+    Test that podman-compose can execute podman-compose -f <file> up with an extends file
+    which includes an empty service (e.g. if the file is used as a placeholder for more complex configurations).
+    :return:
+    """
+    main_path = Path(__file__).parent.parent
+
+    command_up = [
+        "python3",
+        str(main_path.joinpath("podman_compose.py")),
+        "-f",
+        str(
+            main_path.joinpath("tests", "extends_w_empty_service", "docker-compose.yml")
+        ),
+        "up",
+        "-d",
+    ]
+
+    _, _, returncode = capture(command_up)
+    assert 0 == returncode
tests/test_podman_compose_config.py (new file, 78 lines)
@@ -0,0 +1,78 @@
"""
test_podman_compose_config.py

Tests the podman-compose config command which is used to return defined compose services.
"""
# pylint: disable=redefined-outer-name
import os

from test_podman_compose import capture

import pytest


@pytest.fixture
def profile_compose_file(test_path):
    """Returns the path to the `profile` compose file used for this test module"""
    return os.path.join(test_path, "profile", "docker-compose.yml")


def test_config_no_profiles(podman_compose_path, profile_compose_file):
    """
    Tests the podman-compose config command without profile enablement.

    :param podman_compose_path: The fixture used to specify the path to the podman compose script.
    :param profile_compose_file: The fixture used to specify the path to the "profile" compose file used in the test.
    """
    config_cmd = ["python3", podman_compose_path, "-f", profile_compose_file, "config"]

    out, _, return_code = capture(config_cmd)
    assert return_code == 0

    string_output = out.decode("utf-8")
    assert "default-service" in string_output
    assert "service-1" not in string_output
    assert "service-2" not in string_output


@pytest.mark.parametrize(
    "profiles, expected_services",
    [
        (
            ["--profile", "profile-1", "config"],
            {"default-service": True, "service-1": True, "service-2": False},
        ),
        (
            ["--profile", "profile-2", "config"],
            {"default-service": True, "service-1": False, "service-2": True},
        ),
        (
            ["--profile", "profile-1", "--profile", "profile-2", "config"],
            {"default-service": True, "service-1": True, "service-2": True},
        ),
    ],
)
def test_config_profiles(
    podman_compose_path, profile_compose_file, profiles, expected_services
):
    """
    Tests the podman-compose config command with profile enablement.

    :param podman_compose_path: The fixture used to specify the path to the podman compose script.
    :param profile_compose_file: The fixture used to specify the path to the "profile" compose file used in the test.
    :param profiles: The enabled profiles for the parameterized test.
    :param expected_services: Dictionary used to model the expected "enabled" services in the profile.
        Key = service name, Value = True if the service is enabled, otherwise False.
    """
    config_cmd = ["python3", podman_compose_path, "-f", profile_compose_file]
    config_cmd.extend(profiles)

    out, _, return_code = capture(config_cmd)
    assert return_code == 0

    actual_output = out.decode("utf-8")

    assert len(expected_services) == 3

    actual_services = {}
    for service, _ in expected_services.items():
        actual_services[service] = service in actual_output

    assert expected_services == actual_services
tests/test_podman_compose_include.py (new file, 71 lines)
@@ -0,0 +1,71 @@
from pathlib import Path
import subprocess


def capture(command):
    proc = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, err = proc.communicate()
    return out, err, proc.returncode


def test_podman_compose_include():
    """
    Test that podman-compose can execute podman-compose -f <file> up with include
    :return:
    """
    main_path = Path(__file__).parent.parent

    command_up = [
        "python3",
        str(main_path.joinpath("podman_compose.py")),
        "-f",
        str(main_path.joinpath("tests", "include", "docker-compose.yaml")),
        "up",
        "-d",
    ]

    command_check_container = [
        "podman",
        "ps",
        "-a",
        "--filter",
        "label=io.podman.compose.project=include",
        "--format",
        '"{{.Image}}"',
    ]

    command_container_id = [
        "podman",
        "ps",
        "-a",
        "--filter",
        "label=io.podman.compose.project=include",
        "--format",
        '"{{.ID}}"',
    ]

    command_down = ["podman", "rm", "--force", "CONTAINER_ID"]

    out, _, returncode = capture(command_up)
    assert 0 == returncode
    out, _, returncode = capture(command_check_container)
    assert 0 == returncode
    assert out == b'"docker.io/library/busybox:latest"\n'
    # Get container ID to remove it
    out, _, returncode = capture(command_container_id)
    assert 0 == returncode
    assert out != b""
    container_id = out.decode().strip().replace('"', "")
    command_down[3] = container_id
    out, _, returncode = capture(command_down)
    # cleanup test image(tags)
    assert 0 == returncode
    assert out != b""
    # check container did not exists anymore
    out, _, returncode = capture(command_check_container)
    assert 0 == returncode
    assert out == b""
tests/test_podman_compose_up_down.py (new file, 89 lines)
@@ -0,0 +1,89 @@
"""
test_podman_compose_up_down.py

Tests the podman compose up and down commands used to create and remove services.
"""
# pylint: disable=redefined-outer-name
import os

from test_podman_compose import capture

import pytest


@pytest.fixture
def profile_compose_file(test_path):
    """Returns the path to the `profile` compose file used for this test module"""
    return os.path.join(test_path, "profile", "docker-compose.yml")


@pytest.fixture(autouse=True)
def teardown(podman_compose_path, profile_compose_file):
    """
    Ensures that the services within the "profile compose file" are removed between each test case.

    :param podman_compose_path: The path to the podman compose script.
    :param profile_compose_file: The path to the compose file used for this test module.
    """
    # run the test case
    yield

    down_cmd = [
        "python3",
        podman_compose_path,
        "--profile",
        "profile-1",
        "--profile",
        "profile-2",
        "-f",
        profile_compose_file,
        "down",
    ]
    capture(down_cmd)


@pytest.mark.parametrize(
    "profiles, expected_services",
    [
        (
            ["--profile", "profile-1", "up", "-d"],
            {"default-service": True, "service-1": True, "service-2": False},
        ),
        (
            ["--profile", "profile-2", "up", "-d"],
            {"default-service": True, "service-1": False, "service-2": True},
        ),
        (
            ["--profile", "profile-1", "--profile", "profile-2", "up", "-d"],
            {"default-service": True, "service-1": True, "service-2": True},
        ),
    ],
)
def test_up(podman_compose_path, profile_compose_file, profiles, expected_services):
    up_cmd = [
        "python3",
        podman_compose_path,
        "-f",
        profile_compose_file,
    ]
    up_cmd.extend(profiles)

    out, _, return_code = capture(up_cmd)
    assert return_code == 0

    check_cmd = [
        "podman",
        "container",
        "ps",
        "--format",
        '"{{.Names}}"',
    ]
    out, _, return_code = capture(check_cmd)
    assert return_code == 0

    assert len(expected_services) == 3
    actual_output = out.decode("utf-8")

    actual_services = {}
    for service, _ in expected_services.items():
        actual_services[service] = service in actual_output

    assert expected_services == actual_services
tests/uidmaps/docker-compose.yml (new file, 15 lines)
@@ -0,0 +1,15 @@
version: "3.7"
services:
  touch:
    image: busybox
    command: 'touch /mnt/test'
    volumes:
      - ./:/mnt
    user: 999:999
    x-podman:
      uidmaps:
        - "0:1:1"
        - "999:0:1"
      gidmaps:
        - "0:1:1"
        - "999:0:1"
tests/volumes_merge/docker-compose.override.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
version: "3"
services:
  web:
    volumes:
      - ./override.txt:/var/www/html/index.html:ro,z
      - ./override.txt:/var/www/html/index2.html:z
      - ./override.txt:/var/www/html/index3.html
tests/volumes_merge/docker-compose.yaml (new file, 11 lines)
@@ -0,0 +1,11 @@
version: "3"
services:
  web:
    image: busybox
    command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8080"]
    ports:
      - 8080:8080
    volumes:
      - ./index.txt:/var/www/html/index.html:ro,z
      - ./index.txt:/var/www/html/index2.html
      - ./index.txt:/var/www/html/index3.html:ro
tests/volumes_merge/index.txt (new file, 1 line)
@@ -0,0 +1 @@
The file from docker-compose.yaml
tests/volumes_merge/override.txt (new file, 1 line)
@@ -0,0 +1 @@
The file from docker-compose.override.yaml