forked from extern/podman-compose

Compare commits: devel...revert-574 (2 commits: 1f4a4d2184, 08a453d643)

.github/workflows/pylint.yml (vendored): 2 changes
@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
python-version: ["3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}

@@ -9,9 +9,6 @@ repos:
# https://pre-commit.com/#top_level-default_language_version
language_version: python3.10
types: [python]
args: [
"--check", # Don't apply changes automatically
]
- repo: https://github.com/pycqa/flake8
rev: 6.0.0
hooks:

@@ -1,7 +1,7 @@
---
- name: Manage AWX Container Images
block:
- name: Export Docker awx image if it isn't local and there isn't a registry defined
- name: Export Docker awx image if it isnt local and there isnt a registry defined
docker_image:
name: "{{ awx_image }}"
tag: "{{ awx_version }}"

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#! /usr/bin/python3
# -*- coding: utf-8 -*-

# https://docs.docker.com/compose/compose-file/#service-configuration-reference
@@ -220,7 +220,7 @@ def fix_mount_dict(compose, mount_dict, proj_name, srv_name):
vol = (vols.get(source, None) or {}) if source else {}
name = vol.get("name", None)
mount_dict["_vol"] = vol
# handle anonymous or implied volume
# handle anonymouse or implied volume
if not source:
# missing source
vol["name"] = "_".join(
@@ -591,7 +591,7 @@ def get_secret_args(compose, cnt, secret):
# docker-compose does not support external secrets outside of swarm mode.
# However accessing these via podman is trivial
# since these commands are directly translated to
# podman-create commands, albeit we can only support a 1:1 mapping
# podman-create commands, albiet we can only support a 1:1 mapping
# at the moment
if declared_secret.get("external", False) or declared_secret.get("name", None):
secret_opts += f",uid={uid}" if uid else ""
@@ -618,7 +618,7 @@ def get_secret_args(compose, cnt, secret):
return ["--secret", "{}{}".format(secret_name, secret_opts)]

raise ValueError(
'ERROR: unparsable secret: "{}", service: "{}"'.format(
'ERROR: unparseable secret: "{}", service: "{}"'.format(
secret_name, cnt["_service"]
)
)
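As an aside on the secret handling in the hunk above: the comment notes that external compose secrets are passed straight through to podman. A minimal sketch of that 1:1 mapping, assuming a hypothetical helper name (`secret_to_podman_args` is not part of podman_compose.py):

```python
# Hypothetical helper, for illustration only: build the `--secret` argument
# that podman-create accepts for an external compose secret.
def secret_to_podman_args(secret_name, uid=None):
    # podman accepts `--secret <name>[,opt=value,...]`; uid is optional.
    opts = f",uid={uid}" if uid else ""
    return ["--secret", f"{secret_name}{opts}"]

# e.g. ["--secret", "db_password,uid=1000"]
print(secret_to_podman_args("db_password", uid="1000"))
```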
@@ -758,7 +758,7 @@ def assert_cnt_nets(compose, cnt):
driver_opts = net_desc.get("driver_opts", None) or {}
for key, value in driver_opts.items():
args.extend(("--opt", f"{key}={value}"))
ipam = net_desc.get("ipam", None) or {}
ipam = (net_desc.get("ipam", None) or {})
ipam_driver = ipam.get("driver", None)
if ipam_driver:
args.extend(("--ipam-driver", ipam_driver))
@@ -823,18 +823,16 @@ def get_net_args(compose, cnt):
ip = None
ip6 = None
ip_assignments = 0
if cnt.get("_aliases", None):
aliases.extend(cnt.get("_aliases", None))
if cnt_nets and is_dict(cnt_nets):
prioritized_cnt_nets = []
# cnt_nets is {net_key: net_value, ...}
for net_key, net_value in cnt_nets.items():
net_value = net_value or {}
aliases.extend(norm_as_list(net_value.get("aliases", None)))
if net_value.get("ipv4_address", None) is not None:
ip_assignments = ip_assignments + 1
if net_value.get("ipv6_address", None) is not None:
ip_assignments = ip_assignments + 1
if net_value.get("ipv4_address", None) != None:
ip_assignments = ip_assignments + 1
if net_value.get("ipv6_address", None) != None:
ip_assignments = ip_assignments + 1

if not ip:
ip = net_value.get("ipv4_address", None)
@ -864,37 +862,33 @@ def get_net_args(compose, cnt):
|
||||
net_names_str = ",".join(net_names)
|
||||
|
||||
if ip_assignments > 1:
|
||||
multiple_nets = cnt.get("networks", None)
|
||||
multiple_net_names = multiple_nets.keys()
|
||||
multipleNets = cnt.get("networks", None)
|
||||
multipleNetNames = multipleNets.keys()
|
||||
|
||||
for net_ in multiple_net_names:
|
||||
net_desc = nets[net_] or {}
|
||||
is_ext = net_desc.get("external", None)
|
||||
ext_desc = is_ext if is_dict(is_ext) else {}
|
||||
default_net_name = net_ if is_ext else f"{proj_name}_{net_}"
|
||||
net_name = (
|
||||
ext_desc.get("name", None)
|
||||
or net_desc.get("name", None)
|
||||
or default_net_name
|
||||
)
|
||||
for net_ in multipleNetNames:
|
||||
net_desc = nets[net_] or {}
|
||||
is_ext = net_desc.get("external", None)
|
||||
ext_desc = is_ext if is_dict(is_ext) else {}
|
||||
default_net_name = net_ if is_ext else f"{proj_name}_{net_}"
|
||||
net_name = (
|
||||
ext_desc.get("name", None) or net_desc.get("name", None) or default_net_name
|
||||
)
|
||||
|
||||
ipv4 = multiple_nets[net_].get("ipv4_address", None)
|
||||
ipv6 = multiple_nets[net_].get("ipv6_address", None)
|
||||
if ipv4 is not None and ipv6 is not None:
|
||||
net_args.extend(["--network", f"{net_name}:ip={ipv4},ip={ipv6}"])
|
||||
elif ipv4 is None and ipv6 is not None:
|
||||
net_args.extend(["--network", f"{net_name}:ip={ipv6}"])
|
||||
elif ipv6 is None and ipv4 is not None:
|
||||
net_args.extend(["--network", f"{net_name}:ip={ipv4}"])
|
||||
ipv4 = multipleNets[net_].get("ipv4_address",None)
|
||||
ipv6 = multipleNets[net_].get("ipv6_address",None)
|
||||
if ipv4 is not None and ipv6 is not None:
|
||||
net_args.extend(["--network", f"{net_name}:ip={ipv4},ip={ipv6}"])
|
||||
elif ipv4 is None and ipv6 is not None:
|
||||
net_args.extend(["--network", f"{net_name}:ip={ipv6}"])
|
||||
elif ipv6 is None and ipv4 is not None:
|
||||
net_args.extend(["--network", f"{net_name}:ip={ipv4}"])
|
||||
else:
|
||||
if is_bridge:
|
||||
net_args.extend(
|
||||
["--net", net_names_str, "--network-alias", ",".join(aliases)]
|
||||
)
|
||||
if ip:
|
||||
net_args.append(f"--ip={ip}")
|
||||
if ip6:
|
||||
net_args.append(f"--ip6={ip6}")
|
||||
if is_bridge:
|
||||
net_args.extend(["--net", net_names_str, "--network-alias", ",".join(aliases)])
|
||||
if ip:
|
||||
net_args.append(f"--ip={ip}")
|
||||
if ip6:
|
||||
net_args.append(f"--ip6={ip6}")
|
||||
return net_args
|
||||
|
||||
|
||||
@@ -1030,7 +1024,7 @@ def container_to_args(compose, cnt, detached=True):
# WIP: healthchecks are still work in progress
healthcheck = cnt.get("healthcheck", None) or {}
if not is_dict(healthcheck):
raise ValueError("'healthcheck' must be a key-value mapping")
raise ValueError("'healthcheck' must be an key-value mapping")
healthcheck_disable = healthcheck.get("disable", False)
healthcheck_test = healthcheck.get("test", None)
if healthcheck_disable:
@@ -1140,12 +1134,6 @@ def flat_deps(services, with_extends=False):
if not is_list(links_ls):
links_ls = [links_ls]
deps.update([(c.split(":")[0] if ":" in c else c) for c in links_ls])
for c in links_ls:
if ":" in c:
dep_name, dep_alias = c.split(":")
if not "_aliases" in services[dep_name]:
services[dep_name]["_aliases"] = set()
services[dep_name]["_aliases"].add(dep_alias)
for name, srv in services.items():
rec_deps(services, name)

@@ -1245,13 +1233,12 @@ class Podman:


def normalize_service(service, sub_dir=""):
if "build" in service:
build = service["build"]
if is_str(build):
service["build"] = {"context": build}
# make `build.context` relative to sub_dir
# TODO: should we make volume and secret relative too?
if sub_dir and "build" in service:
build = service["build"]
context = build.get("context", None) or ""
context = build if is_str(build) else build.get("context", None)
context = context or ""
if context or sub_dir:
if context.startswith("./"):
context = context[2:]
@@ -1260,11 +1247,10 @@ def normalize_service(service, sub_dir=""):
context = context.rstrip("/")
if not context:
context = "."
service["build"]["context"] = context
for key in ("command", "entrypoint"):
if key in service:
if is_str(service[key]):
service[key] = shlex.split(service[key])
if is_str(build):
service["build"] = context
else:
service["build"]["context"] = context
for key in ("env_file", "security_opt", "volumes"):
if key not in service:
continue
@@ -1297,30 +1283,6 @@ def normalize(compose):
return compose


def normalize_service_final(service: dict, project_dir: str) -> dict:
if "build" in service:
build = service["build"]
context = build if is_str(build) else build.get("context", ".")
context = os.path.normpath(os.path.join(project_dir, context))
dockerfile = (
"Dockerfile"
if is_str(build)
else service["build"].get("dockerfile", "Dockerfile")
)
if not is_dict(service["build"]):
service["build"] = {}
service["build"]["dockerfile"] = dockerfile
service["build"]["context"] = context
return service


def normalize_final(compose: dict, project_dir: str) -> dict:
services = compose.get("services", None) or {}
for service in services.values():
normalize_service_final(service, project_dir)
return compose


def clone(value):
return value.copy() if is_list(value) or is_dict(value) else value

@@ -1341,14 +1303,14 @@ def rec_merge_one(target, source):
if key not in source:
continue
value2 = source[key]
if key in ("command", "entrypoint"):
if key == "command":
target[key] = clone(value2)
continue
if not isinstance(value2, type(value)):
value_type = type(value)
value2_type = type(value2)
raise ValueError(
f"can't merge value of [{key}] of type {value_type} and {value2_type}"
f"can't merge value of {key} of type {value_type} and {value2_type}"
)
if is_list(value2):
if key == "volumes":
@@ -1399,7 +1361,7 @@ def resolve_extends(services, service_names, environ):
content = content["services"]
subdirectory = os.path.dirname(filename)
content = rec_subs(content, environ)
from_service = content.get(from_service_name, {}) or {}
from_service = content.get(from_service_name, {})
normalize_service(from_service, subdirectory)
else:
from_service = services.get(from_service_name, {}).copy()
@@ -1521,9 +1483,7 @@ class PodmanCompose:
if compose_required:
self._parse_compose_file()
cmd = self.commands[cmd_name]
retcode = cmd(self, args)
if isinstance(retcode, int):
sys.exit(retcode)
cmd(self, args)

def _parse_compose_file(self):
args = self.global_args
@@ -1562,16 +1522,11 @@ class PodmanCompose:
dirname = os.path.realpath(os.path.dirname(filename))
dir_basename = os.path.basename(dirname)
self.dirname = dirname

# env-file is relative to the CWD
dotenv_dict = {}
if args.env_file:
dotenv_path = os.path.realpath(args.env_file)
dotenv_dict = dotenv_to_dict(dotenv_path)

# TODO: remove next line
os.chdir(dirname)

dotenv_path = os.path.join(dirname, args.env_file)
dotenv_dict = dotenv_to_dict(dotenv_path)
os.environ.update(
{
key: value
@@ -1591,15 +1546,7 @@ class PodmanCompose:
}
)
compose = {}
# Iterate over files primitively to allow appending to files in-loop
files_iter = iter(files)

while True:
try:
filename = next(files_iter)
except StopIteration:
break

for filename in files:
with open(filename, "r", encoding="utf-8") as f:
content = yaml.safe_load(f)
# log(filename, json.dumps(content, indent = 2))
@@ -1613,22 +1560,10 @@ class PodmanCompose:
# log(filename, json.dumps(content, indent = 2))
content = rec_subs(content, self.environ)
rec_merge(compose, content)
# If `include` is used, append included files to files
include = compose.get("include", None)
if include:
files.append(*include)
# As compose obj is updated and tested with every loop, not deleting `include`
# from it, results in it being tested again and again, original values for
# `include` be appended to `files`, and, included files be processed for ever.
# Solution is to remove 'include' key from compose obj. This doesn't break
# having `include` present and correctly processed in included files
del compose["include"]
resolved_services = self._resolve_profiles(
compose.get("services", {}), set(args.profile)
)
compose["services"] = resolved_services
if not getattr(args, "no_normalize", None):
compose = normalize_final(compose, self.dirname)
self.merged_yaml = yaml.safe_dump(compose)
merged_json_b = json.dumps(compose, separators=(",", ":")).encode("utf-8")
self.yaml_hash = hashlib.sha256(merged_json_b).hexdigest()
@@ -1643,8 +1578,7 @@ class PodmanCompose:
if project_name is None:
# More strict then actually needed for simplicity: podman requires [a-zA-Z0-9][a-zA-Z0-9_.-]*
project_name = (
self.environ.get("COMPOSE_PROJECT_NAME", None)
or dir_basename.lower()
self.environ.get("COMPOSE_PROJECT_NAME", None) or dir_basename.lower()
)
project_name = norm_re.sub("", project_name)
if not project_name:
@@ -2089,7 +2023,7 @@ def compose_push(compose, args):

def build_one(compose, args, cnt):
if "build" not in cnt:
return None
return
if getattr(args, "if_not_exists", None):
try:
img_id = compose.podman.output(
@@ -2098,7 +2032,7 @@ def build_one(compose, args, cnt):
except subprocess.CalledProcessError:
img_id = None
if img_id:
return None
return
build_desc = cnt["build"]
if not hasattr(build_desc, "items"):
build_desc = {"context": build_desc}
@ -2135,10 +2069,6 @@ def build_one(compose, args, cnt):
|
||||
build_args.append("--pull-always")
|
||||
elif getattr(args, "pull", None):
|
||||
build_args.append("--pull")
|
||||
env = dict(cnt.get("environment", {}))
|
||||
for name, value in env.items():
|
||||
build_args += ["--env", f"{name}" if value is None else f"{name}={value}"]
|
||||
|
||||
args_list = norm_as_list(build_desc.get("args", {}))
|
||||
for build_arg in args_list + args.build_arg:
|
||||
build_args.extend(
|
||||
@ -2154,31 +2084,17 @@ def build_one(compose, args, cnt):
|
||||
|
||||
@cmd_run(podman_compose, "build", "build stack images")
|
||||
def compose_build(compose, args):
|
||||
# keeps the status of the last service/container built
|
||||
status = 0
|
||||
|
||||
def parse_return_code(obj, current_status):
|
||||
if obj and obj.returncode != 0:
|
||||
return obj.returncode
|
||||
return current_status
|
||||
|
||||
if args.services:
|
||||
container_names_by_service = compose.container_names_by_service
|
||||
compose.assert_services(args.services)
|
||||
for service in args.services:
|
||||
cnt = compose.container_by_name[container_names_by_service[service][0]]
|
||||
p = build_one(compose, args, cnt)
|
||||
status = parse_return_code(p, status)
|
||||
if status != 0:
|
||||
return status
|
||||
exit(p.returncode)
|
||||
else:
|
||||
for cnt in compose.containers:
|
||||
p = build_one(compose, args, cnt)
|
||||
status = parse_return_code(p, status)
|
||||
if status != 0:
|
||||
return status
|
||||
|
||||
return status
|
||||
exit(p.returncode)
|
||||
|
||||
|
||||
def create_pods(compose, args): # pylint: disable=unused-argument
|
||||
@ -2440,15 +2356,6 @@ def compose_run(compose, args):
|
||||
)
|
||||
)
|
||||
compose.commands["up"](compose, up_args)
|
||||
|
||||
build_args = argparse.Namespace(
|
||||
services=[args.service],
|
||||
if_not_exists=(not args.build),
|
||||
build_arg=[],
|
||||
**args.__dict__,
|
||||
)
|
||||
compose.commands["build"](compose, build_args)
|
||||
|
||||
# adjust one-off container options
|
||||
name0 = "{}_{}_tmp{}".format(
|
||||
compose.project_name, args.service, random.randrange(0, 65536)
|
||||
@ -2691,37 +2598,6 @@ def compose_kill(compose, args):
|
||||
compose.podman.run([], "kill", podman_args)
|
||||
|
||||
|
||||
@cmd_run(
|
||||
podman_compose,
|
||||
"stats",
|
||||
"Display percentage of CPU, memory, network I/O, block I/O and PIDs for services.",
|
||||
)
|
||||
def compose_stats(compose, args):
|
||||
container_names_by_service = compose.container_names_by_service
|
||||
if not args.services:
|
||||
args.services = container_names_by_service.keys()
|
||||
targets = []
|
||||
podman_args = []
|
||||
if args.interval:
|
||||
podman_args.extend(["--interval", args.interval])
|
||||
if args.format:
|
||||
podman_args.extend(["--format", args.format])
|
||||
if args.no_reset:
|
||||
podman_args.append("--no-reset")
|
||||
if args.no_stream:
|
||||
podman_args.append("--no-stream")
|
||||
|
||||
for service in args.services:
|
||||
targets.extend(container_names_by_service[service])
|
||||
for target in targets:
|
||||
podman_args.append(target)
|
||||
|
||||
try:
|
||||
compose.podman.run([], "stats", podman_args)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
|
||||
###################
|
||||
# command arguments parsing
|
||||
###################
|
||||
@ -2849,9 +2725,6 @@ def compose_down_parse(parser):
|
||||
|
||||
@cmd_parse(podman_compose, "run")
|
||||
def compose_run_parse(parser):
|
||||
parser.add_argument(
|
||||
"--build", action="store_true", help="Build images before starting containers."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--detach",
|
||||
@ -3114,9 +2987,6 @@ def compose_build_parse(parser):
|
||||
|
||||
@cmd_parse(podman_compose, "config")
|
||||
def compose_config_parse(parser):
|
||||
parser.add_argument(
|
||||
"--no-normalize", help="Don't normalize compose model.", action="store_true"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--services", help="Print the service names, one per line.", action="store_true"
|
||||
)
|
||||
@ -3172,35 +3042,6 @@ def compose_kill_parse(parser):
|
||||
)
|
||||
|
||||
|
||||
@cmd_parse(podman_compose, ["stats"])
|
||||
def compose_stats_parse(parser):
|
||||
parser.add_argument(
|
||||
"services", metavar="services", nargs="*", default=None, help="service names"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-i",
|
||||
"--interval",
|
||||
type=int,
|
||||
help="Time in seconds between stats reports (default 5)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--format",
|
||||
type=str,
|
||||
help="Pretty-print container statistics to JSON or using a Go template",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-reset",
|
||||
help="Disable resetting the screen between intervals",
|
||||
action="store_true",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-stream",
|
||||
help="Disable streaming stats and only pull the first result",
|
||||
action="store_true",
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
podman_compose.run()
|
||||
|
||||
|
@ -1,168 +0,0 @@
|
||||
import copy
|
||||
import os
|
||||
import argparse
|
||||
import yaml
|
||||
from podman_compose import normalize_service, PodmanCompose
|
||||
|
||||
|
||||
test_cases_simple = [
|
||||
({"test": "test"}, {"test": "test"}),
|
||||
({"build": "."}, {"build": {"context": "."}}),
|
||||
({"build": "./dir-1"}, {"build": {"context": "./dir-1"}}),
|
||||
({"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
|
||||
(
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
|
||||
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_normalize_service_simple():
|
||||
for test_case, expected in copy.deepcopy(test_cases_simple):
|
||||
test_original = copy.deepcopy(test_case)
|
||||
test_case = normalize_service(test_case)
|
||||
test_result = expected == test_case
|
||||
if not test_result:
|
||||
print("test: ", test_original)
|
||||
print("expected: ", expected)
|
||||
print("actual: ", test_case)
|
||||
assert test_result
|
||||
|
||||
|
||||
test_cases_sub_dir = [
|
||||
({"test": "test"}, {"test": "test"}),
|
||||
({"build": "."}, {"build": {"context": "./sub_dir/."}}),
|
||||
({"build": "./dir-1"}, {"build": {"context": "./sub_dir/dir-1"}}),
|
||||
({"build": {"context": "./dir-1"}}, {"build": {"context": "./sub_dir/dir-1"}}),
|
||||
(
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{"build": {"context": "./sub_dir", "dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
|
||||
{"build": {"context": "./sub_dir/dir-1", "dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_normalize_service_with_sub_dir():
|
||||
for test_case, expected in copy.deepcopy(test_cases_sub_dir):
|
||||
test_original = copy.deepcopy(test_case)
|
||||
test_case = normalize_service(test_case, sub_dir="./sub_dir")
|
||||
test_result = expected == test_case
|
||||
if not test_result:
|
||||
print("test: ", test_original)
|
||||
print("expected: ", expected)
|
||||
print("actual: ", test_case)
|
||||
assert test_result
|
||||
|
||||
|
||||
test_cases_merges = [
|
||||
({}, {}, {}),
|
||||
({}, {"test": "test"}, {"test": "test"}),
|
||||
({"test": "test"}, {}, {"test": "test"}),
|
||||
({"test": "test-1"}, {"test": "test-2"}, {"test": "test-2"}),
|
||||
({}, {"build": "."}, {"build": {"context": "."}}),
|
||||
({"build": "."}, {}, {"build": {"context": "."}}),
|
||||
({"build": "./dir-1"}, {"build": "./dir-2"}, {"build": {"context": "./dir-2"}}),
|
||||
({}, {"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
|
||||
({"build": {"context": "./dir-1"}}, {}, {"build": {"context": "./dir-1"}}),
|
||||
(
|
||||
{"build": {"context": "./dir-1"}},
|
||||
{"build": {"context": "./dir-2"}},
|
||||
{"build": {"context": "./dir-2"}},
|
||||
),
|
||||
(
|
||||
{},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"context": "./dir-2"}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-2"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1"]}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV2=2"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1", "ENV2=2"]}},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test__parse_compose_file_when_multiple_composes() -> None:
|
||||
for test_input, test_override, expected_result in copy.deepcopy(test_cases_merges):
|
||||
compose_test_1 = {"services": {"test-service": test_input}}
|
||||
compose_test_2 = {"services": {"test-service": test_override}}
|
||||
dump_yaml(compose_test_1, "test-compose-1.yaml")
|
||||
dump_yaml(compose_test_2, "test-compose-2.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(podman_compose, ["test-compose-1.yaml", "test-compose-2.yaml"])
|
||||
|
||||
podman_compose._parse_compose_file() # pylint: disable=protected-access
|
||||
|
||||
actual_compose = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual_compose = podman_compose.services["test-service"]
|
||||
if actual_compose != expected_result:
|
||||
print("compose: ", test_input)
|
||||
print("override: ", test_override)
|
||||
print("expected: ", expected_result)
|
||||
print("actual: ", actual_compose)
|
||||
|
||||
compose_expected = expected_result
|
||||
|
||||
assert compose_expected == actual_compose
|
||||
|
||||
|
||||
def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:
|
||||
podman_compose.global_args = argparse.Namespace()
|
||||
podman_compose.global_args.file = file_names
|
||||
podman_compose.global_args.project_name = None
|
||||
podman_compose.global_args.env_file = None
|
||||
podman_compose.global_args.profile = []
|
||||
podman_compose.global_args.in_pod = True
|
||||
podman_compose.global_args.no_normalize = True
|
||||
|
||||
|
||||
def dump_yaml(compose: dict, name: str) -> None:
|
||||
with open(name, "w", encoding="utf-8") as outfile:
|
||||
yaml.safe_dump(compose, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def test_clean_test_yamls() -> None:
|
||||
test_files = ["test-compose-1.yaml", "test-compose-2.yaml"]
|
||||
for file in test_files:
|
||||
if os.path.exists(file):
|
||||
os.remove(file)
|
@ -1,122 +0,0 @@
|
||||
import copy
|
||||
import os
|
||||
import argparse
|
||||
import yaml
|
||||
from podman_compose import normalize_service, PodmanCompose
|
||||
|
||||
test_keys = ["command", "entrypoint"]
|
||||
|
||||
test_cases_normalise_pre_merge = [
|
||||
({"$$$": []}, {"$$$": []}),
|
||||
({"$$$": ["sh"]}, {"$$$": ["sh"]}),
|
||||
({"$$$": ["sh", "-c", "date"]}, {"$$$": ["sh", "-c", "date"]}),
|
||||
({"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
|
||||
(
|
||||
{"$$$": "bash -c 'sleep infinity'"},
|
||||
{"$$$": ["bash", "-c", "sleep infinity"]},
|
||||
),
|
||||
]
|
||||
|
||||
test_cases_merges = [
|
||||
({}, {"$$$": []}, {"$$$": []}),
|
||||
({"$$$": []}, {}, {"$$$": []}),
|
||||
({"$$$": []}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": "sh-2"}, {"$$$": []}, {"$$$": []}),
|
||||
({}, {"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sh"}, {}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sh-1"}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": ["sh-1"]}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": "sh-1"}, {"$$$": ["sh-2"]}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": "sh-1"}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
|
||||
({"$$$": ["sh-1"]}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
|
||||
({"$$$": ["sh-1", "sh-2"]}, {"$$$": ["sh-3", "sh-4"]}, {"$$$": ["sh-3", "sh-4"]}),
|
||||
({}, {"$$$": ["sh-3", "sh 4"]}, {"$$$": ["sh-3", "sh 4"]}),
|
||||
({"$$$": "sleep infinity"}, {"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sh"}, {"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
|
||||
(
|
||||
{},
|
||||
{"$$$": "bash -c 'sleep infinity'"},
|
||||
{"$$$": ["bash", "-c", "sleep infinity"]},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def template_to_expression(base, override, expected, key):
|
||||
base_copy = copy.deepcopy(base)
|
||||
override_copy = copy.deepcopy(override)
|
||||
expected_copy = copy.deepcopy(expected)
|
||||
|
||||
expected_copy[key] = expected_copy.pop("$$$")
|
||||
if "$$$" in base:
|
||||
base_copy[key] = base_copy.pop("$$$")
|
||||
if "$$$" in override:
|
||||
override_copy[key] = override_copy.pop("$$$")
|
||||
return base_copy, override_copy, expected_copy
|
||||
|
||||
|
||||
def test_normalize_service():
|
||||
for test_input_template, expected_template in test_cases_normalise_pre_merge:
|
||||
for key in test_keys:
|
||||
test_input, _, expected = template_to_expression(
|
||||
test_input_template, {}, expected_template, key
|
||||
)
|
||||
test_input = normalize_service(test_input)
|
||||
test_result = expected == test_input
|
||||
if not test_result:
|
||||
print("base_template: ", test_input_template)
|
||||
print("expected: ", expected)
|
||||
print("actual: ", test_input)
|
||||
assert test_result
|
||||
|
||||
|
||||
def test__parse_compose_file_when_multiple_composes() -> None:
|
||||
for base_template, override_template, expected_template in copy.deepcopy(
|
||||
test_cases_merges
|
||||
):
|
||||
for key in test_keys:
|
||||
base, override, expected = template_to_expression(
|
||||
base_template, override_template, expected_template, key
|
||||
)
|
||||
compose_test_1 = {"services": {"test-service": base}}
|
||||
compose_test_2 = {"services": {"test-service": override}}
|
||||
dump_yaml(compose_test_1, "test-compose-1.yaml")
|
||||
dump_yaml(compose_test_2, "test-compose-2.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(podman_compose, ["test-compose-1.yaml", "test-compose-2.yaml"])
|
||||
|
||||
podman_compose._parse_compose_file() # pylint: disable=protected-access
|
||||
|
||||
actual = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual = podman_compose.services["test-service"]
|
||||
if actual != expected:
|
||||
print("compose: ", base)
|
||||
print("override: ", override)
|
||||
print("result: ", expected)
|
||||
|
||||
assert expected == actual
|
||||
|
||||
|
||||
def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:
|
||||
podman_compose.global_args = argparse.Namespace()
|
||||
podman_compose.global_args.file = file_names
|
||||
podman_compose.global_args.project_name = None
|
||||
podman_compose.global_args.env_file = None
|
||||
podman_compose.global_args.profile = []
|
||||
podman_compose.global_args.in_pod = True
|
||||
podman_compose.global_args.no_normalize = None
|
||||
|
||||
|
||||
def dump_yaml(compose: dict, name: str) -> None:
|
||||
with open(name, "w", encoding="utf-8") as outfile:
|
||||
yaml.safe_dump(compose, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def test_clean_test_yamls() -> None:
|
||||
test_files = ["test-compose-1.yaml", "test-compose-2.yaml"]
|
||||
for file in test_files:
|
||||
if os.path.exists(file):
|
||||
os.remove(file)
|
@ -1,298 +0,0 @@
|
||||
# pylint: disable=protected-access
|
||||
|
||||
import argparse
|
||||
import copy
|
||||
import os
|
||||
import yaml
|
||||
from podman_compose import (
|
||||
normalize_service,
|
||||
normalize,
|
||||
normalize_final,
|
||||
normalize_service_final,
|
||||
PodmanCompose,
|
||||
)
|
||||
|
||||
cwd = os.path.abspath(".")
|
||||
test_cases_simple_normalization = [
|
||||
({"image": "test-image"}, {"image": "test-image"}),
|
||||
(
|
||||
{"build": "."},
|
||||
{
|
||||
"build": {"context": cwd, "dockerfile": "Dockerfile"},
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "../relative"},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "../relative")),
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "./relative"},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "./relative")),
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "/workspace/absolute"},
|
||||
{
|
||||
"build": {
|
||||
"context": "/workspace/absolute",
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {
|
||||
"context": ".",
|
||||
},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {"context": "../", "dockerfile": "test-dockerfile"},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "../")),
|
||||
"dockerfile": "test-dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {"context": ".", "dockerfile": "./dev/test-dockerfile"},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./dev/test-dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
#
|
||||
# [service.build] is normalised after merges
|
||||
#
|
||||
def test_normalize_service_final_returns_absolute_path_in_context() -> None:
|
||||
project_dir = cwd
|
||||
for test_input, expected_service in copy.deepcopy(test_cases_simple_normalization):
|
||||
actual_service = normalize_service_final(test_input, project_dir)
|
||||
assert expected_service == actual_service
|
||||
|
||||
|
||||
def test_normalize_returns_absolute_path_in_context() -> None:
|
||||
project_dir = cwd
|
||||
for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
|
||||
compose_test = {"services": {"test-service": test_input}}
|
||||
compose_expected = {"services": {"test-service": expected_result}}
|
||||
actual_compose = normalize_final(compose_test, project_dir)
|
||||
assert compose_expected == actual_compose
|
||||
|
||||
|
||||
#
|
||||
# running full parse over single compose files
|
||||
#
|
||||
def test__parse_compose_file_when_single_compose() -> None:
|
||||
for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
|
||||
compose_test = {"services": {"test-service": test_input}}
|
||||
dump_yaml(compose_test, "test-compose.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(podman_compose, ["test-compose.yaml"], no_normalize=None)
|
||||
|
||||
podman_compose._parse_compose_file()
|
||||
|
||||
actual_compose = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual_compose = podman_compose.services["test-service"]
|
||||
if actual_compose != expected_result:
|
||||
print("compose: ", test_input)
|
||||
print("result: ", expected_result)
|
||||
|
||||
assert expected_result == actual_compose
|
||||
|
||||
|
||||
test_cases_with_merges = [
|
||||
(
|
||||
{},
|
||||
{"build": "."},
|
||||
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": "."},
|
||||
{},
|
||||
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": "./relative"},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "./relative")),
|
||||
"dockerfile": "Dockerfile",
|
||||
}
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "./relative"},
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": "./relative"},
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{},
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{},
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "test-dockerfile-1"}},
|
||||
{"build": {"dockerfile": "test-dockerfile-2"}},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile-2"}},
|
||||
),
|
||||
(
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./test-dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./test-dockerfile-2",
|
||||
"args": ["ENV1=1"],
|
||||
}
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./test-dockerfile-2"}},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./test-dockerfile-2",
|
||||
"args": ["ENV1=1"],
|
||||
}
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV2=2"]}},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./test-dockerfile-2",
|
||||
"args": ["ENV1=1", "ENV2=2"],
|
||||
}
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
#
|
||||
# running full parse over merged
|
||||
#
|
||||
def test__parse_compose_file_when_multiple_composes() -> None:
|
||||
for test_input, test_override, expected_result in copy.deepcopy(
|
||||
test_cases_with_merges
|
||||
):
|
||||
compose_test_1 = {"services": {"test-service": test_input}}
|
||||
compose_test_2 = {"services": {"test-service": test_override}}
|
||||
dump_yaml(compose_test_1, "test-compose-1.yaml")
|
||||
dump_yaml(compose_test_2, "test-compose-2.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(
|
||||
podman_compose,
|
||||
["test-compose-1.yaml", "test-compose-2.yaml"],
|
||||
no_normalize=None,
|
||||
)
|
||||
|
||||
podman_compose._parse_compose_file()
|
||||
|
||||
actual_compose = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual_compose = podman_compose.services["test-service"]
|
||||
if actual_compose != expected_result:
|
||||
print("compose: ", test_input)
|
||||
print("override: ", test_override)
|
||||
print("result: ", expected_result)
|
||||
compose_expected = expected_result
|
||||
|
||||
assert compose_expected == actual_compose
|
||||
|
||||
|
||||
def set_args(
|
||||
podman_compose: PodmanCompose, file_names: list[str], no_normalize: bool
|
||||
) -> None:
|
||||
podman_compose.global_args = argparse.Namespace()
|
||||
podman_compose.global_args.file = file_names
|
||||
podman_compose.global_args.project_name = None
|
||||
podman_compose.global_args.env_file = None
|
||||
podman_compose.global_args.profile = []
|
||||
podman_compose.global_args.in_pod = True
|
||||
podman_compose.global_args.no_normalize = no_normalize
|
||||
|
||||
|
||||
def dump_yaml(compose: dict, name: str) -> None:
|
||||
# Path(Path.cwd()/"subdirectory").mkdir(parents=True, exist_ok=True)
|
||||
with open(name, "w", encoding="utf-8") as outfile:
|
||||
yaml.safe_dump(compose, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def test_clean_test_yamls() -> None:
|
||||
test_files = ["test-compose-1.yaml", "test-compose-2.yaml", "test-compose.yaml"]
|
||||
for file in test_files:
|
||||
if os.path.exists(file):
|
||||
os.remove(file)
|
setup.py: 3 changes

@@ -16,11 +16,12 @@ setup(
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",

@@ -1,22 +0,0 @@
# Test podman-compose with build (fail scenario)

```shell
podman-compose build || echo $?
```

expected output would be something like

```
STEP 1/3: FROM busybox
STEP 2/3: RUN this_command_does_not_exist
/bin/sh: this_command_does_not_exist: not found
Error: building at STEP "RUN this_command_does_not_exist": while running runtime: exit status 127

exit code: 127
```

Expected `podman-compose` exit code:
```shell
echo $?
127
```
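A minimal sketch of how the exit-code expectation above could be checked from Python, assuming `podman-compose` is on PATH; this snippet is illustrative and not taken from the repository's test suite:

```python
# Run the intentionally failing build and assert that podman-compose
# propagates the builder's non-zero exit code instead of swallowing it.
import subprocess

proc = subprocess.run(["podman-compose", "build"], check=False)
assert proc.returncode != 0  # the README above expects 127 ("command not found")
```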
@@ -1,3 +0,0 @@
FROM busybox
RUN this_command_does_not_exist
CMD ["sh"]
@@ -1,5 +0,0 @@
version: "3"
services:
test:
build: ./context
image: build-fail-img
@@ -2,10 +2,9 @@

Defines global pytest fixtures available to all tests.
"""
# pylint: disable=redefined-outer-name
import pytest
from pathlib import Path
import os
import pytest


@pytest.fixture
@@ -1,9 +0,0 @@
running the following commands should always give podman-rocks-123

```
podman-compose -f project/container-compose.yaml --env-file env-files/project-1.env up
```

```
podman-compose -f $(pwd)/project/container-compose.yaml --env-file $(pwd)/env-files/project-1.env up
```
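A minimal sketch contrasting the two `--env-file` resolution strategies that appear in the `_parse_compose_file` hunk earlier in this diff; the paths and variable names here are assumptions for illustration:

```python
# Resolving --env-file against the current working directory (one side of the
# diff) keeps a relative path valid even after the code later chdir()s into
# the compose file's directory; joining with that directory (the other side)
# only works when the env file lives next to the compose file.
import os

env_file = "env-files/project-1.env"   # as passed via --env-file
compose_dir = "/path/to/project"       # hypothetical compose file directory

resolved_from_cwd = os.path.realpath(env_file)
resolved_from_dir = os.path.join(compose_dir, env_file)
print(resolved_from_cwd, resolved_from_dir)
```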
@@ -1 +0,0 @@
ZZVAR1=podman-rocks-123
@@ -1,9 +0,0 @@
services:
app:
image: busybox
command: ["/bin/busybox", "sh", "-c", "env | grep ZZ"]
tmpfs:
- /run
- /tmp
environment:
ZZVAR1: $ZZVAR1
@@ -1,7 +0,0 @@
services:
webapp_default:

webapp_special:
image: busybox
volumes:
- "/data"
@@ -1,10 +0,0 @@
version: "3"
services:
web:
image: busybox
extends:
file: common-services.yml
service: webapp_default
environment:
- DEBUG=1
cpu_shares: 5
@@ -1,7 +0,0 @@
version: '3.6'

services:
web:
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", ".", "-p", "8003"]

@@ -1,4 +0,0 @@
version: '3.6'

include:
- docker-compose.base.yaml
@ -59,26 +59,3 @@ def test_podman_compose_extends_w_file_subdir():
|
||||
out, _, returncode = capture(command_check_container)
|
||||
assert 0 == returncode
|
||||
assert out == b""
|
||||
|
||||
|
||||
def test_podman_compose_extends_w_empty_service():
|
||||
"""
|
||||
Test that podman-compose can execute podman-compose -f <file> up with extended File which
|
||||
includes an empty service. (e.g. if the file is used as placeholder for more complex configurations.)
|
||||
:return:
|
||||
"""
|
||||
main_path = Path(__file__).parent.parent
|
||||
|
||||
command_up = [
|
||||
"python3",
|
||||
str(main_path.joinpath("podman_compose.py")),
|
||||
"-f",
|
||||
str(
|
||||
main_path.joinpath("tests", "extends_w_empty_service", "docker-compose.yml")
|
||||
),
|
||||
"up",
|
||||
"-d",
|
||||
]
|
||||
|
||||
_, _, returncode = capture(command_up)
|
||||
assert 0 == returncode
|
||||
|
@ -3,10 +3,9 @@ test_podman_compose_config.py
|
||||
|
||||
Tests the podman-compose config command which is used to return defined compose services.
|
||||
"""
|
||||
# pylint: disable=redefined-outer-name
|
||||
import pytest
|
||||
import os
|
||||
from test_podman_compose import capture
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -24,7 +23,7 @@ def test_config_no_profiles(podman_compose_path, profile_compose_file):
|
||||
"""
|
||||
config_cmd = ["python3", podman_compose_path, "-f", profile_compose_file, "config"]
|
||||
|
||||
out, _, return_code = capture(config_cmd)
|
||||
out, err, return_code = capture(config_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
string_output = out.decode("utf-8")
|
||||
@ -64,7 +63,7 @@ def test_config_profiles(
|
||||
config_cmd = ["python3", podman_compose_path, "-f", profile_compose_file]
|
||||
config_cmd.extend(profiles)
|
||||
|
||||
out, _, return_code = capture(config_cmd)
|
||||
out, err, return_code = capture(config_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
actual_output = out.decode("utf-8")
|
||||
@ -72,7 +71,7 @@ def test_config_profiles(
|
||||
assert len(expected_services) == 3
|
||||
|
||||
actual_services = {}
|
||||
for service, _ in expected_services.items():
|
||||
for service, expected_check in expected_services.items():
|
||||
actual_services[service] = service in actual_output
|
||||
|
||||
assert expected_services == actual_services
|
||||
|
@ -1,71 +0,0 @@
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
|
||||
|
||||
def capture(command):
|
||||
proc = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
out, err = proc.communicate()
|
||||
return out, err, proc.returncode
|
||||
|
||||
|
||||
def test_podman_compose_include():
|
||||
"""
|
||||
Test that podman-compose can execute podman-compose -f <file> up with include
|
||||
:return:
|
||||
"""
|
||||
main_path = Path(__file__).parent.parent
|
||||
|
||||
command_up = [
|
||||
"python3",
|
||||
str(main_path.joinpath("podman_compose.py")),
|
||||
"-f",
|
||||
str(main_path.joinpath("tests", "include", "docker-compose.yaml")),
|
||||
"up",
|
||||
"-d",
|
||||
]
|
||||
|
||||
command_check_container = [
|
||||
"podman",
|
||||
"ps",
|
||||
"-a",
|
||||
"--filter",
|
||||
"label=io.podman.compose.project=include",
|
||||
"--format",
|
||||
'"{{.Image}}"',
|
||||
]
|
||||
|
||||
command_container_id = [
|
||||
"podman",
|
||||
"ps",
|
||||
"-a",
|
||||
"--filter",
|
||||
"label=io.podman.compose.project=include",
|
||||
"--format",
|
||||
'"{{.ID}}"',
|
||||
]
|
||||
|
||||
command_down = ["podman", "rm", "--force", "CONTAINER_ID"]
|
||||
|
||||
out, _, returncode = capture(command_up)
|
||||
assert 0 == returncode
|
||||
out, _, returncode = capture(command_check_container)
|
||||
assert 0 == returncode
|
||||
assert out == b'"docker.io/library/busybox:latest"\n'
|
||||
# Get container ID to remove it
|
||||
out, _, returncode = capture(command_container_id)
|
||||
assert 0 == returncode
|
||||
assert out != b""
|
||||
container_id = out.decode().strip().replace('"', "")
|
||||
command_down[3] = container_id
|
||||
out, _, returncode = capture(command_down)
|
||||
# cleanup test image(tags)
|
||||
assert 0 == returncode
|
||||
assert out != b""
|
||||
# check container did not exists anymore
|
||||
out, _, returncode = capture(command_check_container)
|
||||
assert 0 == returncode
|
||||
assert out == b""
|
@ -3,10 +3,9 @@ test_podman_compose_up_down.py
|
||||
|
||||
Tests the podman compose up and down commands used to create and remove services.
|
||||
"""
|
||||
# pylint: disable=redefined-outer-name
|
||||
import pytest
|
||||
import os
|
||||
from test_podman_compose import capture
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -66,7 +65,7 @@ def test_up(podman_compose_path, profile_compose_file, profiles, expected_servic
|
||||
]
|
||||
up_cmd.extend(profiles)
|
||||
|
||||
out, _, return_code = capture(up_cmd)
|
||||
out, err, return_code = capture(up_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
check_cmd = [
|
||||
@ -76,14 +75,14 @@ def test_up(podman_compose_path, profile_compose_file, profiles, expected_servic
|
||||
"--format",
|
||||
'"{{.Names}}"',
|
||||
]
|
||||
out, _, return_code = capture(check_cmd)
|
||||
out, err, return_code = capture(check_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
assert len(expected_services) == 3
|
||||
actual_output = out.decode("utf-8")
|
||||
|
||||
actual_services = {}
|
||||
for service, _ in expected_services.items():
|
||||
for service, expected_check in expected_services.items():
|
||||
actual_services[service] = service in actual_output
|
||||
|
||||
assert expected_services == actual_services
|
||||
|