3 Commits

Author SHA1 Message Date
1b85ebe506 version 2024-02-06 00:26:51 +03:00
9ed05a23ef add TQDM and implement pool.join 2024-02-06 00:26:51 +03:00
d475260951 add TQDM and implement pool.join 2024-02-06 00:26:51 +03:00
64 changed files with 1632 additions and 3033 deletions

View File

@@ -1,4 +0,0 @@
[codespell]
skip = .git,*.pdf,*.svg,requirements.txt,test-requirements.txt
# poped - loved variable name
ignore-words-list = poped

View File

@@ -1,19 +0,0 @@
root = true
[*]
indent_style = space
indent_size = tab
tab_width = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
max_line_length = 100
[*.{yml,yaml}]
indent_style = space
indent_size = 2
[*.py]
indent_style = space

View File

@@ -1,7 +0,0 @@
## Contributor Checklist:
If this PR adds a new feature that improves compatibility with docker-compose, please add a link
to the exact part of compose spec that the PR touches.
All changes require additional unit tests.

View File

@@ -1,20 +0,0 @@
---
name: Codespell
on:
push:
pull_request:
permissions:
contents: read
jobs:
codespell:
name: Check for spelling errors
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Codespell
uses: codespell-project/actions-codespell@v2

.github/workflows/pylint.yml
View File

@@ -0,0 +1,41 @@
name: Pylint
on:
- push
- pull_request
jobs:
lint-black:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install psf/black requirements
run: |
sudo apt-get update
sudo apt-get install -y python3 python3-venv
- uses: psf/black@stable
with:
options: "--check --verbose"
version: "~= 23.3"
lint-pylint:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
pip install pylint
- name: Analysing the code with pylint
run: |
python -m compileall podman_compose.py
pylint podman_compose.py
# pylint $(git ls-files '*.py')

.github/workflows/pytest.yml
View File

@@ -0,0 +1,39 @@
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: PyTest
on:
push:
branches: [ devel ]
pull_request:
branches: [ devel ]
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: |
sudo apt update && sudo apt install -y podman
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f test-requirements.txt ]; then pip install -r test-requirements.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
coverage run --source podman_compose -m pytest ./pytests
python -m pytest ./tests
coverage combine
coverage report

View File

@@ -1,25 +0,0 @@
name: Static checks
on:
- push
- pull_request
jobs:
static-checks:
runs-on: ubuntu-latest
container:
image: docker.io/library/python:3.11-bookworm
# cgroupns needed to address the following error:
# write /sys/fs/cgroup/cgroup.subtree_control: operation not supported
options: --privileged --cgroupns=host
steps:
- uses: actions/checkout@v4
- name: Analysing the code with ruff
run: |
set -e
pip install -r test-requirements.txt
ruff format --check
ruff check
- name: Analysing the code with pylint
run: |
pylint podman_compose.py

View File

@@ -1,40 +0,0 @@
name: Tests
on:
push:
pull_request:
jobs:
test:
strategy:
fail-fast: false
matrix:
python-version: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
runs-on: ubuntu-latest
container:
image: "docker.io/library/python:${{ matrix.python-version }}-bookworm"
# cgroupns needed to address the following error:
# write /sys/fs/cgroup/cgroup.subtree_control: operation not supported
options: --privileged --cgroupns=host
steps:
- uses: actions/checkout@v4
- name: Install dependencies
run: |
set -e
apt update && apt install -y podman
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f test-requirements.txt ]; then pip install -r test-requirements.txt; fi
- name: Run tests in tests/
run: |
python -m unittest tests/*.py
env:
TESTS_DEBUG: 1
- name: Run tests in pytests/
run: |
coverage run --source podman_compose -m unittest pytests/*.py
- name: Report coverage
run: |
coverage combine
coverage report --format=markdown | tee -a $GITHUB_STEP_SUMMARY

.gitignore
View File

@@ -47,8 +47,6 @@ coverage.xml
*.cover
.hypothesis/
.pytest_cache/
test-compose.yaml
test-compose-?.yaml
# Translations
*.mo

View File

@@ -30,7 +30,3 @@ repos:
"-sn", # Don't display the score
"--rcfile=.pylintrc", # Link to your config file
]
- repo: https://github.com/codespell-project/codespell
rev: v2.2.5
hooks:
- id: codespell

View File

@@ -1,89 +1,70 @@
# Contributing to podman-compose
## Who can contribute?
- Users that found a bug,
- Users that want to propose new functionalities or enhancements,
- Users that want to help other users to troubleshoot their environments,
- Developers that want to fix bugs,
- Developers that want to implement new functionalities or enhancements.
- Users that found a bug
- Users that want to propose new functionalities or enhancements
- Users that want to help other users to troubleshoot their environments
- Developers that want to fix bugs
- Developers that want to implement new functionalities or enhancements
## Branches
Please request your pull request to be merged into the `devel` branch.
Please request your PR to be merged into the `devel` branch.
Changes to the `stable` branch are managed by the repository maintainers.
## Development environment setup
Note: Some steps are OPTIONAL but all are RECOMMENDED.
1. Fork the project repository and clone it:
```shell
$ git clone https://github.com/USERNAME/podman-compose.git
$ cd podman-compose
```
2. (OPTIONAL) Create a Python virtual environment. Example using
[virtualenv wrapper](https://virtualenvwrapper.readthedocs.io/en/latest/):
```shell
$ mkvirtualenv podman-compose
```
3. Install the project runtime and development requirements:
```shell
$ pip install '.[devel]'
```
4. (OPTIONAL) Install `pre-commit` git hook scripts
(https://pre-commit.com/#3-install-the-git-hook-scripts):
```shell
$ pre-commit install
```
5. Create a new branch, develop and add tests when possible.
6. Run linting and testing before committing code. Ensure all the hooks are passing.
```shell
$ pre-commit run --all-files
```
7. Run code coverage:
```shell
$ coverage run --source podman_compose -m unittest pytests/*.py
$ python -m unittest tests/*.py
$ coverage combine
$ coverage report
$ coverage html
```
8. Commit your code to your fork's branch.
- Make sure you include a `Signed-off-by` message in your commits.
Read [this guide](https://github.com/containers/common/blob/main/CONTRIBUTING.md#sign-your-prs)
to learn how to sign your commits.
- In the commit message, reference the Issue ID that your code fixes and a brief description of
the changes.
Example: `Fixes #516: Allow empty network`
9. Open a pull request to `containers/podman-compose:devel` and wait for a maintainer to review your
work.
1. Fork the project repo and clone it
```shell
$ git clone https://github.com/USERNAME/podman-compose.git
$ cd podman-compose
```
2. (OPTIONAL) Create a Python virtual environment. Example using [virtualenv wrapper](https://virtualenvwrapper.readthedocs.io/en/latest/):
```shell
mkvirtualenv podman-compose
```
3. Install the project runtime and development requirements
```shell
$ pip install '.[devel]'
```
4. (OPTIONAL) Install `pre-commit` git hook scripts (https://pre-commit.com/#3-install-the-git-hook-scripts)
```shell
$ pre-commit install
```
5. Create a new branch, develop and add tests when possible
6. Run linting & testing before committing code. Ensure all the hooks are passing.
```shell
$ pre-commit run --all-files
```
7. Run code coverage
```shell
coverage run --source podman_compose -m pytest ./pytests
python -m pytest ./tests
coverage combine
coverage report
coverage html
```
8. Commit your code to your fork's branch.
- Make sure you include a `Signed-off-by` message in your commits. Read [this guide](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) to learn how to sign your commits.
- In the commit message, reference the Issue ID that your code fixes and a brief description of the changes. Example: `Fixes #516: allow empty network`
9. Open a PR to `containers/podman-compose:devel` and wait for a maintainer to review your work.
## Adding new commands
To add a command, you need to add a function that is decorated with `@cmd_run`.
To add a command you need to add a function that is decorated with `@cmd_run`, passing the
compose instance, command name and description. This function must be declared `async`; the
wrapped function should accept two arguments: the compose instance and the command-specific
arguments (resulting from Python's `argparse` package). Inside that command you can run Podman
like this: `await compose.podman.run(['inspect', 'something'])`, and inside that function you can
access `compose.pods`, `compose.containers`, etc. Here is an example:
The decorated function must be declared `async` and should accept two arguments: the compose
instance and the command-specific arguments (resulting from Python's `argparse` package).
In this function, you can run Podman (e.g. `await compose.podman.run(['inspect', 'something'])`),
access `compose.pods`, `compose.containers` etc.
Here is an example:
```python
@cmd_run(podman_compose, 'build', 'build images defined in the stack')
async def compose_build(compose, args):
await compose.podman.run(['build', 'something'])
@@ -91,36 +72,31 @@ async def compose_build(compose, args):
## Command arguments parsing
To add arguments to be parsed by a command, you need to add a function that is decorated with
`@cmd_parse` which accepts the compose instance and the command's name (as a string list or as a
single string).
Add a function that accepts `parser`, which is an instance from `argparse`.
Inside that function you can call `parser.add_argument()`.
The function decorated with `@cmd_parse` accepts the compose instance
and command names (as a list or as a string).
You can do this multiple times.
The decorated function should accept a single argument: An instance of `argparse`.
Here is an example
In this function, you can call `parser.add_argument()` to add a new argument to the command.
Note you can add such a function multiple times.
Here is an example:
```python
@cmd_parse(podman_compose, 'build')
def compose_build_parse(parser):
parser.add_argument("--pull",
help="attempt to pull a newer version of the image", action='store_true')
parser.add_argument("--pull-always",
help="Attempt to pull a newer version of the image, "
"raise an error even if the image is present locally.",
action='store_true')
help="attempt to pull a newer version of the image, Raise an error even if the image is present locally.", action='store_true')
```
NOTE: `@cmd_parse` should be after `@cmd_run`.
NOTE: `@cmd_parse` should be after `@cmd_run`
## Calling a command from another one
## Calling a command from inside another
If you need to call `podman-compose down` from `podman-compose up`, do something like:
If you need to call `podman-compose down` from inside `podman-compose up`
do something like:
```python
@cmd_run(podman_compose, 'up', 'up desc')
async def compose_up(compose, args):
await compose.commands['down'](compose, args)
@@ -128,14 +104,19 @@ async def compose_up(compose, args):
await compose.commands['down'](argparse.Namespace(foo=123))
```
## Missing Commands (help needed)
```
bundle Generate a Docker bundle from the Compose file
config Validate and view the Compose file
create Create services
events Receive real time events from containers
images List images
logs View output from containers
port Print the public port for a port binding
ps List containers
rm Remove stopped containers
run Run a one-off command
scale Set number of containers for a service
top Display the running processes
```
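As a rough sketch, one of these missing commands could be scaffolded with the decorators described above (hypothetical, untested code; the exact podman arguments and option names are illustrative only):

```python
# Sketch only: wiring up a hypothetical `images` command with the documented
# @cmd_run / @cmd_parse decorators. Note that @cmd_parse comes after @cmd_run.
@cmd_run(podman_compose, 'images', 'list images used by the current stack')
async def compose_images(compose, args):
    # compose.podman.run() is the same helper used in the examples above; the
    # label filter mirrors the io.podman.compose.project label the tool applies.
    await compose.podman.run([
        'images', '--filter', f'label=io.podman.compose.project={compose.project_name}'
    ])

@cmd_parse(podman_compose, 'images')
def compose_images_parse(parser):
    parser.add_argument("--quiet", "-q", help="only display image IDs", action='store_true')
```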

View File

@@ -1,5 +1,6 @@
# Podman Compose
## [![Tests](https://github.com/containers/podman-compose/actions/workflows/test.yml/badge.svg)](https://github.com/containers/podman-compose/actions/workflows/test.yml)
## [![Pylint Test: ](https://github.com/containers/podman-compose/actions/workflows/pylint.yml/badge.svg)](https://github.com/containers/podman-compose/actions/workflows/pylint.yml) [![Unit tests PyTest](https://github.com/containers/podman-compose/actions/workflows/pytest.yml/badge.svg)](https://github.com/containers/podman-compose/actions/workflows/pytest.yml)
An implementation of [Compose Spec](https://compose-spec.io/) with [Podman](https://podman.io/) backend.
This project focuses on:
@@ -102,11 +103,11 @@ There is also AWX 17.1.0
Inside `tests/` directory we have many useless docker-compose stacks
that are meant to test as many cases as we can to make sure we are compatible
### Unit tests with unittest
run unittest with the following command
### Unit tests with pytest
run pytest with the following command
```shell
python -m unittest pytests/*.py
python -m pytest pytests
```
# Contributing guide

View File

@@ -1,33 +0,0 @@
Version v1.1.0 (2024-04-17)
===========================
Bug fixes
---------
- Fixed support for values with equals sign in `-e` argument of `run` and `exec` commands.
- Fixed duplicate arguments being emitted in `stop` and `restart` commands.
- Removed extraneous debug output. `--verbose` flag has been added to preserve verbose output.
- Links aliases are now added to service aliases.
- Fixed image build process to use defined environment variables.
- Empty list is now allowed to be `COMMAND` and `ENTRYPOINT`.
- Environment files are now resolved relative to current working directory.
- Exit code of container build is now preserved as return code of `build` command.
New features
------------
- Added support for `uidmap`, `gidmap`, `http_proxy` and `runtime` service configuration keys.
- Added support for `enable_ipv6` network configuration key.
- Added `--parallel` option to support parallel pulling and building of images.
- Implemented support for maps in `sysctls` container configuration key.
- Implemented `stats` command.
- Added `--no-normalize` flag to `config` command.
- Added support for `include` global configuration key.
- Added support for `build` command.
- Added support to start containers with multiple networks.
- Added support for `profile` argument.
- Added support for starting podman in existing network namespace.
- Added IPAM driver support.
- Added support for file secrets being passed to `podman build` via `--secret` argument.
- Added support for multiple networks with separately specified IP and MAC address.
- Added support for `service.build.ulimits` when building image.

View File

@@ -1,67 +0,0 @@
# Podman specific extensions to the docker-compose format
Podman-compose supports the following extension to the docker-compose format.
## Per-network MAC-addresses
Generic docker-compose files support specification of the MAC address on the container level. If the
container has multiple network interfaces, the specified MAC address is applied to the first
specified network.
Podman-compose in addition supports the specification of MAC addresses on a per-network basis. This
is done by adding a `x-podman.mac_address` key to the network configuration in the container. The
value of the `x-podman.mac_address` key is the MAC address to be used for the network interface.
Specifying a MAC address for the container and for individual networks at the same time is not
supported.
Example:
```yaml
---
version: "3"
networks:
net0:
driver: "bridge"
ipam:
config:
- subnet: "192.168.0.0/24"
net1:
driver: "bridge"
ipam:
config:
- subnet: "192.168.1.0/24"
services:
webserver:
image: "busybox"
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc", "-p", "8001"]
networks:
net0:
ipv4_address: "192.168.0.10"
x-podman.mac_address: "02:aa:aa:aa:aa:aa"
net1:
ipv4_address: "192.168.1.10"
x-podman.mac_address: "02:bb:bb:bb:bb:bb"
```
## Podman-specific network modes
Generic docker-compose supports the following values for `network_mode` for a container:
- `bridge`
- `host`
- `none`
- `service`
- `container`
In addition, podman-compose supports the following podman-specific values for `network_mode`:
- `slirp4netns[:<options>,...]`
- `ns:<options>`
- `pasta[:<options>,...]`
- `private`
The options to the network modes are passed to the `--network` option of the `podman create` command
as-is.
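For example, a service can opt into one of these modes in a compose file (a minimal sketch; the service name and the slirp4netns option shown are illustrative only):

```yaml
---
version: "3"
services:
  pinger:
    image: "busybox"
    command: ["/bin/busybox", "ping", "-c", "3", "10.0.2.2"]
    # Everything after the first colon is handed to `podman create --network` as-is.
    network_mode: "slirp4netns:allow_host_loopback=true"
```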

View File

@@ -1,7 +1,7 @@
# pylint: disable=import-error
# pylint: disable=unused-import
import asyncio # noqa: F401
import os
import asyncio # noqa: F401
import aioredis
from aiohttp import web

File diff suppressed because it is too large

View File

@@ -1,15 +0,0 @@
[tool.ruff]
line-length = 100
target-version = "py38"
[tool.ruff.lint]
select = ["W", "E", "F", "I"]
ignore = [
]
[tool.ruff.lint.isort]
force-single-line = true
[tool.ruff.format]
preview = true # needed for quote-style
quote-style = "preserve"

View File

View File

@@ -1,111 +1,128 @@
# SPDX-License-Identifier: GPL-2.0
from __future__ import annotations
import argparse
import copy
import os
import unittest
import argparse
import yaml
from parameterized import parameterized
from podman_compose import PodmanCompose
from podman_compose import normalize_service
from podman_compose import normalize_service, PodmanCompose
class TestCanMergeBuild(unittest.TestCase):
@parameterized.expand([
({"test": "test"}, {"test": "test"}),
({"build": "."}, {"build": {"context": "."}}),
({"build": "./dir-1"}, {"build": {"context": "./dir-1"}}),
({"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
(
{"build": {"dockerfile": "dockerfile-1"}},
{"build": {"dockerfile": "dockerfile-1"}},
),
(
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
),
])
def test_simple(self, input, expected):
self.assertEqual(normalize_service(input), expected)
test_cases_simple = [
({"test": "test"}, {"test": "test"}),
({"build": "."}, {"build": {"context": "."}}),
({"build": "./dir-1"}, {"build": {"context": "./dir-1"}}),
({"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
(
{"build": {"dockerfile": "dockerfile-1"}},
{"build": {"dockerfile": "dockerfile-1"}},
),
(
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
),
]
@parameterized.expand([
({"test": "test"}, {"test": "test"}),
({"build": "."}, {"build": {"context": "./sub_dir/."}}),
({"build": "./dir-1"}, {"build": {"context": "./sub_dir/dir-1"}}),
({"build": {"context": "./dir-1"}}, {"build": {"context": "./sub_dir/dir-1"}}),
(
{"build": {"dockerfile": "dockerfile-1"}},
{"build": {"context": "./sub_dir", "dockerfile": "dockerfile-1"}},
),
(
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
{"build": {"context": "./sub_dir/dir-1", "dockerfile": "dockerfile-1"}},
),
])
def test_normalize_service_with_sub_dir(self, input, expected):
self.assertEqual(normalize_service(input, sub_dir="./sub_dir"), expected)
@parameterized.expand([
({}, {}, {}),
({}, {"test": "test"}, {"test": "test"}),
({"test": "test"}, {}, {"test": "test"}),
({"test": "test-1"}, {"test": "test-2"}, {"test": "test-2"}),
({}, {"build": "."}, {"build": {"context": "."}}),
({"build": "."}, {}, {"build": {"context": "."}}),
({"build": "./dir-1"}, {"build": "./dir-2"}, {"build": {"context": "./dir-2"}}),
({}, {"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
({"build": {"context": "./dir-1"}}, {}, {"build": {"context": "./dir-1"}}),
(
{"build": {"context": "./dir-1"}},
{"build": {"context": "./dir-2"}},
{"build": {"context": "./dir-2"}},
),
(
{},
{"build": {"dockerfile": "dockerfile-1"}},
{"build": {"dockerfile": "dockerfile-1"}},
),
(
{"build": {"dockerfile": "dockerfile-1"}},
{},
{"build": {"dockerfile": "dockerfile-1"}},
),
(
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"dockerfile": "./dockerfile-2"}},
{"build": {"dockerfile": "./dockerfile-2"}},
),
(
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"context": "./dir-2"}},
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-2"}},
),
(
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-1"}},
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
),
(
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
),
(
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1"]}},
),
(
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV2=2"]}},
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1", "ENV2=2"]}},
),
])
def test_parse_compose_file_when_multiple_composes(self, input, override, expected):
compose_test_1 = {"services": {"test-service": input}}
compose_test_2 = {"services": {"test-service": override}}
def test_normalize_service_simple():
for test_case, expected in copy.deepcopy(test_cases_simple):
test_original = copy.deepcopy(test_case)
test_case = normalize_service(test_case)
test_result = expected == test_case
if not test_result:
print("test: ", test_original)
print("expected: ", expected)
print("actual: ", test_case)
assert test_result
test_cases_sub_dir = [
({"test": "test"}, {"test": "test"}),
({"build": "."}, {"build": {"context": "./sub_dir/."}}),
({"build": "./dir-1"}, {"build": {"context": "./sub_dir/dir-1"}}),
({"build": {"context": "./dir-1"}}, {"build": {"context": "./sub_dir/dir-1"}}),
(
{"build": {"dockerfile": "dockerfile-1"}},
{"build": {"context": "./sub_dir", "dockerfile": "dockerfile-1"}},
),
(
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
{"build": {"context": "./sub_dir/dir-1", "dockerfile": "dockerfile-1"}},
),
]
def test_normalize_service_with_sub_dir():
for test_case, expected in copy.deepcopy(test_cases_sub_dir):
test_original = copy.deepcopy(test_case)
test_case = normalize_service(test_case, sub_dir="./sub_dir")
test_result = expected == test_case
if not test_result:
print("test: ", test_original)
print("expected: ", expected)
print("actual: ", test_case)
assert test_result
test_cases_merges = [
({}, {}, {}),
({}, {"test": "test"}, {"test": "test"}),
({"test": "test"}, {}, {"test": "test"}),
({"test": "test-1"}, {"test": "test-2"}, {"test": "test-2"}),
({}, {"build": "."}, {"build": {"context": "."}}),
({"build": "."}, {}, {"build": {"context": "."}}),
({"build": "./dir-1"}, {"build": "./dir-2"}, {"build": {"context": "./dir-2"}}),
({}, {"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
({"build": {"context": "./dir-1"}}, {}, {"build": {"context": "./dir-1"}}),
(
{"build": {"context": "./dir-1"}},
{"build": {"context": "./dir-2"}},
{"build": {"context": "./dir-2"}},
),
(
{},
{"build": {"dockerfile": "dockerfile-1"}},
{"build": {"dockerfile": "dockerfile-1"}},
),
(
{"build": {"dockerfile": "dockerfile-1"}},
{},
{"build": {"dockerfile": "dockerfile-1"}},
),
(
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"dockerfile": "./dockerfile-2"}},
{"build": {"dockerfile": "./dockerfile-2"}},
),
(
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"context": "./dir-2"}},
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-2"}},
),
(
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-1"}},
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
),
(
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
),
(
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./dockerfile-1"}},
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1"]}},
),
(
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV2=2"]}},
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1", "ENV2=2"]}},
),
]
def test__parse_compose_file_when_multiple_composes() -> None:
for test_input, test_override, expected_result in copy.deepcopy(test_cases_merges):
compose_test_1 = {"services": {"test-service": test_input}}
compose_test_2 = {"services": {"test-service": test_override}}
dump_yaml(compose_test_1, "test-compose-1.yaml")
dump_yaml(compose_test_2, "test-compose-2.yaml")
@@ -118,7 +135,15 @@ class TestCanMergeBuild(unittest.TestCase):
if podman_compose.services:
podman_compose.services["test-service"].pop("_deps")
actual_compose = podman_compose.services["test-service"]
self.assertEqual(actual_compose, expected)
if actual_compose != expected_result:
print("compose: ", test_input)
print("override: ", test_override)
print("expected: ", expected_result)
print("actual: ", actual_compose)
compose_expected = expected_result
assert compose_expected == actual_compose
def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:

View File

@@ -1,63 +1,78 @@
# SPDX-License-Identifier: GPL-2.0
from __future__ import annotations
import argparse
import copy
import os
import unittest
import argparse
import yaml
from parameterized import parameterized
from podman_compose import PodmanCompose
from podman_compose import normalize_service
from podman_compose import normalize_service, PodmanCompose
test_keys = ["command", "entrypoint"]
test_cases_normalise_pre_merge = [
({"$$$": []}, {"$$$": []}),
({"$$$": ["sh"]}, {"$$$": ["sh"]}),
({"$$$": ["sh", "-c", "date"]}, {"$$$": ["sh", "-c", "date"]}),
({"$$$": "sh"}, {"$$$": ["sh"]}),
({"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
(
{"$$$": "bash -c 'sleep infinity'"},
{"$$$": ["bash", "-c", "sleep infinity"]},
),
]
class TestCanMergeCmdEnt(unittest.TestCase):
@parameterized.expand([
({"$$$": []}, {"$$$": []}),
({"$$$": ["sh"]}, {"$$$": ["sh"]}),
({"$$$": ["sh", "-c", "date"]}, {"$$$": ["sh", "-c", "date"]}),
({"$$$": "sh"}, {"$$$": ["sh"]}),
({"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
(
{"$$$": "bash -c 'sleep infinity'"},
{"$$$": ["bash", "-c", "sleep infinity"]},
),
])
def test_normalize_service(self, input_template, expected_template):
test_cases_merges = [
({}, {"$$$": []}, {"$$$": []}),
({"$$$": []}, {}, {"$$$": []}),
({"$$$": []}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
({"$$$": "sh-2"}, {"$$$": []}, {"$$$": []}),
({}, {"$$$": "sh"}, {"$$$": ["sh"]}),
({"$$$": "sh"}, {}, {"$$$": ["sh"]}),
({"$$$": "sh-1"}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
({"$$$": ["sh-1"]}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
({"$$$": "sh-1"}, {"$$$": ["sh-2"]}, {"$$$": ["sh-2"]}),
({"$$$": "sh-1"}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
({"$$$": ["sh-1"]}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
({"$$$": ["sh-1", "sh-2"]}, {"$$$": ["sh-3", "sh-4"]}, {"$$$": ["sh-3", "sh-4"]}),
({}, {"$$$": ["sh-3", "sh 4"]}, {"$$$": ["sh-3", "sh 4"]}),
({"$$$": "sleep infinity"}, {"$$$": "sh"}, {"$$$": ["sh"]}),
({"$$$": "sh"}, {"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
(
{},
{"$$$": "bash -c 'sleep infinity'"},
{"$$$": ["bash", "-c", "sleep infinity"]},
),
]
def template_to_expression(base, override, expected, key):
base_copy = copy.deepcopy(base)
override_copy = copy.deepcopy(override)
expected_copy = copy.deepcopy(expected)
expected_copy[key] = expected_copy.pop("$$$")
if "$$$" in base:
base_copy[key] = base_copy.pop("$$$")
if "$$$" in override:
override_copy[key] = override_copy.pop("$$$")
return base_copy, override_copy, expected_copy
def test_normalize_service():
for test_input_template, expected_template in test_cases_normalise_pre_merge:
for key in test_keys:
test_input, _, expected = template_to_expression(
input_template, {}, expected_template, key
test_input_template, {}, expected_template, key
)
self.assertEqual(normalize_service(test_input), expected)
test_input = normalize_service(test_input)
test_result = expected == test_input
if not test_result:
print("base_template: ", test_input_template)
print("expected: ", expected)
print("actual: ", test_input)
assert test_result
@parameterized.expand([
({}, {"$$$": []}, {"$$$": []}),
({"$$$": []}, {}, {"$$$": []}),
({"$$$": []}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
({"$$$": "sh-2"}, {"$$$": []}, {"$$$": []}),
({}, {"$$$": "sh"}, {"$$$": ["sh"]}),
({"$$$": "sh"}, {}, {"$$$": ["sh"]}),
({"$$$": "sh-1"}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
({"$$$": ["sh-1"]}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
({"$$$": "sh-1"}, {"$$$": ["sh-2"]}, {"$$$": ["sh-2"]}),
({"$$$": "sh-1"}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
({"$$$": ["sh-1"]}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
({"$$$": ["sh-1", "sh-2"]}, {"$$$": ["sh-3", "sh-4"]}, {"$$$": ["sh-3", "sh-4"]}),
({}, {"$$$": ["sh-3", "sh 4"]}, {"$$$": ["sh-3", "sh 4"]}),
({"$$$": "sleep infinity"}, {"$$$": "sh"}, {"$$$": ["sh"]}),
({"$$$": "sh"}, {"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
(
{},
{"$$$": "bash -c 'sleep infinity'"},
{"$$$": ["bash", "-c", "sleep infinity"]},
),
])
def test_parse_compose_file_when_multiple_composes(
self, base_template, override_template, expected_template
def test__parse_compose_file_when_multiple_composes() -> None:
for base_template, override_template, expected_template in copy.deepcopy(
test_cases_merges
):
for key in test_keys:
base, override, expected = template_to_expression(
@@ -77,20 +92,12 @@ class TestCanMergeCmdEnt(unittest.TestCase):
if podman_compose.services:
podman_compose.services["test-service"].pop("_deps")
actual = podman_compose.services["test-service"]
self.assertEqual(actual, expected)
if actual != expected:
print("compose: ", base)
print("override: ", override)
print("result: ", expected)
def template_to_expression(base, override, expected, key):
base_copy = copy.deepcopy(base)
override_copy = copy.deepcopy(override)
expected_copy = copy.deepcopy(expected)
expected_copy[key] = expected_copy.pop("$$$")
if "$$$" in base:
base_copy[key] = base_copy.pop("$$$")
if "$$$" in override:
override_copy[key] = override_copy.pop("$$$")
return base_copy, override_copy, expected_copy
assert expected == actual
def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:

View File

@@ -1,46 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
import argparse
import unittest
from podman_compose import compose_exec_args
class TestComposeExecArgs(unittest.TestCase):
def test_minimal(self):
cnt = get_minimal_container()
args = get_minimal_args()
result = compose_exec_args(cnt, "container_name", args)
expected = ["--interactive", "--tty", "container_name"]
self.assertEqual(result, expected)
def test_additional_env_value_equals(self):
cnt = get_minimal_container()
args = get_minimal_args()
args.env = ["key=valuepart1=valuepart2"]
result = compose_exec_args(cnt, "container_name", args)
expected = [
"--interactive",
"--tty",
"--env",
"key=valuepart1=valuepart2",
"container_name",
]
self.assertEqual(result, expected)
def get_minimal_container():
return {}
def get_minimal_args():
return argparse.Namespace(
T=None,
cnt_command=None,
env=None,
privileged=None,
user=None,
workdir=None,
)

View File

@@ -1,60 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
import argparse
import unittest
from podman_compose import PodmanCompose
from podman_compose import compose_run_update_container_from_args
class TestComposeRunUpdateContainerFromArgs(unittest.TestCase):
def test_minimal(self):
cnt = get_minimal_container()
compose = get_minimal_compose()
args = get_minimal_args()
compose_run_update_container_from_args(compose, cnt, args)
expected_cnt = {"name": "default_name", "tty": True}
self.assertEqual(cnt, expected_cnt)
def test_additional_env_value_equals(self):
cnt = get_minimal_container()
compose = get_minimal_compose()
args = get_minimal_args()
args.env = ["key=valuepart1=valuepart2"]
compose_run_update_container_from_args(compose, cnt, args)
expected_cnt = {
"environment": {
"key": "valuepart1=valuepart2",
},
"name": "default_name",
"tty": True,
}
self.assertEqual(cnt, expected_cnt)
def get_minimal_container():
return {}
def get_minimal_compose():
return PodmanCompose()
def get_minimal_args():
return argparse.Namespace(
T=None,
cnt_command=None,
entrypoint=None,
env=None,
name="default_name",
rm=None,
service=None,
service_ports=None,
user=None,
volume=None,
workdir=None,
)

View File

@@ -1,163 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
import unittest
from unittest import mock
from podman_compose import container_to_args
def create_compose_mock(project_name="test_project_name"):
compose = mock.Mock()
compose.project_name = project_name
compose.dirname = "test_dirname"
compose.container_names_by_service.get = mock.Mock(return_value=None)
compose.prefer_volume_over_mount = False
compose.default_net = None
compose.networks = {}
return compose
def get_minimal_container():
return {
"name": "project_name_service_name1",
"service_name": "service_name",
"image": "busybox",
}
class TestContainerToArgs(unittest.IsolatedAsyncioTestCase):
async def test_minimal(self):
c = create_compose_mock()
cnt = get_minimal_container()
args = await container_to_args(c, cnt)
self.assertEqual(
args,
[
"--name=project_name_service_name1",
"-d",
"--network=bridge",
"--network-alias=service_name",
"busybox",
],
)
async def test_runtime(self):
c = create_compose_mock()
cnt = get_minimal_container()
cnt["runtime"] = "runsc"
args = await container_to_args(c, cnt)
self.assertEqual(
args,
[
"--name=project_name_service_name1",
"-d",
"--network=bridge",
"--network-alias=service_name",
"--runtime",
"runsc",
"busybox",
],
)
async def test_sysctl_list(self):
c = create_compose_mock()
cnt = get_minimal_container()
cnt["sysctls"] = [
"net.core.somaxconn=1024",
"net.ipv4.tcp_syncookies=0",
]
args = await container_to_args(c, cnt)
self.assertEqual(
args,
[
"--name=project_name_service_name1",
"-d",
"--network=bridge",
"--network-alias=service_name",
"--sysctl",
"net.core.somaxconn=1024",
"--sysctl",
"net.ipv4.tcp_syncookies=0",
"busybox",
],
)
async def test_sysctl_map(self):
c = create_compose_mock()
cnt = get_minimal_container()
cnt["sysctls"] = {
"net.core.somaxconn": 1024,
"net.ipv4.tcp_syncookies": 0,
}
args = await container_to_args(c, cnt)
self.assertEqual(
args,
[
"--name=project_name_service_name1",
"-d",
"--network=bridge",
"--network-alias=service_name",
"--sysctl",
"net.core.somaxconn=1024",
"--sysctl",
"net.ipv4.tcp_syncookies=0",
"busybox",
],
)
async def test_sysctl_wrong_type(self):
c = create_compose_mock()
cnt = get_minimal_container()
# check whether wrong types are correctly rejected
for wrong_type in [True, 0, 0.0, "wrong", ()]:
with self.assertRaises(TypeError):
cnt["sysctls"] = wrong_type
await container_to_args(c, cnt)
async def test_pid(self):
c = create_compose_mock()
cnt = get_minimal_container()
cnt["pid"] = "host"
args = await container_to_args(c, cnt)
self.assertEqual(
args,
[
"--name=project_name_service_name1",
"-d",
"--network=bridge",
"--network-alias=service_name",
"--pid",
"host",
"busybox",
],
)
async def test_http_proxy(self):
c = create_compose_mock()
cnt = get_minimal_container()
cnt["http_proxy"] = False
args = await container_to_args(c, cnt)
self.assertEqual(
args,
[
"--name=project_name_service_name1",
"-d",
"--http-proxy=false",
"--network=bridge",
"--network-alias=service_name",
"busybox",
],
)

View File

@@ -1,298 +0,0 @@
import unittest
from parameterized import parameterized
from podman_compose import get_net_args
from .test_container_to_args import create_compose_mock
PROJECT_NAME = "test_project_name"
SERVICE_NAME = "service_name"
CONTAINER_NAME = f"{PROJECT_NAME}_{SERVICE_NAME}_1"
def get_networked_compose(num_networks=1):
compose = create_compose_mock(PROJECT_NAME)
for network in range(num_networks):
compose.networks[f"net{network}"] = {
"driver": "bridge",
"ipam": {
"config": [
{"subnet": f"192.168.{network}.0/24"},
{"subnet": f"fd00:{network}::/64"},
]
},
"enable_ipv6": True,
}
return compose
def get_minimal_container():
return {
"name": CONTAINER_NAME,
"service_name": SERVICE_NAME,
"image": "busybox",
}
class TestGetNetArgs(unittest.TestCase):
def test_minimal(self):
compose = get_networked_compose()
container = get_minimal_container()
expected_args = [
"--network=bridge",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_one_net(self):
compose = get_networked_compose()
container = get_minimal_container()
container["networks"] = {"net0": {}}
expected_args = [
f"--network={PROJECT_NAME}_net0",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_alias(self):
compose = get_networked_compose()
container = get_minimal_container()
container["networks"] = {"net0": {}}
container["_aliases"] = ["alias1", "alias2"]
expected_args = [
f"--network={PROJECT_NAME}_net0",
f"--network-alias={SERVICE_NAME}",
"--network-alias=alias1",
"--network-alias=alias2",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_one_ipv4(self):
ip = "192.168.0.42"
compose = get_networked_compose()
container = get_minimal_container()
container["networks"] = {"net0": {"ipv4_address": ip}}
expected_args = [
f"--network={PROJECT_NAME}_net0",
f"--ip={ip}",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertEqual(expected_args, args)
def test_one_ipv6(self):
ipv6_address = "fd00:0::42"
compose = get_networked_compose()
container = get_minimal_container()
container["networks"] = {"net0": {"ipv6_address": ipv6_address}}
expected_args = [
f"--network={PROJECT_NAME}_net0",
f"--ip6={ipv6_address}",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_one_mac(self):
mac = "00:11:22:33:44:55"
compose = get_networked_compose()
container = get_minimal_container()
container["networks"] = {"net0": {}}
container["mac_address"] = mac
expected_args = [
f"--network={PROJECT_NAME}_net0",
f"--mac-address={mac}",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_one_mac_two_nets(self):
mac = "00:11:22:33:44:55"
compose = get_networked_compose(num_networks=6)
container = get_minimal_container()
container["networks"] = {"net0": {}, "net1": {}}
container["mac_address"] = mac
expected_args = [
f"--network={PROJECT_NAME}_net0:mac={mac}",
f"--network={PROJECT_NAME}_net1",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_two_nets_as_dict(self):
compose = get_networked_compose(num_networks=2)
container = get_minimal_container()
container["networks"] = {"net0": {}, "net1": {}}
expected_args = [
f"--network={PROJECT_NAME}_net0",
f"--network={PROJECT_NAME}_net1",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_two_nets_as_list(self):
compose = get_networked_compose(num_networks=2)
container = get_minimal_container()
container["networks"] = ["net0", "net1"]
expected_args = [
f"--network={PROJECT_NAME}_net0",
f"--network={PROJECT_NAME}_net1",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_two_ipv4(self):
ip0 = "192.168.0.42"
ip1 = "192.168.1.42"
compose = get_networked_compose(num_networks=2)
container = get_minimal_container()
container["networks"] = {"net0": {"ipv4_address": ip0}, "net1": {"ipv4_address": ip1}}
expected_args = [
f"--network={PROJECT_NAME}_net0:ip={ip0}",
f"--network={PROJECT_NAME}_net1:ip={ip1}",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_two_ipv6(self):
ip0 = "fd00:0::42"
ip1 = "fd00:1::42"
compose = get_networked_compose(num_networks=2)
container = get_minimal_container()
container["networks"] = {"net0": {"ipv6_address": ip0}, "net1": {"ipv6_address": ip1}}
expected_args = [
f"--network={PROJECT_NAME}_net0:ip={ip0}",
f"--network={PROJECT_NAME}_net1:ip={ip1}",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
# custom extension; not supported by docker-compose
def test_two_mac(self):
mac0 = "00:00:00:00:00:01"
mac1 = "00:00:00:00:00:02"
compose = get_networked_compose(num_networks=2)
container = get_minimal_container()
container["networks"] = {
"net0": {"x-podman.mac_address": mac0},
"net1": {"x-podman.mac_address": mac1},
}
expected_args = [
f"--network={PROJECT_NAME}_net0:mac={mac0}",
f"--network={PROJECT_NAME}_net1:mac={mac1}",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_mixed_mac(self):
ip4_0 = "192.168.0.42"
ip4_1 = "192.168.1.42"
ip4_2 = "192.168.2.42"
mac_0 = "00:00:00:00:00:01"
mac_1 = "00:00:00:00:00:02"
compose = get_networked_compose(num_networks=3)
container = get_minimal_container()
container["networks"] = {
"net0": {"ipv4_address": ip4_0},
"net1": {"ipv4_address": ip4_1, "x-podman.mac_address": mac_0},
"net2": {"ipv4_address": ip4_2},
}
container["mac_address"] = mac_1
expected_exception = (
r"specifying mac_address on both container and network level " r"is not supported"
)
self.assertRaisesRegex(RuntimeError, expected_exception, get_net_args, compose, container)
def test_mixed_config(self):
ip4_0 = "192.168.0.42"
ip4_1 = "192.168.1.42"
ip6_0 = "fd00:0::42"
ip6_2 = "fd00:2::42"
mac = "00:11:22:33:44:55"
compose = get_networked_compose(num_networks=4)
container = get_minimal_container()
container["networks"] = {
"net0": {"ipv4_address": ip4_0, "ipv6_address": ip6_0},
"net1": {"ipv4_address": ip4_1},
"net2": {"ipv6_address": ip6_2},
"net3": {},
}
container["mac_address"] = mac
expected_args = [
f"--network={PROJECT_NAME}_net0:ip={ip4_0},ip={ip6_0},mac={mac}",
f"--network={PROJECT_NAME}_net1:ip={ip4_1}",
f"--network={PROJECT_NAME}_net2:ip={ip6_2}",
f"--network={PROJECT_NAME}_net3",
f"--network-alias={SERVICE_NAME}",
]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
@parameterized.expand([
("bridge", ["--network=bridge", f"--network-alias={SERVICE_NAME}"]),
("host", ["--network=host"]),
("none", []),
("slirp4netns", ["--network=slirp4netns"]),
("slirp4netns:cidr=10.42.0.0/24", ["--network=slirp4netns:cidr=10.42.0.0/24"]),
("private", ["--network=private"]),
("pasta", ["--network=pasta"]),
("pasta:--ipv4-only,-a,10.0.2.0", ["--network=pasta:--ipv4-only,-a,10.0.2.0"]),
("ns:my_namespace", ["--network=ns:my_namespace"]),
("container:my_container", ["--network=container:my_container"]),
])
def test_network_modes(self, network_mode, expected_args):
compose = get_networked_compose()
container = get_minimal_container()
container["network_mode"] = network_mode
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)
def test_network_mode_invalid(self):
compose = get_networked_compose()
container = get_minimal_container()
container["network_mode"] = "invalid_mode"
with self.assertRaises(SystemExit):
get_net_args(compose, container)
def test_network__mode_service(self):
compose = get_networked_compose()
compose.container_names_by_service = {
"service_1": ["container_1"],
"service_2": ["container_2"],
}
container = get_minimal_container()
container["network_mode"] = "service:service_2"
expected_args = ["--network=container:container_2"]
args = get_net_args(compose, container)
self.assertListEqual(expected_args, args)

View File

@@ -1,167 +0,0 @@
import unittest
from podman_compose import get_network_create_args
class TestGetNetworkCreateArgs(unittest.TestCase):
def test_minimal(self):
net_desc = {
"labels": [],
"internal": False,
"driver": None,
"driver_opts": {},
"ipam": {"config": []},
"enable_ipv6": False,
}
proj_name = "test_project"
net_name = "test_network"
expected_args = [
"create",
"--label",
f"io.podman.compose.project={proj_name}",
"--label",
f"com.docker.compose.project={proj_name}",
net_name,
]
args = get_network_create_args(net_desc, proj_name, net_name)
self.assertEqual(args, expected_args)
def test_ipv6(self):
net_desc = {
"labels": [],
"internal": False,
"driver": None,
"driver_opts": {},
"ipam": {"config": []},
"enable_ipv6": True,
}
proj_name = "test_project"
net_name = "test_network"
expected_args = [
"create",
"--label",
f"io.podman.compose.project={proj_name}",
"--label",
f"com.docker.compose.project={proj_name}",
"--ipv6",
net_name,
]
args = get_network_create_args(net_desc, proj_name, net_name)
self.assertEqual(args, expected_args)
def test_bridge(self):
net_desc = {
"labels": [],
"internal": False,
"driver": "bridge",
"driver_opts": {"opt1": "value1", "opt2": "value2"},
"ipam": {"config": []},
"enable_ipv6": False,
}
proj_name = "test_project"
net_name = "test_network"
expected_args = [
"create",
"--label",
f"io.podman.compose.project={proj_name}",
"--label",
f"com.docker.compose.project={proj_name}",
"--driver",
"bridge",
"--opt",
"opt1=value1",
"--opt",
"opt2=value2",
net_name,
]
args = get_network_create_args(net_desc, proj_name, net_name)
self.assertEqual(args, expected_args)
def test_ipam_driver(self):
net_desc = {
"labels": [],
"internal": False,
"driver": None,
"driver_opts": {},
"ipam": {
"driver": "default",
"config": [
{
"subnet": "192.168.0.0/24",
"ip_range": "192.168.0.2/24",
"gateway": "192.168.0.1",
}
],
},
}
proj_name = "test_project"
net_name = "test_network"
expected_args = [
"create",
"--label",
f"io.podman.compose.project={proj_name}",
"--label",
f"com.docker.compose.project={proj_name}",
"--ipam-driver",
"default",
"--subnet",
"192.168.0.0/24",
"--ip-range",
"192.168.0.2/24",
"--gateway",
"192.168.0.1",
net_name,
]
args = get_network_create_args(net_desc, proj_name, net_name)
self.assertEqual(args, expected_args)
def test_complete(self):
net_desc = {
"labels": ["label1", "label2"],
"internal": True,
"driver": "bridge",
"driver_opts": {"opt1": "value1", "opt2": "value2"},
"ipam": {
"driver": "default",
"config": [
{
"subnet": "192.168.0.0/24",
"ip_range": "192.168.0.2/24",
"gateway": "192.168.0.1",
}
],
},
"enable_ipv6": True,
}
proj_name = "test_project"
net_name = "test_network"
expected_args = [
"create",
"--label",
f"io.podman.compose.project={proj_name}",
"--label",
f"com.docker.compose.project={proj_name}",
"--label",
"label1",
"--label",
"label2",
"--internal",
"--driver",
"bridge",
"--opt",
"opt1=value1",
"--opt",
"opt2=value2",
"--ipam-driver",
"default",
"--ipv6",
"--subnet",
"192.168.0.0/24",
"--ip-range",
"192.168.0.2/24",
"--gateway",
"192.168.0.1",
net_name,
]
args = get_network_create_args(net_desc, proj_name, net_name)
self.assertEqual(args, expected_args)

View File

@@ -1,123 +1,129 @@
# SPDX-License-Identifier: GPL-2.0
# pylint: disable=protected-access
from __future__ import annotations
import argparse
import copy
import os
import unittest
import yaml
from parameterized import parameterized
from podman_compose import PodmanCompose
from podman_compose import normalize_final
from podman_compose import normalize_service_final
from podman_compose import (
normalize_service,
normalize,
normalize_final,
normalize_service_final,
PodmanCompose,
)
cwd = os.path.abspath(".")
test_cases_simple_normalization = [
({"image": "test-image"}, {"image": "test-image"}),
(
{"build": "."},
{
"build": {"context": cwd, "dockerfile": "Dockerfile"},
},
),
(
{"build": "../relative"},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "../relative")),
"dockerfile": "Dockerfile",
},
},
),
(
{"build": "./relative"},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "./relative")),
"dockerfile": "Dockerfile",
},
},
),
(
{"build": "/workspace/absolute"},
{
"build": {
"context": "/workspace/absolute",
"dockerfile": "Dockerfile",
},
},
),
(
{
"build": {
"dockerfile": "Dockerfile",
},
},
{
"build": {
"context": cwd,
"dockerfile": "Dockerfile",
},
},
),
(
{
"build": {
"context": ".",
},
},
{
"build": {
"context": cwd,
"dockerfile": "Dockerfile",
},
},
),
(
{
"build": {"context": "../", "dockerfile": "test-dockerfile"},
},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "../")),
"dockerfile": "test-dockerfile",
},
},
),
(
{
"build": {"context": ".", "dockerfile": "./dev/test-dockerfile"},
},
{
"build": {
"context": cwd,
"dockerfile": "./dev/test-dockerfile",
},
},
),
]
class TestNormalizeFinalBuild(unittest.TestCase):
cases_simple_normalization = [
({"image": "test-image"}, {"image": "test-image"}),
(
{"build": "."},
{
"build": {"context": cwd, "dockerfile": "Dockerfile"},
},
),
(
{"build": "../relative"},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "../relative")),
"dockerfile": "Dockerfile",
},
},
),
(
{"build": "./relative"},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "./relative")),
"dockerfile": "Dockerfile",
},
},
),
(
{"build": "/workspace/absolute"},
{
"build": {
"context": "/workspace/absolute",
"dockerfile": "Dockerfile",
},
},
),
(
{
"build": {
"dockerfile": "Dockerfile",
},
},
{
"build": {
"context": cwd,
"dockerfile": "Dockerfile",
},
},
),
(
{
"build": {
"context": ".",
},
},
{
"build": {
"context": cwd,
"dockerfile": "Dockerfile",
},
},
),
(
{
"build": {"context": "../", "dockerfile": "test-dockerfile"},
},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "../")),
"dockerfile": "test-dockerfile",
},
},
),
(
{
"build": {"context": ".", "dockerfile": "./dev/test-dockerfile"},
},
{
"build": {
"context": cwd,
"dockerfile": "./dev/test-dockerfile",
},
},
),
]
#
# [service.build] is normalised after merges
#
def test_normalize_service_final_returns_absolute_path_in_context() -> None:
project_dir = cwd
for test_input, expected_service in copy.deepcopy(test_cases_simple_normalization):
actual_service = normalize_service_final(test_input, project_dir)
assert expected_service == actual_service
@parameterized.expand(cases_simple_normalization)
def test_normalize_service_final_returns_absolute_path_in_context(self, input, expected):
# Tests that [service.build] is normalized after merges
project_dir = cwd
self.assertEqual(normalize_service_final(input, project_dir), expected)
@parameterized.expand(cases_simple_normalization)
def test_normalize_returns_absolute_path_in_context(self, input, expected):
project_dir = cwd
compose_test = {"services": {"test-service": input}}
compose_expected = {"services": {"test-service": expected}}
self.assertEqual(normalize_final(compose_test, project_dir), compose_expected)
def test_normalize_returns_absolute_path_in_context() -> None:
project_dir = cwd
for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
compose_test = {"services": {"test-service": test_input}}
compose_expected = {"services": {"test-service": expected_result}}
actual_compose = normalize_final(compose_test, project_dir)
assert compose_expected == actual_compose
@parameterized.expand(cases_simple_normalization)
def test_parse_compose_file_when_single_compose(self, input, expected):
compose_test = {"services": {"test-service": input}}
#
# running full parse over single compose files
#
def test__parse_compose_file_when_single_compose() -> None:
for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
compose_test = {"services": {"test-service": test_input}}
dump_yaml(compose_test, "test-compose.yaml")
podman_compose = PodmanCompose()
@@ -129,106 +135,119 @@ class TestNormalizeFinalBuild(unittest.TestCase):
if podman_compose.services:
podman_compose.services["test-service"].pop("_deps")
actual_compose = podman_compose.services["test-service"]
self.assertEqual(actual_compose, expected)
if actual_compose != expected_result:
print("compose: ", test_input)
print("result: ", expected_result)
@parameterized.expand([
(
{},
{"build": "."},
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
),
(
{"build": "."},
{},
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
),
(
{"build": "/workspace/absolute"},
{"build": "./relative"},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "./relative")),
"dockerfile": "Dockerfile",
}
},
),
(
{"build": "./relative"},
{"build": "/workspace/absolute"},
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
),
(
{"build": "./relative"},
{"build": "/workspace/absolute"},
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
),
(
{"build": {"dockerfile": "test-dockerfile"}},
{},
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
),
(
{},
{"build": {"dockerfile": "test-dockerfile"}},
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
),
(
{},
{"build": {"dockerfile": "test-dockerfile"}},
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
),
(
{"build": {"dockerfile": "test-dockerfile-1"}},
{"build": {"dockerfile": "test-dockerfile-2"}},
{"build": {"context": cwd, "dockerfile": "test-dockerfile-2"}},
),
(
{"build": "/workspace/absolute"},
{"build": {"dockerfile": "test-dockerfile"}},
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
),
(
{"build": {"dockerfile": "test-dockerfile"}},
{"build": "/workspace/absolute"},
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
),
(
{"build": {"dockerfile": "./test-dockerfile-1"}},
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV1=1"]}},
{
"build": {
"context": cwd,
"dockerfile": "./test-dockerfile-2",
"args": ["ENV1=1"],
}
},
),
(
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./test-dockerfile-2"}},
{
"build": {
"context": cwd,
"dockerfile": "./test-dockerfile-2",
"args": ["ENV1=1"],
}
},
),
(
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV2=2"]}},
{
"build": {
"context": cwd,
"dockerfile": "./test-dockerfile-2",
"args": ["ENV1=1", "ENV2=2"],
}
},
),
])
def test_parse_when_multiple_composes(self, input, override, expected):
compose_test_1 = {"services": {"test-service": input}}
compose_test_2 = {"services": {"test-service": override}}
assert expected_result == actual_compose
test_cases_with_merges = [
(
{},
{"build": "."},
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
),
(
{"build": "."},
{},
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
),
(
{"build": "/workspace/absolute"},
{"build": "./relative"},
{
"build": {
"context": os.path.normpath(os.path.join(cwd, "./relative")),
"dockerfile": "Dockerfile",
}
},
),
(
{"build": "./relative"},
{"build": "/workspace/absolute"},
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
),
(
{"build": "./relative"},
{"build": "/workspace/absolute"},
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
),
(
{"build": {"dockerfile": "test-dockerfile"}},
{},
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
),
(
{},
{"build": {"dockerfile": "test-dockerfile"}},
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
),
(
{},
{"build": {"dockerfile": "test-dockerfile"}},
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
),
(
{"build": {"dockerfile": "test-dockerfile-1"}},
{"build": {"dockerfile": "test-dockerfile-2"}},
{"build": {"context": cwd, "dockerfile": "test-dockerfile-2"}},
),
(
{"build": "/workspace/absolute"},
{"build": {"dockerfile": "test-dockerfile"}},
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
),
(
{"build": {"dockerfile": "test-dockerfile"}},
{"build": "/workspace/absolute"},
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
),
(
{"build": {"dockerfile": "./test-dockerfile-1"}},
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV1=1"]}},
{
"build": {
"context": cwd,
"dockerfile": "./test-dockerfile-2",
"args": ["ENV1=1"],
}
},
),
(
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./test-dockerfile-2"}},
{
"build": {
"context": cwd,
"dockerfile": "./test-dockerfile-2",
"args": ["ENV1=1"],
}
},
),
(
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV2=2"]}},
{
"build": {
"context": cwd,
"dockerfile": "./test-dockerfile-2",
"args": ["ENV1=1", "ENV2=2"],
}
},
),
]
#
# running full parse over merged
#
def test__parse_compose_file_when_multiple_composes() -> None:
for test_input, test_override, expected_result in copy.deepcopy(
test_cases_with_merges
):
compose_test_1 = {"services": {"test-service": test_input}}
compose_test_2 = {"services": {"test-service": test_override}}
dump_yaml(compose_test_1, "test-compose-1.yaml")
dump_yaml(compose_test_2, "test-compose-2.yaml")
@ -245,10 +264,18 @@ class TestNormalizeFinalBuild(unittest.TestCase):
if podman_compose.services:
podman_compose.services["test-service"].pop("_deps")
actual_compose = podman_compose.services["test-service"]
self.assertEqual(actual_compose, expected)
if actual_compose != expected_result:
print("compose: ", test_input)
print("override: ", test_override)
print("result: ", expected_result)
compose_expected = expected_result
assert compose_expected == actual_compose
def set_args(podman_compose: PodmanCompose, file_names: list[str], no_normalize: bool) -> None:
def set_args(
podman_compose: PodmanCompose, file_names: list[str], no_normalize: bool
) -> None:
podman_compose.global_args = argparse.Namespace()
podman_compose.global_args.file = file_names
podman_compose.global_args.project_name = None

View File

@ -1,20 +1,21 @@
# SPDX-License-Identifier: GPL-2.0
# pylint: disable=redefined-outer-name
import unittest
import pytest
from podman_compose import parse_short_mount
class ParseShortMountTests(unittest.TestCase):
def test_multi_propagation(self):
self.assertEqual(
parse_short_mount("/foo/bar:/baz:U,Z", "/"),
{
"type": "bind",
"source": "/foo/bar",
"target": "/baz",
"bind": {
"propagation": "U,Z",
},
},
)
@pytest.fixture
def multi_propagation_mount_str():
return "/foo/bar:/baz:U,Z"
def test_parse_short_mount_multi_propagation(multi_propagation_mount_str):
expected = {
"type": "bind",
"source": "/foo/bar",
"target": "/baz",
"bind": {
"propagation": "U,Z",
},
}
assert parse_short_mount(multi_propagation_mount_str, "/") == expected

View File

@ -1,11 +1,10 @@
# SPDX-License-Identifier: GPL-2.0
import os
from setuptools import setup
try:
README = open(os.path.join(os.path.dirname(__file__), "README.md"), encoding="utf-8").read()
README = open(
os.path.join(os.path.dirname(__file__), "README.md"), encoding="utf-8"
).read()
except: # noqa: E722 # pylint: disable=bare-except
README = ""
@ -40,10 +39,20 @@ setup(
"pyyaml",
"python-dotenv",
],
extras_require={"devel": ["ruff", "pre-commit", "coverage", "parameterized"]},
extras_require={
"devel": [
"flake8",
"black",
"pylint",
"pre-commit",
"coverage"
]
}
# test_suite='tests',
# tests_require=[
# 'coverage',
# 'pytest-cov',
# 'pytest',
# 'tox',
# ]
)

View File

@ -1,33 +1,9 @@
-e .
coverage==7.4.3
parameterized==0.9.0
pytest==8.0.2
tox==4.13.0
ruff==0.3.1
pylint==3.1.0
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
# The packages below are transitive dependencies of the packages above and are included here
# to make testing reproducible.
# To refresh, create a new virtualenv and do:
# pip install -r requirements.txt -r test-requirements.txt
# pip freeze > test-requirements.txt
# and edit test-requirements.txt to add this comment
astroid==3.1.0
cachetools==5.3.3
chardet==5.2.0
colorama==0.4.6
dill==0.3.8
distlib==0.3.8
filelock==3.13.1
iniconfig==2.0.0
isort==5.13.2
mccabe==0.7.0
packaging==23.2
platformdirs==4.2.0
pluggy==1.4.0
pyproject-api==1.6.1
python-dotenv==1.0.1
PyYAML==6.0.1
tomlkit==0.12.4
virtualenv==20.25.1
coverage
pytest
tox
black
flake8

View File

@ -1,12 +0,0 @@
import os
import subprocess
def create_base_test_image():
subprocess.check_call(
['podman', 'build', '-t', 'nopush/podman-compose-test', '.'],
cwd=os.path.join(os.path.dirname(__file__), "base_image"),
)
create_base_test_image()

View File

@ -1,6 +0,0 @@
FROM docker.io/library/debian:bookworm-slim
RUN apt-get update \
&& apt-get install -y \
dumb-init \
busybox \
wget

View File

@ -1,9 +0,0 @@
FROM busybox
RUN --mount=type=secret,required=true,id=build_secret \
ls -l /run/secrets/ && cat /run/secrets/build_secret
RUN --mount=type=secret,required=true,id=build_secret,target=/tmp/secret \
ls -l /run/secrets/ /tmp/ && cat /tmp/secret
CMD [ 'echo', 'nothing here' ]

View File

@ -1,22 +0,0 @@
version: "3.8"
services:
test:
image: test
secrets:
- run_secret # implicitly mount to /run/secrets/run_secret
- source: run_secret
target: /tmp/run_secret2 # explicit mount point
build:
context: .
secrets:
- build_secret # can be mounted in Dockerfile with "RUN --mount=type=secret,id=build_secret"
- source: build_secret
target: build_secret2 # rename to build_secret2
secrets:
build_secret:
file: ./my_secret
run_secret:
file: ./my_secret

View File

@ -1,18 +0,0 @@
version: "3.8"
services:
test:
image: test
build:
context: .
secrets:
# invalid target argument
#
# According to https://github.com/compose-spec/compose-spec/blob/master/build.md, target is
# supposed to be the "name of a *file* to be mounted in /run/secrets/". Not a path.
- source: build_secret
target: /build_secret
secrets:
build_secret:
file: ./my_secret

View File

@ -1 +0,0 @@
important-secret-is-important

26
tests/conftest.py Normal file
View File

@ -0,0 +1,26 @@
"""conftest.py
Defines global pytest fixtures available to all tests.
"""
# pylint: disable=redefined-outer-name
from pathlib import Path
import os
import pytest
@pytest.fixture
def base_path():
"""Returns the base path for the project"""
return Path(__file__).parent.parent
@pytest.fixture
def test_path(base_path):
"""Returns the path to the tests directory"""
return os.path.join(base_path, "tests")
@pytest.fixture
def podman_compose_path(base_path):
"""Returns the path to the podman compose script"""
return os.path.join(base_path, "podman_compose.py")

View File

@ -1,22 +1,22 @@
version: "3.7"
services:
web:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
tmpfs:
- /run
- /tmp
sleep:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "sh", "-c", "sleep 3600"]
image: busybox
command: ["/bin/busybox", "sh", "-c", "sleep 3600"]
depends_on:
- "web"
tmpfs:
- /run
- /tmp
sleep2:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "sh", "-c", "sleep 3600"]
image: busybox
command: ["/bin/busybox", "sh", "-c", "sleep 3600"]
depends_on:
- sleep
tmpfs:

View File

@ -1,20 +1,20 @@
version: "3"
services:
too_long:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "sh", "-c", "sleep 3600; exit 0"]
image: busybox
command: ["/bin/busybox", "sh", "-c", "sleep 3600; exit 0"]
tmpfs:
- /run
- /tmp
sh1:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "sh", "-c", "sleep 1; exit 1"]
image: busybox
command: ["/bin/busybox", "sh", "-c", "sleep 5; exit 1"]
tmpfs:
- /run
- /tmp
sh2:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "sh", "-c", "sleep 1; exit 2"]
image: busybox
command: ["/bin/busybox", "sh", "-c", "sleep 5; exit 2"]
tmpfs:
- /run
- /tmp

View File

@ -2,6 +2,6 @@ services:
webapp_default:
webapp_special:
image: nopush/podman-compose-test
image: busybox
volumes:
- "/data"

View File

@ -1,7 +1,7 @@
version: "3"
services:
web:
image: nopush/podman-compose-test
image: busybox
extends:
file: common-services.yml
service: webapp_default

View File

@ -1 +1 @@
FROM nopush/podman-compose-test as base
FROM busybox as base

View File

@ -2,6 +2,6 @@ version: '3.6'
services:
web:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", ".", "-p", "8003"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", ".", "-p", "8003"]

View File

@ -1,61 +0,0 @@
version: "3"
networks:
shared-network:
driver: bridge
ipam:
config:
- subnet: "172.19.1.0/24"
internal-network:
driver: bridge
ipam:
config:
- subnet: "172.19.2.0/24"
services:
web1:
image: busybox
hostname: web1
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
working_dir: /var/www/html
networks:
shared-network:
ipv4_address: "172.19.1.10"
x-podman.mac_address: "02:01:01:00:01:01"
internal-network:
ipv4_address: "172.19.2.10"
x-podman.mac_address: "02:01:01:00:02:01"
volumes:
- ./test1.txt:/var/www/html/index.txt:ro,z
web2:
image: busybox
hostname: web2
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
working_dir: /var/www/html
mac_address: "02:01:01:00:02:02"
networks:
internal-network:
ipv4_address: "172.19.2.11"
volumes:
- ./test2.txt:/var/www/html/index.txt:ro,z
web3:
image: busybox
hostname: web2
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
working_dir: /var/www/html
networks:
internal-network:
volumes:
- ./test3.txt:/var/www/html/index.txt:ro,z
web4:
image: busybox
hostname: web2
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
working_dir: /var/www/html
networks:
internal-network:
shared-network:
ipv4_address: "172.19.1.13"
volumes:
- ./test4.txt:/var/www/html/index.txt:ro,z

View File

@ -1 +0,0 @@
test1

View File

@ -1 +0,0 @@
test2

View File

@ -1 +0,0 @@
test3

View File

@ -1 +0,0 @@
test4

View File

@ -1,6 +0,0 @@
version: "3"
services:
serv:
image: busybox
pid: host
command: sh -c "ps all"

View File

@ -1,18 +1,18 @@
version: "3"
services:
web1:
image: nopush/podman-compose-test
image: busybox
hostname: web1
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
working_dir: /var/www/html
ports:
- 8001:8001
volumes:
- ./test1.txt:/var/www/html/index.txt:ro,z
web2:
image: nopush/podman-compose-test
image: busybox
hostname: web2
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8002"]
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8002"]
working_dir: /var/www/html
ports:
- 8002:8002

View File

@ -1,24 +1,24 @@
version: "3"
services:
default-service:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
tmpfs:
- /run
- /tmp
service-1:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
tmpfs:
- /run
- /tmp
profiles:
- profile-1
service-2:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
tmpfs:
- /run
- /tmp
profiles:
- profile-2
- profile-2

View File

@ -1,7 +1,7 @@
version: "3"
services:
redis:
image: redis:alpine
image: docker.io/library/redis:alpine
command: ["redis-server", "--appendonly yes", "--notify-keyspace-events", "Ex"]
volumes:
- ./data/redis:/data:z
@ -12,7 +12,7 @@ services:
- SECRET_KEY=aabbcc
- ENV_IS_SET
web:
image: busybox
image: docker.io/library/busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8000"]
working_dir: /var/www/html
volumes:
@ -21,19 +21,19 @@ services:
- /run
- /tmp
web1:
image: busybox
image: docker.io/library/busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
working_dir: /var/www/html
volumes:
- ./data/web:/var/www/html:ro,z
web2:
image: busybox
image: docker.io/library/busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8002"]
working_dir: /var/www/html
volumes:
- ~/Downloads/www:/var/www/html:ro,z
web3:
image: busybox
image: docker.io/library/busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8003"]
working_dir: /var/www/html
volumes:

View File

@ -1,95 +1,88 @@
# SPDX-License-Identifier: GPL-2.0
import os
import unittest
from pathlib import Path
from .test_utils import RunSubprocessMixin
import subprocess
def base_path():
"""Returns the base path for the project"""
return Path(__file__).parent.parent
def capture(command):
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = proc.communicate()
return out, err, proc.returncode
def test_path():
"""Returns the path to the tests directory"""
return os.path.join(base_path(), "tests")
def test_podman_compose_extends_w_file_subdir():
"""
Test that podman-compose can execute podman-compose -f <file> up with an extended file which
includes a build context
:return:
"""
main_path = Path(__file__).parent.parent
command_up = [
"coverage",
"run",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "extends_w_file_subdir", "docker-compose.yml")),
"up",
"-d",
]
command_check_container = [
"coverage",
"run",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "extends_w_file_subdir", "docker-compose.yml")),
"ps",
"--format",
'{{.Image}}',
]
command_down = [
"podman",
"rmi",
"--force",
"localhost/subdir_test:me",
"docker.io/library/busybox",
]
out, _, returncode = capture(command_up)
assert 0 == returncode
# check container was created and exists
out, err, returncode = capture(command_check_container)
assert 0 == returncode
assert b'localhost/subdir_test:me\n' == out
out, _, returncode = capture(command_down)
# cleanup test image(tags)
assert 0 == returncode
print('ok')
# check container no longer exists
out, _, returncode = capture(command_check_container)
assert 0 == returncode
assert b'' == out
def podman_compose_path():
"""Returns the path to the podman compose script"""
return os.path.join(base_path(), "podman_compose.py")
def test_podman_compose_extends_w_empty_service():
"""
Test that podman-compose can execute podman-compose -f <file> up with an extended file which
includes an empty service. (e.g. if the file is used as a placeholder for more complex configurations.)
:return:
"""
main_path = Path(__file__).parent.parent
command_up = [
"python3",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(
main_path.joinpath("tests", "extends_w_empty_service", "docker-compose.yml")
),
"up",
"-d",
]
class TestPodmanCompose(unittest.TestCase, RunSubprocessMixin):
def test_extends_w_file_subdir(self):
"""
Test that podman-compose can execute podman-compose -f <file> up with an extended file which
includes a build context
:return:
"""
main_path = Path(__file__).parent.parent
command_up = [
"coverage",
"run",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "extends_w_file_subdir", "docker-compose.yml")),
"up",
"-d",
]
command_check_container = [
"coverage",
"run",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "extends_w_file_subdir", "docker-compose.yml")),
"ps",
"--format",
'{{.Image}}',
]
self.run_subprocess_assert_returncode(command_up)
# check container was created and exists
out, _ = self.run_subprocess_assert_returncode(command_check_container)
self.assertEqual(out, b'localhost/subdir_test:me\n')
# cleanup test image(tags)
self.run_subprocess_assert_returncode([
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "extends_w_file_subdir", "docker-compose.yml")),
"down",
])
self.run_subprocess_assert_returncode([
"podman",
"rmi",
"--force",
"localhost/subdir_test:me",
])
# check container no longer exists
out, _ = self.run_subprocess_assert_returncode(command_check_container)
self.assertEqual(out, b'')
def test_extends_w_empty_service(self):
"""
Test that podman-compose can execute podman-compose -f <file> up with an extended file which
includes an empty service. (e.g. if the file is used as a placeholder for more complex
configurations.)
"""
main_path = Path(__file__).parent.parent
command_up = [
"python3",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "extends_w_empty_service", "docker-compose.yml")),
"up",
"-d",
]
self.run_subprocess_assert_returncode(command_up)
_, _, returncode = capture(command_up)
assert 0 == returncode

View File

@ -1,90 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
"""Test how secrets in files are passed to podman."""
import os
import subprocess
import unittest
from .test_podman_compose import podman_compose_path
from .test_podman_compose import test_path
def compose_yaml_path():
"""Returns the path to the compose file used for this test module"""
return os.path.join(test_path(), "build_secrets")
class TestComposeBuildSecrets(unittest.TestCase):
def test_run_secret(self):
"""podman run should receive file secrets as --volume
See build_secrets/docker-compose.yaml for secret names and mount points (aka targets)
"""
cmd = (
"coverage",
"run",
podman_compose_path(),
"--dry-run",
"--verbose",
"-f",
os.path.join(compose_yaml_path(), "docker-compose.yaml"),
"run",
"test",
)
p = subprocess.run(
cmd, stdout=subprocess.PIPE, check=False, stderr=subprocess.STDOUT, text=True
)
self.assertEqual(p.returncode, 0)
secret_path = os.path.join(compose_yaml_path(), "my_secret")
self.assertIn(f"--volume {secret_path}:/run/secrets/run_secret:ro,rprivate,rbind", p.stdout)
self.assertIn(f"--volume {secret_path}:/tmp/run_secret2:ro,rprivate,rbind", p.stdout)
def test_build_secret(self):
"""podman build should receive secrets as --secret, so that they can be used inside the
Dockerfile in "RUN --mount=type=secret ..." commands.
"""
cmd = (
"coverage",
"run",
podman_compose_path(),
"--dry-run",
"--verbose",
"-f",
os.path.join(compose_yaml_path(), "docker-compose.yaml"),
"build",
)
p = subprocess.run(
cmd, stdout=subprocess.PIPE, check=False, stderr=subprocess.STDOUT, text=True
)
self.assertEqual(p.returncode, 0)
secret_path = os.path.join(compose_yaml_path(), "my_secret")
self.assertIn(f"--secret id=build_secret,src={secret_path}", p.stdout)
self.assertIn(f"--secret id=build_secret2,src={secret_path}", p.stdout)
def test_invalid_build_secret(self):
"""build secrets in docker-compose file can only have a target argument without directory
component
"""
cmd = (
"coverage",
"run",
podman_compose_path(),
"--dry-run",
"--verbose",
"-f",
os.path.join(compose_yaml_path(), "docker-compose.yaml.invalid"),
"build",
)
p = subprocess.run(
cmd, stdout=subprocess.PIPE, check=False, stderr=subprocess.STDOUT, text=True
)
self.assertNotEqual(p.returncode, 0)
self.assertIn(
'ValueError: ERROR: Build secret "build_secret" has invalid target "/build_secret"',
p.stdout,
)

View File

@ -1,93 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
"""Test how ulimits are applied in podman-compose build."""
import os
import subprocess
import unittest
from .test_podman_compose import podman_compose_path
from .test_podman_compose import test_path
def compose_yaml_path():
"""Returns the path to the compose file used for this test module"""
return os.path.join(test_path(), "ulimit_build")
class TestComposeBuildUlimits(unittest.TestCase):
def test_build_ulimits_ulimit1(self):
"""podman build should receive and apply limits when building service ulimit1"""
cmd = (
"coverage",
"run",
podman_compose_path(),
"--verbose",
"-f",
os.path.join(compose_yaml_path(), "docker-compose.yaml"),
"build",
"--no-cache",
"ulimit1",
)
p = subprocess.run(
cmd, stdout=subprocess.PIPE, check=False, stderr=subprocess.STDOUT, text=True
)
self.assertEqual(p.returncode, 0)
self.assertIn("--ulimit nofile=1001", p.stdout)
self.assertIn("soft nofile limit: 1001", p.stdout)
self.assertIn("hard nofile limit: 1001", p.stdout)
def test_build_ulimits_ulimit2(self):
"""podman build should receive and apply limits when building service ulimit2"""
cmd = (
"coverage",
"run",
podman_compose_path(),
"--verbose",
"-f",
os.path.join(compose_yaml_path(), "docker-compose.yaml"),
"build",
"--no-cache",
"ulimit2",
)
p = subprocess.run(
cmd, stdout=subprocess.PIPE, check=False, stderr=subprocess.STDOUT, text=True
)
self.assertEqual(p.returncode, 0)
self.assertIn("--ulimit nofile=1002", p.stdout)
self.assertIn("--ulimit nproc=1002:2002", p.stdout)
self.assertIn("soft process limit: 1002", p.stdout)
self.assertIn("hard process limit: 2002", p.stdout)
self.assertIn("soft nofile limit: 1002", p.stdout)
self.assertIn("hard nofile limit: 1002", p.stdout)
def test_build_ulimits_ulimit3(self):
"""podman build should receive and apply limits when building service ulimit3"""
cmd = (
"coverage",
"run",
podman_compose_path(),
"--verbose",
"-f",
os.path.join(compose_yaml_path(), "docker-compose.yaml"),
"build",
"--no-cache",
"ulimit3",
)
p = subprocess.run(
cmd, stdout=subprocess.PIPE, check=False, stderr=subprocess.STDOUT, text=True
)
self.assertEqual(p.returncode, 0)
self.assertIn("--ulimit nofile=1003", p.stdout)
self.assertIn("--ulimit nproc=1003:2003", p.stdout)
self.assertIn("soft process limit: 1003", p.stdout)
self.assertIn("hard process limit: 2003", p.stdout)
self.assertIn("soft nofile limit: 1003", p.stdout)
self.assertIn("hard nofile limit: 1003", p.stdout)

View File

@ -1,82 +1,78 @@
# SPDX-License-Identifier: GPL-2.0
"""
test_podman_compose_config.py
Tests the podman-compose config command which is used to return defined compose services.
"""
# pylint: disable=redefined-outer-name
import os
import unittest
from parameterized import parameterized
from .test_podman_compose import podman_compose_path
from .test_podman_compose import test_path
from .test_utils import RunSubprocessMixin
from test_podman_compose import capture
import pytest
def profile_compose_file():
@pytest.fixture
def profile_compose_file(test_path):
"""Returns the path to the `profile` compose file used for this test module"""
return os.path.join(test_path(), "profile", "docker-compose.yml")
return os.path.join(test_path, "profile", "docker-compose.yml")
class TestComposeConfig(unittest.TestCase, RunSubprocessMixin):
def test_config_no_profiles(self):
"""
Tests podman-compose config command without profile enablement.
"""
config_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
profile_compose_file(),
"config",
]
def test_config_no_profiles(podman_compose_path, profile_compose_file):
"""
Tests podman-compose config command without profile enablement.
out, _ = self.run_subprocess_assert_returncode(config_cmd)
:param podman_compose_path: The fixture used to specify the path to the podman compose script.
:param profile_compose_file: The fixture used to specify the path to the "profile" compose file used in the test.
"""
config_cmd = ["coverage", "run", podman_compose_path, "-f", profile_compose_file, "config"]
string_output = out.decode("utf-8")
self.assertIn("default-service", string_output)
self.assertNotIn("service-1", string_output)
self.assertNotIn("service-2", string_output)
out, _, return_code = capture(config_cmd)
assert return_code == 0
@parameterized.expand(
[
(
["--profile", "profile-1", "config"],
{"default-service": True, "service-1": True, "service-2": False},
),
(
["--profile", "profile-2", "config"],
{"default-service": True, "service-1": False, "service-2": True},
),
(
["--profile", "profile-1", "--profile", "profile-2", "config"],
{"default-service": True, "service-1": True, "service-2": True},
),
],
)
def test_config_profiles(self, profiles, expected_services):
"""
Tests podman-compose
:param profiles: The enabled profiles for the parameterized test.
:param expected_services: Dictionary used to model the expected "enabled" services in the
profile. Key = service name, Value = True if the service is enabled, otherwise False.
"""
config_cmd = ["coverage", "run", podman_compose_path(), "-f", profile_compose_file()]
config_cmd.extend(profiles)
string_output = out.decode("utf-8")
assert "default-service" in string_output
assert "service-1" not in string_output
assert "service-2" not in string_output
out, _ = self.run_subprocess_assert_returncode(config_cmd)
actual_output = out.decode("utf-8")
@pytest.mark.parametrize(
"profiles, expected_services",
[
(
["--profile", "profile-1", "config"],
{"default-service": True, "service-1": True, "service-2": False},
),
(
["--profile", "profile-2", "config"],
{"default-service": True, "service-1": False, "service-2": True},
),
(
["--profile", "profile-1", "--profile", "profile-2", "config"],
{"default-service": True, "service-1": True, "service-2": True},
),
],
)
def test_config_profiles(
podman_compose_path, profile_compose_file, profiles, expected_services
):
"""
Tests podman-compose
:param podman_compose_path: The fixture used to specify the path to the podman compose script.
:param profile_compose_file: The fixture used to specify the path to the "profile" compose file used in the test.
:param profiles: The enabled profiles for the parameterized test.
:param expected_services: Dictionary used to model the expected "enabled" services in the profile.
Key = service name, Value = True if the service is enabled, otherwise False.
"""
config_cmd = ["coverage", "run", podman_compose_path, "-f", profile_compose_file]
config_cmd.extend(profiles)
self.assertEqual(len(expected_services), 3)
out, _, return_code = capture(config_cmd)
assert return_code == 0
actual_services = {}
for service, _ in expected_services.items():
actual_services[service] = service in actual_output
actual_output = out.decode("utf-8")
self.assertEqual(expected_services, actual_services)
assert len(expected_services) == 3
actual_services = {}
for service, _ in expected_services.items():
actual_services[service] = service in actual_output
assert expected_services == actual_services

View File

@ -1,62 +1,72 @@
# SPDX-License-Identifier: GPL-2.0
import unittest
from pathlib import Path
from .test_utils import RunSubprocessMixin
import subprocess
class TestPodmanComposeInclude(unittest.TestCase, RunSubprocessMixin):
def test_podman_compose_include(self):
"""
Test that podman-compose can execute podman-compose -f <file> up with include
:return:
"""
main_path = Path(__file__).parent.parent
def capture(command):
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = proc.communicate()
return out, err, proc.returncode
command_up = [
"coverage",
"run",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "include", "docker-compose.yaml")),
"up",
"-d",
]
command_check_container = [
"podman",
"ps",
"-a",
"--filter",
"label=io.podman.compose.project=include",
"--format",
'"{{.Image}}"',
]
def test_podman_compose_include():
"""
Test that podman-compose can execute podman-compose -f <file> up with include
:return:
"""
main_path = Path(__file__).parent.parent
command_container_id = [
"podman",
"ps",
"-a",
"--filter",
"label=io.podman.compose.project=include",
"--format",
'"{{.ID}}"',
]
command_up = [
"coverage",
"run",
str(main_path.joinpath("podman_compose.py")),
"-f",
str(main_path.joinpath("tests", "include", "docker-compose.yaml")),
"up",
"-d",
]
command_down = ["podman", "rm", "--force", "CONTAINER_ID"]
command_check_container = [
"podman",
"ps",
"-a",
"--filter",
"label=io.podman.compose.project=include",
"--format",
'"{{.Image}}"',
]
self.run_subprocess_assert_returncode(command_up)
out, _ = self.run_subprocess_assert_returncode(command_check_container)
self.assertEqual(out, b'"localhost/nopush/podman-compose-test:latest"\n')
# Get container ID to remove it
out, _ = self.run_subprocess_assert_returncode(command_container_id)
self.assertNotEqual(out, b"")
container_id = out.decode().strip().replace('"', "")
command_down[3] = container_id
out, _ = self.run_subprocess_assert_returncode(command_down)
# cleanup test image(tags)
self.assertNotEqual(out, b"")
# check container no longer exists
out, _ = self.run_subprocess_assert_returncode(command_check_container)
self.assertEqual(out, b"")
command_container_id = [
"podman",
"ps",
"-a",
"--filter",
"label=io.podman.compose.project=include",
"--format",
'"{{.ID}}"',
]
command_down = ["podman", "rm", "--force", "CONTAINER_ID"]
out, _, returncode = capture(command_up)
assert 0 == returncode
out, _, returncode = capture(command_check_container)
assert 0 == returncode
assert out == b'"docker.io/library/busybox:latest"\n'
# Get container ID to remove it
out, _, returncode = capture(command_container_id)
assert 0 == returncode
assert out != b""
container_id = out.decode().strip().replace('"', "")
command_down[3] = container_id
out, _, returncode = capture(command_down)
# cleanup test image(tags)
assert 0 == returncode
assert out != b""
# check container no longer exists
out, _, returncode = capture(command_check_container)
assert 0 == returncode
assert out == b""

View File

@ -1,116 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
"""
test_podman_compose_networks.py
Tests the podman networking parameters
"""
# pylint: disable=redefined-outer-name
import os
import unittest
from .test_podman_compose import podman_compose_path
from .test_podman_compose import test_path
from .test_utils import RunSubprocessMixin
class TestPodmanComposeNetwork(RunSubprocessMixin, unittest.TestCase):
@staticmethod
def compose_file():
"""Returns the path to the compose file used for this test module"""
return os.path.join(test_path(), "nets_test_ip", "docker-compose.yml")
def teardown(self):
"""
Ensures that the services within the "profile compose file" are removed between
each test case.
"""
# run the test case
yield
down_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
self.compose_file(),
"kill",
"-a",
]
self.run_subprocess(down_cmd)
def test_networks(self):
up_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
self.compose_file(),
"up",
"-d",
"--force-recreate",
]
self.run_subprocess_assert_returncode(up_cmd)
check_cmd = [
podman_compose_path(),
"-f",
self.compose_file(),
"ps",
"--format",
'"{{.Names}}"',
]
out, _ = self.run_subprocess_assert_returncode(check_cmd)
self.assertIn(b"nets_test_ip_web1_1", out)
self.assertIn(b"nets_test_ip_web2_1", out)
expected_wget = {
"172.19.1.10": "test1",
"172.19.2.10": "test1",
"172.19.2.11": "test2",
"web3": "test3",
"172.19.1.13": "test4",
}
for service in ("web1", "web2"):
for ip, expect in expected_wget.items():
wget_cmd = [
podman_compose_path(),
"-f",
self.compose_file(),
"exec",
service,
"wget",
"-q",
"-O-",
f"http://{ip}:8001/index.txt",
]
out, _ = self.run_subprocess_assert_returncode(wget_cmd)
self.assertEqual(f"{expect}\r\n", out.decode('utf-8'))
expected_macip = {
"web1": {
"eth0": ["172.19.1.10", "02:01:01:00:01:01"],
"eth1": ["172.19.2.10", "02:01:01:00:02:01"],
},
"web2": {"eth0": ["172.19.2.11", "02:01:01:00:02:02"]},
}
for service, interfaces in expected_macip.items():
ip_cmd = [
podman_compose_path(),
"-f",
self.compose_file(),
"exec",
service,
"ip",
"addr",
"show",
]
out, _ = self.run_subprocess_assert_returncode(ip_cmd)
for interface, values in interfaces.items():
ip, mac = values
self.assertIn(f"ether {mac}", out.decode('utf-8'))
self.assertIn(f"inet {ip}/", out.decode('utf-8'))

View File

@ -1,189 +1,180 @@
# SPDX-License-Identifier: GPL-2.0
"""
test_podman_compose_up_down.py
Tests the podman compose up and down commands used to create and remove services.
"""
# pylint: disable=redefined-outer-name
import os
import unittest
import time
from .test_podman_compose import podman_compose_path
from .test_podman_compose import test_path
from .test_utils import RunSubprocessMixin
from test_podman_compose import capture
class TestPodmanCompose(unittest.TestCase, RunSubprocessMixin):
def test_exit_from(self):
up_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "exit-from", "docker-compose.yaml"),
"up",
]
def test_exit_from(podman_compose_path, test_path):
up_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "exit-from", "docker-compose.yaml"),
"up"
]
self.run_subprocess_assert_returncode(up_cmd + ["--exit-code-from", "sh1"], 1)
self.run_subprocess_assert_returncode(up_cmd + ["--exit-code-from", "sh2"], 2)
out, _, return_code = capture(up_cmd + ["--exit-code-from", "sh1"])
assert return_code == 1
def test_run(self):
"""
This will test depends_on as well
"""
run_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "deps", "docker-compose.yaml"),
"run",
"--rm",
"sleep",
"/bin/sh",
"-c",
"wget -q -O - http://web:8000/hosts",
]
out, _, return_code = capture(up_cmd + ["--exit-code-from", "sh2"])
assert return_code == 2
out, _ = self.run_subprocess_assert_returncode(run_cmd)
self.assertIn(b'127.0.0.1\tlocalhost', out)
# Run it again to make sure we can run it twice. I saw an issue where a second run, with
# the container left up, would fail
run_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "deps", "docker-compose.yaml"),
"run",
"--rm",
"sleep",
"/bin/sh",
"-c",
"wget -q -O - http://web:8000/hosts",
]
def test_run(podman_compose_path, test_path):
"""
This will test depends_on as well
"""
run_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "deps", "docker-compose.yaml"),
"run",
"--rm",
"sleep",
"/bin/sh",
"-c",
"wget -q -O - http://web:8000/hosts"
]
out, _ = self.run_subprocess_assert_returncode(run_cmd)
self.assertIn(b'127.0.0.1\tlocalhost', out)
out, _, return_code = capture(run_cmd)
assert b'127.0.0.1\tlocalhost' in out
# This leaves a container running. Not sure it's intended, but it matches docker-compose
down_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "deps", "docker-compose.yaml"),
"down",
]
# Run it again to make sure we can run it twice. I saw an issue where a second run, with the container left up,
# would fail
run_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "deps", "docker-compose.yaml"),
"run",
"--rm",
"sleep",
"/bin/sh",
"-c",
"wget -q -O - http://web:8000/hosts"
]
self.run_subprocess_assert_returncode(down_cmd)
out, _, return_code = capture(run_cmd)
assert b'127.0.0.1\tlocalhost' in out
assert return_code == 0
def test_up_with_ports(self):
up_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "ports", "docker-compose.yml"),
"up",
"-d",
"--force-recreate",
]
# This leaves a container running. Not sure it's intended, but it matches docker-compose
down_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "deps", "docker-compose.yaml"),
"down",
]
down_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "ports", "docker-compose.yml"),
"down",
"--volumes",
]
out, _, return_code = capture(run_cmd)
assert return_code == 0
try:
self.run_subprocess_assert_returncode(up_cmd)
finally:
self.run_subprocess_assert_returncode(down_cmd)
def test_up_with_ports(podman_compose_path, test_path):
def test_down_with_vols(self):
up_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "vol", "docker-compose.yaml"),
"up",
"-d",
]
down_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "vol", "docker-compose.yaml"),
"down",
"--volumes",
]
up_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "ports", "docker-compose.yml"),
"up",
"-d",
"--force-recreate"
]
try:
self.run_subprocess_assert_returncode(["podman", "volume", "create", "my-app-data"])
self.run_subprocess_assert_returncode([
"podman",
"volume",
"create",
"actual-name-of-volume",
])
down_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "ports", "docker-compose.yml"),
"down",
"--volumes"
]
self.run_subprocess_assert_returncode(up_cmd)
self.run_subprocess(["podman", "inspect", "volume", ""])
try:
out, _, return_code = capture(up_cmd)
assert return_code == 0
finally:
out, _, return_code = self.run_subprocess(down_cmd)
self.run_subprocess(["podman", "volume", "rm", "my-app-data"])
self.run_subprocess(["podman", "volume", "rm", "actual-name-of-volume"])
self.assertEqual(return_code, 0)
def test_down_with_orphans(self):
container_id, _ = self.run_subprocess_assert_returncode([
"podman",
"run",
"--rm",
"-d",
"nopush/podman-compose-test",
"dumb-init",
"/bin/busybox",
"httpd",
"-f",
"-h",
"/etc/",
"-p",
"8000",
])
finally:
out, _, return_code = capture(down_cmd)
assert return_code == 0
down_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
os.path.join(test_path(), "ports", "docker-compose.yml"),
"down",
"--volumes",
"--remove-orphans",
]
self.run_subprocess_assert_returncode(down_cmd)
def test_down_with_vols(podman_compose_path, test_path):
up_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "vol", "docker-compose.yaml"),
"up",
"-d"
]
down_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "vol", "docker-compose.yaml"),
"down",
"--volumes"
]
try:
out, _, return_code = capture(["podman", "volume", "create", "my-app-data"])
assert return_code == 0
out, _, return_code = capture(["podman", "volume", "create", "actual-name-of-volume"])
assert return_code == 0
out, _, return_code = capture(up_cmd)
assert return_code == 0
capture(["podman", "inspect", "volume", ""])
finally:
out, _, return_code = capture(down_cmd)
capture(["podman", "volume", "rm", "my-app-data"])
capture(["podman", "volume", "rm", "actual-name-of-volume"])
assert return_code == 0
def test_down_with_orphans(podman_compose_path, test_path):
container_id, _ , return_code = capture(["podman", "run", "--rm", "-d", "busybox", "/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"])
down_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
os.path.join(test_path, "ports", "docker-compose.yml"),
"down",
"--volumes",
"--remove-orphans"
]
out, _, return_code = capture(down_cmd)
assert return_code == 0
_, _, exists = capture(["podman", "container", "exists", container_id.decode("utf-8")])
assert exists == 1
self.run_subprocess_assert_returncode(
[
"podman",
"container",
"exists",
container_id.decode("utf-8"),
],
1,
)

View File

@ -1,91 +1,91 @@
# SPDX-License-Identifier: GPL-2.0
"""
test_podman_compose_up_down.py
Tests the podman compose up and down commands used to create and remove services.
"""
# pylint: disable=redefined-outer-name
import os
import unittest
from parameterized import parameterized
from .test_podman_compose import podman_compose_path
from .test_podman_compose import test_path
from .test_utils import RunSubprocessMixin
from test_podman_compose import capture
import pytest
def profile_compose_file():
@pytest.fixture
def profile_compose_file(test_path):
"""Returns the path to the `profile` compose file used for this test module"""
return os.path.join(test_path(), "profile", "docker-compose.yml")
return os.path.join(test_path, "profile", "docker-compose.yml")
class TestUpDown(unittest.TestCase, RunSubprocessMixin):
def tearDown(self):
"""
Ensures that the services within the "profile compose file" are removed between each test
case.
"""
# run the test case
@pytest.fixture(autouse=True)
def teardown(podman_compose_path, profile_compose_file):
"""
Ensures that the services within the "profile compose file" are removed between each test case.
down_cmd = [
"coverage",
"run",
podman_compose_path(),
"--profile",
"profile-1",
"--profile",
"profile-2",
"-f",
profile_compose_file(),
"down",
]
self.run_subprocess(down_cmd)
:param podman_compose_path: The path to the podman compose script.
:param profile_compose_file: The path to the compose file used for this test module.
"""
# run the test case
yield
@parameterized.expand(
[
(
["--profile", "profile-1", "up", "-d"],
{"default-service": True, "service-1": True, "service-2": False},
),
(
["--profile", "profile-2", "up", "-d"],
{"default-service": True, "service-1": False, "service-2": True},
),
(
["--profile", "profile-1", "--profile", "profile-2", "up", "-d"],
{"default-service": True, "service-1": True, "service-2": True},
),
],
)
def test_up(self, profiles, expected_services):
up_cmd = [
"coverage",
"run",
podman_compose_path(),
"-f",
profile_compose_file(),
]
up_cmd.extend(profiles)
down_cmd = [
"coverage",
"run",
podman_compose_path,
"--profile",
"profile-1",
"--profile",
"profile-2",
"-f",
profile_compose_file,
"down",
]
capture(down_cmd)
self.run_subprocess_assert_returncode(up_cmd)
check_cmd = [
"podman",
"container",
"ps",
"--format",
'"{{.Names}}"',
]
out, _ = self.run_subprocess_assert_returncode(check_cmd)
@pytest.mark.parametrize(
"profiles, expected_services",
[
(
["--profile", "profile-1", "up", "-d"],
{"default-service": True, "service-1": True, "service-2": False},
),
(
["--profile", "profile-2", "up", "-d"],
{"default-service": True, "service-1": False, "service-2": True},
),
(
["--profile", "profile-1", "--profile", "profile-2", "up", "-d"],
{"default-service": True, "service-1": True, "service-2": True},
),
],
)
def test_up(podman_compose_path, profile_compose_file, profiles, expected_services):
up_cmd = [
"coverage",
"run",
podman_compose_path,
"-f",
profile_compose_file,
]
up_cmd.extend(profiles)
self.assertEqual(len(expected_services), 3)
actual_output = out.decode("utf-8")
out, _, return_code = capture(up_cmd)
assert return_code == 0
actual_services = {}
for service, _ in expected_services.items():
actual_services[service] = service in actual_output
check_cmd = [
"podman",
"container",
"ps",
"--format",
'"{{.Names}}"',
]
out, _, return_code = capture(check_cmd)
assert return_code == 0
self.assertEqual(expected_services, actual_services)
assert len(expected_services) == 3
actual_output = out.decode("utf-8")
actual_services = {}
for service, _ in expected_services.items():
actual_services[service] = service in actual_output
assert expected_services == actual_services

View File

@ -1,38 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
import os
import subprocess
import time
class RunSubprocessMixin:
def is_debug_enabled(self):
return "TESTS_DEBUG" in os.environ
def run_subprocess(self, args):
begin = time.time()
if self.is_debug_enabled():
print("TEST_CALL", args)
proc = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = proc.communicate()
if self.is_debug_enabled():
print("TEST_CALL completed", time.time() - begin)
print("STDOUT:", out.decode('utf-8'))
print("STDERR:", err.decode('utf-8'))
return out, err, proc.returncode
def run_subprocess_assert_returncode(self, args, expected_returncode=0):
out, err, returncode = self.run_subprocess(args)
decoded_out = out.decode('utf-8')
decoded_err = err.decode('utf-8')
self.assertEqual(
returncode,
expected_returncode,
f"Invalid return code of process {returncode} != {expected_returncode}\n"
f"stdout: {decoded_out}\nstderr: {decoded_err}\n",
)
return out, err

View File

@ -1,5 +0,0 @@
FROM busybox
COPY ./ulimit.sh /bin/ulimit.sh
RUN /bin/ulimit.sh

View File

@ -1,26 +0,0 @@
version: "3"
services:
ulimit1:
image: ulimit_build_test
build:
context: ./
dockerfile: Dockerfile
ulimits: nofile=1001
ulimit2:
image: ulimit_build_test
build:
context: ./
dockerfile: Dockerfile
ulimits:
- nproc=1002:2002
- nofile=1002
ulimit3:
image: ulimit_build_test
build:
context: ./
dockerfile: Dockerfile
ulimits:
nofile: 1003
nproc:
soft: 1003
hard: 2003

View File

@ -1,6 +0,0 @@
#!/bin/sh
echo "soft process limit:" $(ulimit -S -u)
echo "hard process limit:" $(ulimit -H -u)
echo "soft nofile limit:" $(ulimit -S -n)
echo "hard nofile limit:" $(ulimit -H -n)

View File

@ -1,8 +1,8 @@
version: "3"
services:
web:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8000"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8000"]
working_dir: /var/www/html
restart: always
volumes:
@ -11,21 +11,21 @@ services:
- /run
- /tmp
web1:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
restart: unless-stopped
working_dir: /var/www/html
volumes:
- myvol1:/var/www/html:ro,z
web2:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8002"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8002"]
working_dir: /var/www/html
volumes:
- myvol2:/var/www/html:ro
web3:
image: nopush/podman-compose-test
command: ["dumb-init", "/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8003"]
image: busybox
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8003"]
working_dir: /var/www/html
volumes:
- myvol2:/var/www/html