forked from extern/podman-compose
Compare commits
237 Commits
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -35,7 +35,7 @@ What is the behavior you actually got and that should not happen.
|
||||
```
|
||||
$ podman-compose version
|
||||
using podman version: 3.4.0
|
||||
podman-composer version 0.1.7dev
|
||||
podman-compose version 0.1.7dev
|
||||
podman --version
|
||||
podman version 3.4.0
|
||||
|
||||
|
6
.github/dependabot.yml
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
41
.github/workflows/pylint.yml
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
name: Pylint
|
||||
|
||||
on:
|
||||
- push
|
||||
- pull_request
|
||||
|
||||
jobs:
|
||||
lint-black:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install psf/black requirements
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y python3 python3-venv
|
||||
- uses: psf/black@stable
|
||||
with:
|
||||
options: "--check --verbose"
|
||||
version: "~= 23.3"
|
||||
|
||||
lint-pylint:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
|
||||
pip install pylint
|
||||
- name: Analysing the code with pylint
|
||||
run: |
|
||||
python -m compileall podman_compose.py
|
||||
pylint podman_compose.py
|
||||
# pylint $(git ls-files '*.py')
|
36
.github/workflows/pytest.yml
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
# This workflow will install Python dependencies, run tests and lint with a single version of Python
|
||||
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
|
||||
|
||||
name: PyTest
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ devel ]
|
||||
pull_request:
|
||||
branches: [ devel ]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.10"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install flake8 pytest
|
||||
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
|
||||
- name: Lint with flake8
|
||||
run: |
|
||||
# stop the build if there are Python syntax errors or undefined names
|
||||
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
|
||||
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
|
||||
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
|
||||
- name: Test with pytest
|
||||
run: |
|
||||
python -m pytest ./pytests
|
||||
|
32
.pre-commit-config.yaml
Normal file
@ -0,0 +1,32 @@
|
||||
repos:
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
# It is recommended to specify the latest version of Python
|
||||
# supported by your project here, or alternatively use
|
||||
# pre-commit's default_language_version, see
|
||||
# https://pre-commit.com/#top_level-default_language_version
|
||||
language_version: python3.10
|
||||
types: [python]
|
||||
args: [
|
||||
"--check", # Don't apply changes automatically
|
||||
]
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 6.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
types: [python]
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: pylint
|
||||
language: system
|
||||
types: [python]
|
||||
args:
|
||||
[
|
||||
"-rn", # Only display messages
|
||||
"-sn", # Don't display the score
|
||||
"--rcfile=.pylintrc", # Link to your config file
|
||||
]
|
11
.pylintrc
@ -1,13 +1,18 @@
|
||||
[MESSAGES CONTROL]
|
||||
disable=W0614,C0410,C0321,C0111,I0011,C0103
|
||||
# C0111 missing-docstring: missing-class-docstring, missing-function-docstring, missing-method-docstring, missing-module-docstrin
|
||||
# consider-using-with: we need it for color formatter pipe
|
||||
disable=too-many-lines,too-many-branches,too-many-locals,too-many-statements,too-many-arguments,too-many-instance-attributes,fixme,multiple-statements,missing-docstring,line-too-long,consider-using-f-string,consider-using-with,unnecessary-lambda-assignment
|
||||
# allow _ for ignored variables
|
||||
# allow generic names like a,b,c and i,j,k,l,m,n and x,y,z
|
||||
# allow k,v for key/value
|
||||
# allow e for exceptions, it for iterator
|
||||
# allow e for exceptions, it for iterator, ix for index
|
||||
# allow ip for ip address
|
||||
# allow w,h for width, height
|
||||
# allow op for operation/operator/opcode
|
||||
# allow t, t0, t1, t2, and t3 for time
|
||||
# allow dt for delta time
|
||||
# allow db for database
|
||||
# allow ls for list
|
||||
good-names=_,a,b,c,dt,db,e,f,fn,fd,i,j,k,v,kv,kw,l,m,n,ls,t,t0,t1,t2,t3,w,h,x,y,z,it,op
|
||||
# allow p for pipe
|
||||
# allow ex for examples, exists ..etc
|
||||
good-names=_,a,b,c,dt,db,e,f,fn,fd,i,j,k,v,kv,kw,l,m,n,ls,t,t0,t1,t2,t3,w,h,x,y,z,it,ix,ip,op,p,ex
|
||||
|
@ -1,5 +1,49 @@
|
||||
# Contributing to podman-compose
|
||||
|
||||
## Who can contribute?
|
||||
|
||||
- Users that found a bug
|
||||
- Users that want to propose new functionalities or enhancements
|
||||
- Users that want to help other users to troubleshoot their environments
|
||||
- Developers that want to fix bugs
|
||||
- Developers that want to implement new functionalities or enhancements
|
||||
|
||||
## Branches
|
||||
|
||||
Please request your PR to be merged into the `devel` branch.
|
||||
Changes to the `stable` branch are managed by the repository maintainers.
|
||||
|
||||
## Development environment setup
|
||||
|
||||
Note: Some steps are OPTIONAL but all are RECOMMENDED.
|
||||
|
||||
1. Fork the project repo and clone it
|
||||
```shell
|
||||
$ git clone https://github.com/USERNAME/podman-compose.git
|
||||
$ cd podman-compose
|
||||
```
|
||||
1. (OPTIONAL) Create a python virtual environment. Example using [virtualenv wrapper](https://virtualenvwrapper.readthedocs.io/en/latest/):
|
||||
```shell
|
||||
mkvirtualenv podman-compose
|
||||
```
|
||||
2. Install the project runtime and development requirements
|
||||
```shell
|
||||
$ pip install '.[devel]'
|
||||
```
|
||||
3. (OPTIONAL) Install `pre-commit` git hook scripts (https://pre-commit.com/#3-install-the-git-hook-scripts)
|
||||
```shell
|
||||
$ pre-commit install
|
||||
```
|
||||
4. Create a new branch, develop and add tests when possible
|
||||
5. Run linting & testing before committing code. Ensure all the hooks are passing.
|
||||
```shell
|
||||
$ pre-commit run --all-files
|
||||
```
|
||||
6. Commit your code to your fork's branch.
|
||||
- Make sure you include a `Signed-off-by` message in your commits. Read [this guide](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) to learn how to sign your commits
|
||||
- In the commit message reference the Issue ID that your code fixes and a brief description of the changes. Example: `Fixes #516: allow empty network`
|
||||
7. Open a PR to `containers/podman-compose:devel` and wait for a maintainer to review your work.
|
||||
|
||||
## Adding new commands
|
||||
|
||||
To add a command you need to add a function that is decorated
|
||||
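As a rough illustration only (the decorator names `cmd_run` and `cmd_parse` below are assumptions to be checked against `podman_compose.py`), a new command can be sketched like this; the handler takes the parsed compose model and the CLI arguments, mirroring the `compose_up(compose, args)` signature shown in the next hunk. This fragment is meant to live inside `podman_compose.py`, not stand alone:

```python
# Hypothetical sketch of registering a new "greet" command.
# cmd_run / cmd_parse are assumed to be the existing registration decorators.

@cmd_run(podman_compose, "greet", "print the resolved project name and exit")
def compose_greet(compose, args):
    # `compose` is the parsed compose model, `args` the argparse namespace
    name = compose.project_name
    print(name.upper() if args.shout else name)

@cmd_parse(podman_compose, "greet")
def compose_greet_parse(parser):
    # register command-specific flags on the command's sub-parser
    parser.add_argument("--shout", action="store_true", help="print the name in upper case")
```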
@ -60,15 +104,11 @@ def compose_up(compose, args):
|
||||
create Create services
|
||||
events Receive real time events from containers
|
||||
images List images
|
||||
kill Kill containers
|
||||
logs View output from containers
|
||||
pause Pause services
|
||||
port Print the public port for a port binding
|
||||
ps List containers
|
||||
rm Remove stopped containers
|
||||
run Run a one-off command
|
||||
scale Set number of containers for a service
|
||||
top Display the running processes
|
||||
unpause Unpause services
|
||||
version Show the Docker-Compose version information
|
||||
```
|
||||
|
49
README.md
@ -1,19 +1,22 @@
|
||||
# Podman Compose
|
||||
## [](https://github.com/containers/podman-compose/actions/workflows/pylint.yml) [](https://github.com/containers/podman-compose/actions/workflows/pytest.yml)
|
||||
|
||||
|
||||
An implementation of [Compose Spec](https://compose-spec.io/) with [Podman](https://podman.io/) backend.
|
||||
This project focus on:
|
||||
This project focuses on:
|
||||
|
||||
* rootless
|
||||
* daemon-less process model, we directly execute podman, no running daemon.
|
||||
|
||||
This project only depend on:
|
||||
This project only depends on:
|
||||
|
||||
* `podman`
|
||||
* [podman dnsname plugin](https://github.com/containers/dnsname): It is usually found in the `podman-plugins` or `podman-dnsname` distro packages; those packages are not pulled in by default, so you need to install them. This allows containers to resolve each other if they are on the same CNI network.
|
||||
* Python3
|
||||
* [PyYAML](https://pyyaml.org/)
|
||||
* [python-dotenv](https://pypi.org/project/python-dotenv/)
|
||||
|
||||
And it's formed as a single python file script that you can drop into your PATH and run.
|
||||
And it's formed as a single Python file script that you can drop into your PATH and run.
|
||||
|
||||
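To make the daemon-less point concrete, here is a minimal conceptual sketch (not the project's actual code): every compose action ultimately turns into ordinary `podman` subprocess invocations, which is why a single Python script with no background service is enough.

```python
# Conceptual sketch only -- the example image, names, and port are made up.
import subprocess

def run_podman(*args: str) -> int:
    """Run one podman command and return its exit code."""
    cmd = ["podman", *args]
    print("+", " ".join(cmd))
    return subprocess.call(cmd)

if __name__ == "__main__":
    # roughly what bringing up one service of a stack involves
    run_podman("network", "create", "myproj_default")
    run_podman("run", "-d", "--name", "myproj_web_1",
               "--network", "myproj_default", "-p", "8080:80",
               "docker.io/library/nginx:alpine")
```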
## References:
|
||||
|
||||
@ -35,14 +38,18 @@ OpenShift/Kubernetes distribution like [OKD](https://www.okd.io/).
|
||||
|
||||
## Versions
|
||||
|
||||
If you have legacy version of `podman` (before 3.x) you might need to stick with legacy `podman-compose` `0.1.x` branch.
|
||||
If you have legacy version of `podman` (before 3.1.0) you might need to stick with legacy `podman-compose` `0.1.x` branch.
|
||||
The legacy branch 0.1.x uses mappings and workarounds to compensate for rootless limitations.
|
||||
|
||||
Modern podman versions (>=3.4) do not have those limitations and thus you can use latest and stable 1.x branch.
|
||||
Modern podman versions (>=3.4) do not have those limitations, and thus you can use latest and stable 1.x branch.
|
||||
|
||||
If you are upgrading from `podman-compose` version `0.1.x` then we no longer have global option `-t` to set mapping type
|
||||
like `hostnet`. If you desire that behavior, pass it the standard way like `network_mode: host` in the YAML.
|
||||
|
||||
|
||||
## Installation
|
||||
|
||||
Install latest stable version from PyPI:
|
||||
Install the latest stable version from PyPI:
|
||||
|
||||
```
|
||||
pip3 install podman-compose
|
||||
@ -56,19 +63,6 @@ Or latest development version from GitHub:
|
||||
pip3 install https://github.com/containers/podman-compose/archive/devel.tar.gz
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```
|
||||
curl -o /usr/local/bin/podman-compose https://raw.githubusercontent.com/containers/podman-compose/devel/podman_compose.py
|
||||
chmod +x /usr/local/bin/podman-compose
|
||||
```
|
||||
|
||||
or inside your home
|
||||
|
||||
```
|
||||
curl -o ~/.local/bin/podman-compose https://raw.githubusercontent.com/containers/podman-compose/devel/podman_compose.py
|
||||
chmod +x ~/.local/bin/podman-compose
|
||||
```
|
||||
|
||||
or install from Fedora (starting from f31) repositories:
|
||||
|
||||
@ -79,6 +73,8 @@ sudo dnf install podman-compose
|
||||
## Basic Usage
|
||||
|
||||
We have included fully functional sample stacks inside the `examples/` directory.
|
||||
You can get more examples from [awesome-compose](https://github.com/docker/awesome-compose).
|
||||
|
||||
|
||||
A quick example would be
|
||||
|
||||
@ -99,12 +95,21 @@ which have
|
||||
- a django tasks
|
||||
|
||||
|
||||
When testing the `AWX3` example, if you got errors just wait for db migrations to end.
|
||||
|
||||
When testing the `AWX3` example, if you got errors, just wait for db migrations to end.
|
||||
There is also an AWX 17.1.0 example.
|
||||
|
||||
## Tests
|
||||
|
||||
Inside `tests/` directory we have many useless docker-compose stacks
|
||||
that are meant to test as much cases as we can to make sure we are compatible
|
||||
that are meant to test as many cases as we can to make sure we are compatible
|
||||
|
||||
### Unit tests with pytest
|
||||
Run pytest with the following command:
|
||||
|
||||
```shell
|
||||
python -m pytest pytests
|
||||
```
|
||||
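For orientation, a unit test under `pytests/` can exercise the pure helpers in `podman_compose.py` directly. The sketch below assumes a normalization helper named `norm_as_list` is importable; adjust the import to whichever helper you are actually testing.

```python
# Hypothetical test sketch -- the helper name norm_as_list is an assumption;
# point the import at whatever pure function from podman_compose you test.
from podman_compose import norm_as_list

def test_norm_as_list_accepts_dict():
    # dict-style env/label mappings normalize to "KEY=VALUE" strings
    assert set(norm_as_list({"a": "1", "b": "2"})) == {"a=1", "b=2"}

def test_norm_as_list_keeps_list():
    # list input should pass through unchanged
    assert norm_as_list(["x=1", "y=2"]) == ["x=1", "y=2"]
```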
|
||||
# Contributing guide
|
||||
|
||||
If you are a user or a developer and want to contribute, please check the [CONTRIBUTING](CONTRIBUTING.md) section
|
||||
|
411
completion/bash/podman-compose
Normal file
@ -0,0 +1,411 @@
|
||||
# Naming convention:
|
||||
# * _camelCase for function names
|
||||
# * snake_case for variable names
|
||||
|
||||
# all functions will return 0 if they successfully complete the argument
|
||||
# (or establish there is no need or no way to complete), and something
|
||||
# other than 0 if that's not the case
|
||||
|
||||
# complete arguments to global options
|
||||
_completeGlobalOptArgs() {
|
||||
# arguments to options that take paths as arguments: complete paths
|
||||
for el in ${path_arg_global_opts}; do
|
||||
if [[ ${prev} == ${el} ]]; then
|
||||
COMPREPLY=( $(compgen -f -- ${cur}) )
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
|
||||
# arguments to options that take generic arguments: don't complete
|
||||
for el in ${generic_arg_global_opts}; do
|
||||
if [[ ${prev} == ${el} ]]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
# complete root subcommands and options
|
||||
_completeRoot() {
|
||||
# if we're completing an option
|
||||
if [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${global_opts}" -- ${cur}) )
|
||||
return 0
|
||||
fi
|
||||
# complete root commands
|
||||
COMPREPLY=( $(compgen -W "${root_commands}" -- ${cur}) )
|
||||
return 0
|
||||
}
|
||||
|
||||
# complete names of Compose services
|
||||
_completeServiceNames() {
|
||||
# ideally we should complete service names,
|
||||
# but parsing the compose spec file in the
|
||||
# completion script is quite complex
|
||||
return 0
|
||||
}
|
||||
|
||||
# complete commands to run inside containers
|
||||
_completeCommand() {
|
||||
# we would need to complete commands to run inside
|
||||
# a container
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
# complete the arguments for `podman-compose up` and return 0
|
||||
_completeUpArgs() {
|
||||
up_opts="${help_opts} -d --detach --no-color --quiet-pull --no-deps --force-recreate --always-recreate-deps --no-recreate --no-build --no-start --build --abort-on-container-exit -t --timeout -V --renew-anon-volumes --remove-orphans --scale --exit-code-from --pull --pull-always --build-arg --no-cache"
|
||||
if [[ ${prev} == "--scale" || ${prev} == "-t" || ${prev} == "--timeout" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${up_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose exec` and return 0
|
||||
_completeExecArgs() {
|
||||
exec_opts="${help_opts} -d --detach --privileged -u --user -T --index -e --env -w --workdir"
|
||||
if [[ ${prev} == "-u" || ${prev} == "--user" || ${prev} == "--index" || ${prev} == "-e" || ${prev} == "--env" || ${prev} == "-w" || ${prev} == "--workdir" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${exec_opts}" -- ${cur}) )
|
||||
return 0
|
||||
elif [[ ${comp_cword_adj} -eq 2 ]]; then
|
||||
# complete service name
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
elif [[ ${comp_cword_adj} -eq 3 ]]; then
|
||||
_completeCommand
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
|
||||
# complete the arguments for `podman-compose down` and return 0
|
||||
_completeDownArgs() {
|
||||
down_opts="${help_opts} -v --volumes -t --timeout --remove-orphans"
|
||||
if [[ ${prev} == "-t" || ${prev} == "--timeout" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${down_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
|
||||
# complete the arguments for `podman-compose build` and return 0
|
||||
_completeBuildArgs() {
|
||||
build_opts="${help_opts} --pull --pull-always --build-arg --no-cache"
|
||||
if [[ ${prev} == "--build-arg" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${build_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose logs` and return 0
|
||||
_completeLogsArgs() {
|
||||
logs_opts="${help_opts} -f --follow -l --latest -n --names --since -t --timestamps --tail --until"
|
||||
if [[ ${prev} == "--since" || ${prev} == "--tail" || ${prev} == "--until" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${logs_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose ps` and return 0
|
||||
_completePsArgs() {
|
||||
ps_opts="${help_opts} -q --quiet"
|
||||
if [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${ps_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose pull` and return 0
|
||||
_completePullArgs() {
|
||||
pull_opts="${help_opts} --force-local"
|
||||
if [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${pull_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose push` and return 0
|
||||
_completePushArgs() {
|
||||
push_opts="${help_opts} --ignore-push-failures"
|
||||
if [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${push_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose restart` and return 0
|
||||
_completeRestartArgs() {
|
||||
restart_opts="${help_opts} -t --timeout"
|
||||
if [[ ${prev} == "-t" || ${prev} == "--timeout" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${restart_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose stop` and return 0
|
||||
_completeStopArgs() {
|
||||
stop_opts="${help_opts} -t --timeout"
|
||||
if [[ ${prev} == "-t" || ${prev} == "--timeout" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${stop_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose start` and return 0
|
||||
_completeStartArgs() {
|
||||
start_opts="${help_opts}"
|
||||
if [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${start_opts}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# complete the arguments for `podman-compose run` and return 0
|
||||
_completeRunArgs() {
|
||||
run_opts="${help_opts} -d --detach --privileged -u --user -T --index -e --env -w --workdir"
|
||||
if [[ ${prev} == "-u" || ${prev} == "--user" || ${prev} == "--index" || ${prev} == "-e" || ${prev} == "--env" || ${prev} == "-w" || ${prev} == "--workdir" ]]; then
|
||||
return 0
|
||||
elif [[ ${cur} == -* ]]; then
|
||||
COMPREPLY=( $(compgen -W "${run_opts}" -- ${cur}) )
|
||||
return 0
|
||||
elif [[ ${comp_cword_adj} -eq 2 ]]; then
|
||||
# complete service name
|
||||
_completeServiceNames
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
elif [[ ${comp_cword_adj} -eq 3 ]]; then
|
||||
_completeCommand
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
_podmanCompose() {
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
root_commands="help version pull push build up down ps run exec start stop restart logs"
|
||||
|
||||
# options to output help text (used as global and subcommand options)
|
||||
help_opts="-h --help"
|
||||
|
||||
# global options that don't take additional arguments
|
||||
basic_global_opts="${help_opts} -v --no-ansi --no-cleanup --dry-run"
|
||||
|
||||
# global options that take paths as arguments
|
||||
path_arg_global_opts="-f --file --podman-path"
|
||||
path_arg_global_opts_array=($arg_global_opts)
|
||||
|
||||
# global options that take arguments that are not files
|
||||
generic_arg_global_opts="-p --project-name --podman-path --podman-args --podman-pull-args --podman-push-args --podman-build-args --podman-inspect-args --podman-run-args --podman-start-args --podman-stop-args --podman-rm-args --podman-volume-args"
|
||||
generic_arg_global_opts_array=($generic_arg_global_opts)
|
||||
|
||||
# all global options that take arguments
|
||||
arg_global_opts="${path_arg_global_opts} ${generic_arg_global_opts}"
|
||||
arg_global_opts_array=($arg_global_opts)
|
||||
|
||||
# all global options
|
||||
global_opts="${basic_global_opts} ${arg_global_opts}"
|
||||
|
||||
chosen_root_command=""
|
||||
|
||||
|
||||
_completeGlobalOptArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
# computing comp_cword_adj, which truthfully tells us how deep in the subcommands tree we are
|
||||
# additionally, set the chosen_root_command if possible
|
||||
comp_cword_adj=${COMP_CWORD}
|
||||
if [[ ${COMP_CWORD} -ge 2 ]]; then
|
||||
skip_next="no"
|
||||
for el in ${COMP_WORDS[@]}; do
|
||||
# if the user has asked for help text there's no need to complete further
|
||||
if [[ ${el} == "-h" || ${el} == "--help" ]]; then
|
||||
return 0
|
||||
fi
|
||||
if [[ ${skip_next} == "yes" ]]; then
|
||||
let "comp_cword_adj--"
|
||||
skip_next="no"
|
||||
continue
|
||||
fi
|
||||
if [[ ${el} == -* && ${el} != ${cur} ]]; then
|
||||
let "comp_cword_adj--"
|
||||
|
||||
for opt in ${arg_global_opts_array[@]}; do
|
||||
if [[ ${el} == ${opt} ]]; then
|
||||
skip_next="yes"
|
||||
fi
|
||||
done
|
||||
elif [[ ${el} != ${cur} && ${el} != ${COMP_WORDS[0]} && ${chosen_root_command} == "" ]]; then
|
||||
chosen_root_command=${el}
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ ${comp_cword_adj} -eq 1 ]]; then
|
||||
_completeRoot
|
||||
|
||||
# Given that we check the value of comp_cword_adj outside
|
||||
# of it, at the moment _completeRoot should always return
|
||||
# 0, this is just here in case changes are made. The same
|
||||
# will apply to similar functions below
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
case $chosen_root_command in
|
||||
up)
|
||||
_completeUpArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
down)
|
||||
_completeDownArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
exec)
|
||||
_completeExecArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
build)
|
||||
_completeBuildArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
logs)
|
||||
_completeLogsArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
ps)
|
||||
_completePsArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
pull)
|
||||
_completePullArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
push)
|
||||
_completePushArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
restart)
|
||||
_completeRestartArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
start)
|
||||
_completeStartArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
stop)
|
||||
_completeStopArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
run)
|
||||
_completeRunArgs
|
||||
if [[ $? -eq 0 ]]; then
|
||||
return 0
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
complete -F _podmanCompose podman-compose
|
37
examples/awx17/README.md
Normal file
@ -0,0 +1,37 @@
|
||||
# AWX Compose
|
||||
|
||||
The `roles` directory is taken from [here](https://github.com/ansible/awx/tree/17.1.0/installer/roles/local_docker)
|
||||
|
||||
Also look at https://github.com/ansible/awx/tree/17.1.0/tools/docker-compose
|
||||
|
||||
```
|
||||
mkdir deploy awx17
|
||||
ansible localhost \
|
||||
-e host_port=8080 \
|
||||
-e awx_secret_key='awx,secret.123' \
|
||||
-e secret_key='awx,secret.123' \
|
||||
-e admin_user='admin' \
|
||||
-e admin_password='admin' \
|
||||
-e pg_password='awx,123.' \
|
||||
-e pg_username='awx' \
|
||||
-e pg_database='awx' \
|
||||
-e pg_port='5432' \
|
||||
-e redis_image="docker.io/library/redis:6-alpine" \
|
||||
-e postgres_data_dir="./data/pg" \
|
||||
-e compose_start_containers=false \
|
||||
-e dockerhub_base='docker.io/ansible' \
|
||||
-e awx_image='docker.io/ansible/awx' \
|
||||
-e awx_version='17.1.0' \
|
||||
-e dockerhub_version='17.1.0' \
|
||||
-e docker_deploy_base_path=$PWD/deploy \
|
||||
-e docker_compose_dir=$PWD/awx17 \
|
||||
-e awx_task_hostname=awx \
|
||||
-e awx_web_hostname=awxweb \
|
||||
-m include_role -a name=local_docker
|
||||
cp awx17/docker-compose.yml awx17/docker-compose.yml.orig
|
||||
sed -i -re "s#- \"$PWD/awx17/(.*):/#- \"./\1:/#" awx17/docker-compose.yml
|
||||
cd awx17
|
||||
podman-compose run --rm --service-ports task awx-manage migrate --no-input
|
||||
podman-compose up -d
|
||||
```
|
||||
|
11
examples/awx17/roles/local_docker/defaults/main.yml
Normal file
@ -0,0 +1,11 @@
|
||||
---
|
||||
dockerhub_version: "{{ lookup('file', playbook_dir + '/../VERSION') }}"
|
||||
|
||||
awx_image: "awx"
|
||||
redis_image: "redis"
|
||||
|
||||
postgresql_version: "12"
|
||||
postgresql_image: "postgres:{{postgresql_version}}"
|
||||
|
||||
compose_start_containers: true
|
||||
upgrade_postgres: false
|
74
examples/awx17/roles/local_docker/tasks/compose.yml
Normal file
@ -0,0 +1,74 @@
|
||||
---
|
||||
- name: Create {{ docker_compose_dir }} directory
|
||||
file:
|
||||
path: "{{ docker_compose_dir }}"
|
||||
state: directory
|
||||
|
||||
- name: Create Redis socket directory
|
||||
file:
|
||||
path: "{{ docker_compose_dir }}/redis_socket"
|
||||
state: directory
|
||||
mode: 0777
|
||||
|
||||
- name: Create Docker Compose Configuration
|
||||
template:
|
||||
src: "{{ item.file }}.j2"
|
||||
dest: "{{ docker_compose_dir }}/{{ item.file }}"
|
||||
mode: "{{ item.mode }}"
|
||||
loop:
|
||||
- file: environment.sh
|
||||
mode: "0600"
|
||||
- file: credentials.py
|
||||
mode: "0600"
|
||||
- file: docker-compose.yml
|
||||
mode: "0600"
|
||||
- file: nginx.conf
|
||||
mode: "0600"
|
||||
- file: redis.conf
|
||||
mode: "0664"
|
||||
register: awx_compose_config
|
||||
|
||||
- name: Render SECRET_KEY file
|
||||
copy:
|
||||
content: "{{ secret_key }}"
|
||||
dest: "{{ docker_compose_dir }}/SECRET_KEY"
|
||||
mode: 0600
|
||||
register: awx_secret_key
|
||||
|
||||
- block:
|
||||
- name: Remove AWX containers before migrating postgres so that the old postgres container does not get used
|
||||
docker_compose:
|
||||
project_src: "{{ docker_compose_dir }}"
|
||||
state: absent
|
||||
ignore_errors: true
|
||||
|
||||
- name: Run migrations in task container
|
||||
shell: docker-compose run --rm --service-ports task awx-manage migrate --no-input
|
||||
args:
|
||||
chdir: "{{ docker_compose_dir }}"
|
||||
|
||||
- name: Start the containers
|
||||
docker_compose:
|
||||
project_src: "{{ docker_compose_dir }}"
|
||||
restarted: "{{ awx_compose_config is changed or awx_secret_key is changed }}"
|
||||
register: awx_compose_start
|
||||
|
||||
- name: Update CA trust in awx_web container
|
||||
command: docker exec awx_web '/usr/bin/update-ca-trust'
|
||||
when: awx_compose_config.changed or awx_compose_start.changed
|
||||
|
||||
- name: Update CA trust in awx_task container
|
||||
command: docker exec awx_task '/usr/bin/update-ca-trust'
|
||||
when: awx_compose_config.changed or awx_compose_start.changed
|
||||
|
||||
- name: Wait for launch script to create user
|
||||
wait_for:
|
||||
timeout: 10
|
||||
delegate_to: localhost
|
||||
|
||||
- name: Create Preload data
|
||||
command: docker exec awx_task bash -c "/usr/bin/awx-manage create_preload_data"
|
||||
when: create_preload_data|bool
|
||||
register: cdo
|
||||
changed_when: "'added' in cdo.stdout"
|
||||
when: compose_start_containers|bool
|
15
examples/awx17/roles/local_docker/tasks/main.yml
Normal file
@ -0,0 +1,15 @@
|
||||
---
|
||||
- name: Generate broadcast websocket secret
|
||||
set_fact:
|
||||
broadcast_websocket_secret: "{{ lookup('password', '/dev/null length=128') }}"
|
||||
run_once: true
|
||||
no_log: true
|
||||
when: broadcast_websocket_secret is not defined
|
||||
|
||||
- import_tasks: upgrade_postgres.yml
|
||||
when:
|
||||
- postgres_data_dir is defined
|
||||
- pg_hostname is not defined
|
||||
|
||||
- import_tasks: set_image.yml
|
||||
- import_tasks: compose.yml
|
46
examples/awx17/roles/local_docker/tasks/set_image.yml
Normal file
@ -0,0 +1,46 @@
|
||||
---
|
||||
- name: Manage AWX Container Images
|
||||
block:
|
||||
- name: Export Docker awx image if it isn't local and there isn't a registry defined
|
||||
docker_image:
|
||||
name: "{{ awx_image }}"
|
||||
tag: "{{ awx_version }}"
|
||||
archive_path: "{{ awx_local_base_config_path|default('/tmp') }}/{{ awx_image }}_{{ awx_version }}.tar"
|
||||
when: inventory_hostname != "localhost" and docker_registry is not defined
|
||||
delegate_to: localhost
|
||||
|
||||
- name: Set docker base path
|
||||
set_fact:
|
||||
docker_deploy_base_path: "{{ awx_base_path|default('/tmp') }}/docker_deploy"
|
||||
when: ansible_connection != "local" and docker_registry is not defined
|
||||
|
||||
- name: Ensure directory exists
|
||||
file:
|
||||
path: "{{ docker_deploy_base_path }}"
|
||||
state: directory
|
||||
when: ansible_connection != "local" and docker_registry is not defined
|
||||
|
||||
- name: Copy awx image to docker execution
|
||||
copy:
|
||||
src: "{{ awx_local_base_config_path|default('/tmp') }}/{{ awx_image }}_{{ awx_version }}.tar"
|
||||
dest: "{{ docker_deploy_base_path }}/{{ awx_image }}_{{ awx_version }}.tar"
|
||||
when: ansible_connection != "local" and docker_registry is not defined
|
||||
|
||||
- name: Load awx image
|
||||
docker_image:
|
||||
name: "{{ awx_image }}"
|
||||
tag: "{{ awx_version }}"
|
||||
load_path: "{{ docker_deploy_base_path }}/{{ awx_image }}_{{ awx_version }}.tar"
|
||||
timeout: 300
|
||||
when: ansible_connection != "local" and docker_registry is not defined
|
||||
|
||||
- name: Set full image path for local install
|
||||
set_fact:
|
||||
awx_docker_actual_image: "{{ awx_image }}:{{ awx_version }}"
|
||||
when: docker_registry is not defined
|
||||
when: dockerhub_base is not defined
|
||||
|
||||
- name: Set DockerHub Image Paths
|
||||
set_fact:
|
||||
awx_docker_actual_image: "{{ dockerhub_base }}/awx:{{ dockerhub_version }}"
|
||||
when: dockerhub_base is defined
|
64
examples/awx17/roles/local_docker/tasks/upgrade_postgres.yml
Normal file
@ -0,0 +1,64 @@
|
||||
---
|
||||
|
||||
- name: Create {{ postgres_data_dir }} directory
|
||||
file:
|
||||
path: "{{ postgres_data_dir }}"
|
||||
state: directory
|
||||
|
||||
- name: Get full path of postgres data dir
|
||||
shell: "echo {{ postgres_data_dir }}"
|
||||
register: fq_postgres_data_dir
|
||||
|
||||
- name: Register temporary docker container
|
||||
set_fact:
|
||||
container_command: "docker run --rm -v '{{ fq_postgres_data_dir.stdout }}:/var/lib/postgresql' centos:8 bash -c "
|
||||
|
||||
- name: Check for existing Postgres data (run from inside the container for access to file)
|
||||
shell:
|
||||
cmd: |
|
||||
{{ container_command }} "[[ -f /var/lib/postgresql/10/data/PG_VERSION ]] && echo 'exists'"
|
||||
register: pg_version_file
|
||||
ignore_errors: true
|
||||
|
||||
- name: Record Postgres version
|
||||
shell: |
|
||||
{{ container_command }} "cat /var/lib/postgresql/10/data/PG_VERSION"
|
||||
register: old_pg_version
|
||||
when: pg_version_file is defined and pg_version_file.stdout == 'exists'
|
||||
|
||||
- name: Determine whether to upgrade postgres
|
||||
set_fact:
|
||||
upgrade_postgres: "{{ old_pg_version.stdout == '10' }}"
|
||||
when: old_pg_version.changed
|
||||
|
||||
- name: Set up new postgres paths pre-upgrade
|
||||
shell: |
|
||||
{{ container_command }} "mkdir -p /var/lib/postgresql/12/data/"
|
||||
when: upgrade_postgres | bool
|
||||
|
||||
- name: Stop AWX before upgrading postgres
|
||||
docker_compose:
|
||||
project_src: "{{ docker_compose_dir }}"
|
||||
stopped: true
|
||||
when: upgrade_postgres | bool
|
||||
|
||||
- name: Upgrade Postgres
|
||||
shell: |
|
||||
docker run --rm \
|
||||
-v {{ postgres_data_dir }}/10/data:/var/lib/postgresql/10/data \
|
||||
-v {{ postgres_data_dir }}/12/data:/var/lib/postgresql/12/data \
|
||||
-e PGUSER={{ pg_username }} -e POSTGRES_INITDB_ARGS="-U {{ pg_username }}" \
|
||||
tianon/postgres-upgrade:10-to-12 --username={{ pg_username }}
|
||||
when: upgrade_postgres | bool
|
||||
|
||||
- name: Copy old pg_hba.conf
|
||||
shell: |
|
||||
{{ container_command }} "cp /var/lib/postgresql/10/data/pg_hba.conf /var/lib/postgresql/12/data/pg_hba.conf"
|
||||
when: upgrade_postgres | bool
|
||||
|
||||
- name: Remove old data directory
|
||||
shell: |
|
||||
{{ container_command }} "rm -rf /var/lib/postgresql/10/data"
|
||||
when:
|
||||
- upgrade_postgres | bool
|
||||
- compose_start_containers|bool
|
@ -0,0 +1,13 @@
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ATOMIC_REQUESTS': True,
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': "{{ pg_database }}",
|
||||
'USER': "{{ pg_username }}",
|
||||
'PASSWORD': "{{ pg_password }}",
|
||||
'HOST': "{{ pg_hostname | default('postgres') }}",
|
||||
'PORT': "{{ pg_port }}",
|
||||
}
|
||||
}
|
||||
|
||||
BROADCAST_WEBSOCKET_SECRET = "{{ broadcast_websocket_secret | b64encode }}"
|
@ -0,0 +1,208 @@
|
||||
#jinja2: lstrip_blocks: True
|
||||
version: '2'
|
||||
services:
|
||||
|
||||
web:
|
||||
image: {{ awx_docker_actual_image }}
|
||||
container_name: awx_web
|
||||
depends_on:
|
||||
- redis
|
||||
{% if pg_hostname is not defined %}
|
||||
- postgres
|
||||
{% endif %}
|
||||
{% if (host_port is defined) or (host_port_ssl is defined) %}
|
||||
ports:
|
||||
{% if (host_port_ssl is defined) and (ssl_certificate is defined) %}
|
||||
- "{{ host_port_ssl }}:8053"
|
||||
{% endif %}
|
||||
{% if host_port is defined %}
|
||||
- "{{ host_port }}:8052"
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
hostname: {{ awx_web_hostname }}
|
||||
user: root
|
||||
restart: unless-stopped
|
||||
{% if (awx_web_container_labels is defined) and (',' in awx_web_container_labels) %}
|
||||
{% set awx_web_container_labels_list = awx_web_container_labels.split(',') %}
|
||||
labels:
|
||||
{% for awx_web_container_label in awx_web_container_labels_list %}
|
||||
- {{ awx_web_container_label }}
|
||||
{% endfor %}
|
||||
{% elif awx_web_container_labels is defined %}
|
||||
labels:
|
||||
- {{ awx_web_container_labels }}
|
||||
{% endif %}
|
||||
volumes:
|
||||
- supervisor-socket:/var/run/supervisor
|
||||
- rsyslog-socket:/var/run/awx-rsyslog/
|
||||
- rsyslog-config:/var/lib/awx/rsyslog/
|
||||
- "{{ docker_compose_dir }}/SECRET_KEY:/etc/tower/SECRET_KEY"
|
||||
- "{{ docker_compose_dir }}/environment.sh:/etc/tower/conf.d/environment.sh"
|
||||
- "{{ docker_compose_dir }}/credentials.py:/etc/tower/conf.d/credentials.py"
|
||||
- "{{ docker_compose_dir }}/nginx.conf:/etc/nginx/nginx.conf:ro"
|
||||
- "{{ docker_compose_dir }}/redis_socket:/var/run/redis/:rw"
|
||||
{% if project_data_dir is defined %}
|
||||
- "{{ project_data_dir +':/var/lib/awx/projects:rw' }}"
|
||||
{% endif %}
|
||||
{% if custom_venv_dir is defined %}
|
||||
- "{{ custom_venv_dir +':'+ custom_venv_dir +':rw' }}"
|
||||
{% endif %}
|
||||
{% if ca_trust_dir is defined %}
|
||||
- "{{ ca_trust_dir +':/etc/pki/ca-trust/source/anchors:ro' }}"
|
||||
{% endif %}
|
||||
{% if (ssl_certificate is defined) and (ssl_certificate_key is defined) %}
|
||||
- "{{ ssl_certificate +':/etc/nginx/awxweb.pem:ro' }}"
|
||||
- "{{ ssl_certificate_key +':/etc/nginx/awxweb_key.pem:ro' }}"
|
||||
{% elif (ssl_certificate is defined) and (ssl_certificate_key is not defined) %}
|
||||
- "{{ ssl_certificate +':/etc/nginx/awxweb.pem:ro' }}"
|
||||
{% endif %}
|
||||
{% if (awx_container_search_domains is defined) and (',' in awx_container_search_domains) %}
|
||||
{% set awx_container_search_domains_list = awx_container_search_domains.split(',') %}
|
||||
dns_search:
|
||||
{% for awx_container_search_domain in awx_container_search_domains_list %}
|
||||
- {{ awx_container_search_domain }}
|
||||
{% endfor %}
|
||||
{% elif awx_container_search_domains is defined %}
|
||||
dns_search: "{{ awx_container_search_domains }}"
|
||||
{% endif %}
|
||||
{% if (awx_alternate_dns_servers is defined) and (',' in awx_alternate_dns_servers) %}
|
||||
{% set awx_alternate_dns_servers_list = awx_alternate_dns_servers.split(',') %}
|
||||
dns:
|
||||
{% for awx_alternate_dns_server in awx_alternate_dns_servers_list %}
|
||||
- {{ awx_alternate_dns_server }}
|
||||
{% endfor %}
|
||||
{% elif awx_alternate_dns_servers is defined %}
|
||||
dns: "{{ awx_alternate_dns_servers }}"
|
||||
{% endif %}
|
||||
{% if (docker_compose_extra_hosts is defined) and (':' in docker_compose_extra_hosts) %}
|
||||
{% set docker_compose_extra_hosts_list = docker_compose_extra_hosts.split(',') %}
|
||||
extra_hosts:
|
||||
{% for docker_compose_extra_host in docker_compose_extra_hosts_list %}
|
||||
- "{{ docker_compose_extra_host }}"
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
environment:
|
||||
http_proxy: {{ http_proxy | default('') }}
|
||||
https_proxy: {{ https_proxy | default('') }}
|
||||
no_proxy: {{ no_proxy | default('') }}
|
||||
{% if docker_logger is defined %}
|
||||
logging:
|
||||
driver: {{ docker_logger }}
|
||||
{% endif %}
|
||||
|
||||
task:
|
||||
image: {{ awx_docker_actual_image }}
|
||||
container_name: awx_task
|
||||
depends_on:
|
||||
- redis
|
||||
- web
|
||||
{% if pg_hostname is not defined %}
|
||||
- postgres
|
||||
{% endif %}
|
||||
command: /usr/bin/launch_awx_task.sh
|
||||
hostname: {{ awx_task_hostname }}
|
||||
user: root
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- supervisor-socket:/var/run/supervisor
|
||||
- rsyslog-socket:/var/run/awx-rsyslog/
|
||||
- rsyslog-config:/var/lib/awx/rsyslog/
|
||||
- "{{ docker_compose_dir }}/SECRET_KEY:/etc/tower/SECRET_KEY"
|
||||
- "{{ docker_compose_dir }}/environment.sh:/etc/tower/conf.d/environment.sh"
|
||||
- "{{ docker_compose_dir }}/credentials.py:/etc/tower/conf.d/credentials.py"
|
||||
- "{{ docker_compose_dir }}/redis_socket:/var/run/redis/:rw"
|
||||
{% if project_data_dir is defined %}
|
||||
- "{{ project_data_dir +':/var/lib/awx/projects:rw' }}"
|
||||
{% endif %}
|
||||
{% if custom_venv_dir is defined %}
|
||||
- "{{ custom_venv_dir +':'+ custom_venv_dir +':rw' }}"
|
||||
{% endif %}
|
||||
{% if ca_trust_dir is defined %}
|
||||
- "{{ ca_trust_dir +':/etc/pki/ca-trust/source/anchors:ro' }}"
|
||||
{% endif %}
|
||||
{% if ssl_certificate is defined %}
|
||||
- "{{ ssl_certificate +':/etc/nginx/awxweb.pem:ro' }}"
|
||||
{% endif %}
|
||||
{% if (awx_container_search_domains is defined) and (',' in awx_container_search_domains) %}
|
||||
{% set awx_container_search_domains_list = awx_container_search_domains.split(',') %}
|
||||
dns_search:
|
||||
{% for awx_container_search_domain in awx_container_search_domains_list %}
|
||||
- {{ awx_container_search_domain }}
|
||||
{% endfor %}
|
||||
{% elif awx_container_search_domains is defined %}
|
||||
dns_search: "{{ awx_container_search_domains }}"
|
||||
{% endif %}
|
||||
{% if (awx_alternate_dns_servers is defined) and (',' in awx_alternate_dns_servers) %}
|
||||
{% set awx_alternate_dns_servers_list = awx_alternate_dns_servers.split(',') %}
|
||||
dns:
|
||||
{% for awx_alternate_dns_server in awx_alternate_dns_servers_list %}
|
||||
- {{ awx_alternate_dns_server }}
|
||||
{% endfor %}
|
||||
{% elif awx_alternate_dns_servers is defined %}
|
||||
dns: "{{ awx_alternate_dns_servers }}"
|
||||
{% endif %}
|
||||
{% if (docker_compose_extra_hosts is defined) and (':' in docker_compose_extra_hosts) %}
|
||||
{% set docker_compose_extra_hosts_list = docker_compose_extra_hosts.split(',') %}
|
||||
extra_hosts:
|
||||
{% for docker_compose_extra_host in docker_compose_extra_hosts_list %}
|
||||
- "{{ docker_compose_extra_host }}"
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
environment:
|
||||
AWX_SKIP_MIGRATIONS: "1"
|
||||
http_proxy: {{ http_proxy | default('') }}
|
||||
https_proxy: {{ https_proxy | default('') }}
|
||||
no_proxy: {{ no_proxy | default('') }}
|
||||
SUPERVISOR_WEB_CONFIG_PATH: '/etc/supervisord.conf'
|
||||
|
||||
redis:
|
||||
image: {{ redis_image }}
|
||||
container_name: awx_redis
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
http_proxy: {{ http_proxy | default('') }}
|
||||
https_proxy: {{ https_proxy | default('') }}
|
||||
no_proxy: {{ no_proxy | default('') }}
|
||||
command: ["/usr/local/etc/redis/redis.conf"]
|
||||
volumes:
|
||||
- "{{ docker_compose_dir }}/redis.conf:/usr/local/etc/redis/redis.conf:ro"
|
||||
- "{{ docker_compose_dir }}/redis_socket:/var/run/redis/:rw"
|
||||
{% if docker_logger is defined %}
|
||||
logging:
|
||||
driver: {{ docker_logger }}
|
||||
{% endif %}
|
||||
|
||||
{% if pg_hostname is not defined %}
|
||||
postgres:
|
||||
image: {{ postgresql_image }}
|
||||
container_name: awx_postgres
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- "{{ postgres_data_dir }}/12/data/:/var/lib/postgresql/data:Z"
|
||||
environment:
|
||||
POSTGRES_USER: {{ pg_username }}
|
||||
POSTGRES_PASSWORD: {{ pg_password }}
|
||||
POSTGRES_DB: {{ pg_database }}
|
||||
http_proxy: {{ http_proxy | default('') }}
|
||||
https_proxy: {{ https_proxy | default('') }}
|
||||
no_proxy: {{ no_proxy | default('') }}
|
||||
{% if docker_logger is defined %}
|
||||
logging:
|
||||
driver: {{ docker_logger }}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
{% if docker_compose_subnet is defined %}
|
||||
networks:
|
||||
default:
|
||||
driver: bridge
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ docker_compose_subnet }}
|
||||
{% endif %}
|
||||
|
||||
volumes:
|
||||
supervisor-socket:
|
||||
rsyslog-socket:
|
||||
rsyslog-config:
|
@ -0,0 +1,10 @@
|
||||
DATABASE_USER={{ pg_username|quote }}
|
||||
DATABASE_NAME={{ pg_database|quote }}
|
||||
DATABASE_HOST={{ pg_hostname|default('postgres')|quote }}
|
||||
DATABASE_PORT={{ pg_port|default('5432')|quote }}
|
||||
DATABASE_PASSWORD={{ pg_password|default('awxpass')|quote }}
|
||||
{% if pg_admin_password is defined %}
|
||||
DATABASE_ADMIN_PASSWORD={{ pg_admin_password|quote }}
|
||||
{% endif %}
|
||||
AWX_ADMIN_USER={{ admin_user|quote }}
|
||||
AWX_ADMIN_PASSWORD={{ admin_password|quote }}
|
122
examples/awx17/roles/local_docker/templates/nginx.conf.j2
Normal file
@ -0,0 +1,122 @@
|
||||
#user awx;
|
||||
|
||||
worker_processes 1;
|
||||
|
||||
pid /tmp/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
server_tokens off;
|
||||
|
||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
|
||||
access_log /dev/stdout main;
|
||||
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
sendfile on;
|
||||
#tcp_nopush on;
|
||||
#gzip on;
|
||||
|
||||
upstream uwsgi {
|
||||
server 127.0.0.1:8050;
|
||||
}
|
||||
|
||||
upstream daphne {
|
||||
server 127.0.0.1:8051;
|
||||
}
|
||||
|
||||
{% if ssl_certificate is defined %}
|
||||
server {
|
||||
listen 8052 default_server;
|
||||
server_name _;
|
||||
|
||||
# Redirect all HTTP links to the matching HTTPS page
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
{%endif %}
|
||||
|
||||
server {
|
||||
{% if (ssl_certificate is defined) and (ssl_certificate_key is defined) %}
|
||||
listen 8053 ssl;
|
||||
|
||||
ssl_certificate /etc/nginx/awxweb.pem;
|
||||
ssl_certificate_key /etc/nginx/awxweb_key.pem;
|
||||
{% elif (ssl_certificate is defined) and (ssl_certificate_key is not defined) %}
|
||||
listen 8053 ssl;
|
||||
|
||||
ssl_certificate /etc/nginx/awxweb.pem;
|
||||
ssl_certificate_key /etc/nginx/awxweb.pem;
|
||||
{% else %}
|
||||
listen 8052 default_server;
|
||||
{% endif %}
|
||||
|
||||
# If you have a domain name, this is where to add it
|
||||
server_name _;
|
||||
keepalive_timeout 65;
|
||||
|
||||
# HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
|
||||
# Protect against click-jacking https://www.owasp.org/index.php/Testing_for_Clickjacking_(OTG-CLIENT-009)
|
||||
add_header X-Frame-Options "DENY";
|
||||
|
||||
location /nginx_status {
|
||||
stub_status on;
|
||||
access_log off;
|
||||
allow 127.0.0.1;
|
||||
deny all;
|
||||
}
|
||||
|
||||
location /static/ {
|
||||
alias /var/lib/awx/public/static/;
|
||||
}
|
||||
|
||||
location /favicon.ico { alias /var/lib/awx/public/static/favicon.ico; }
|
||||
|
||||
location /websocket {
|
||||
# Pass request to the upstream alias
|
||||
proxy_pass http://daphne;
|
||||
# Require http version 1.1 to allow for upgrade requests
|
||||
proxy_http_version 1.1;
|
||||
# We want proxy_buffering off for proxying to websockets.
|
||||
proxy_buffering off;
|
||||
# http://en.wikipedia.org/wiki/X-Forwarded-For
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# enable this if you use HTTPS:
|
||||
proxy_set_header X-Forwarded-Proto https;
|
||||
# pass the Host: header from the client for the sake of redirects
|
||||
proxy_set_header Host $http_host;
|
||||
# We've set the Host header, so we don't need Nginx to muddle
|
||||
# about with redirects
|
||||
proxy_redirect off;
|
||||
# Depending on the request value, set the Upgrade and
|
||||
# connection headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
|
||||
location / {
|
||||
# Add trailing / if missing
|
||||
rewrite ^(.*)$http_host(.*[^/])$ $1$http_host$2/ permanent;
|
||||
uwsgi_read_timeout 120s;
|
||||
uwsgi_pass uwsgi;
|
||||
include /etc/nginx/uwsgi_params;
|
||||
{%- if extra_nginx_include is defined %}
|
||||
include {{ extra_nginx_include }};
|
||||
{%- endif %}
|
||||
proxy_set_header X-Forwarded-Port 443;
|
||||
uwsgi_param HTTP_X_FORWARDED_PORT 443;
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,4 @@
|
||||
unixsocket /var/run/redis/redis.sock
|
||||
unixsocketperm 660
|
||||
port 0
|
||||
bind 127.0.0.1
|
17
examples/azure-vote/README.md
Normal file
@ -0,0 +1,17 @@
|
||||
# Azure Vote Example
|
||||
|
||||
This example has two containers:
|
||||
|
||||
* backend: `redis` used as storage
|
||||
* frontend: having supervisord, nginx, uwsgi/python
|
||||
|
||||
|
||||
```
|
||||
echo "HOST_PORT=8080" > .env
|
||||
podman-compose up
|
||||
```
|
||||
|
||||
After typing the commands above, open your browser on the host port you picked, like
|
||||
[http://localhost:8080/](http://localhost:8080/)
|
||||
|
||||
|
16
examples/azure-vote/docker-compose.yaml
Normal file
@ -0,0 +1,16 @@
|
||||
---
|
||||
# from https://github.com/Azure-Samples/azure-voting-app-redis/blob/master/docker-compose.yaml
|
||||
version: '3'
|
||||
services:
|
||||
azure-vote-back:
|
||||
image: mcr.microsoft.com/oss/bitnami/redis:6.0.8
|
||||
container_name: azure-vote-back
|
||||
environment:
|
||||
ALLOW_EMPTY_PASSWORD: "yes"
|
||||
azure-vote-front:
|
||||
image: mcr.microsoft.com/azuredocs/azure-vote-front:v1
|
||||
environment:
|
||||
REDIS: azure-vote-back
|
||||
ports:
|
||||
- "${HOST_PORT:-8080}:80"
|
||||
|
31
examples/echo/README.md
Normal file
@ -0,0 +1,31 @@
|
||||
# Echo Service example
|
||||
|
||||
```
|
||||
podman-compose up
|
||||
```
|
||||
|
||||
Test the service with `curl` like this:
|
||||
|
||||
```
|
||||
$ curl -X POST -d "foobar" http://localhost:8080/; echo
|
||||
|
||||
CLIENT VALUES:
|
||||
client_address=10.89.31.2
|
||||
command=POST
|
||||
real path=/
|
||||
query=nil
|
||||
request_version=1.1
|
||||
request_uri=http://localhost:8080/
|
||||
|
||||
SERVER VALUES:
|
||||
server_version=nginx: 1.10.0 - lua: 10001
|
||||
|
||||
HEADERS RECEIVED:
|
||||
accept=*/*
|
||||
content-length=6
|
||||
content-type=application/x-www-form-urlencoded
|
||||
host=localhost:8080
|
||||
user-agent=curl/7.76.1
|
||||
BODY:
|
||||
foobar
|
||||
```
|
8
examples/echo/docker-compose.yaml
Normal file
@ -0,0 +1,8 @@
|
||||
---
|
||||
version: '3'
|
||||
services:
|
||||
web:
|
||||
image: k8s.gcr.io/echoserver:1.4
|
||||
ports:
|
||||
- "${HOST_PORT:-8080}:8080"
|
||||
|
12
examples/hello-app-redis/README.md
Normal file
@ -0,0 +1,12 @@
|
||||
# GCR Hello App Redis
|
||||
|
||||
A 6-node redis cluster using [Bitnami](https://github.com/bitnami/bitnami-docker-redis-cluster)
|
||||
with a [simple hit counter](https://github.com/GoogleCloudPlatform/kubernetes-engine-samples/tree/main/hello-app-redis) that persists on that redis cluster
|
||||
|
||||
```
|
||||
podman-compose up
|
||||
```
|
||||
|
||||
then open your browser on [http://localhost:8080/](http://localhost:8080/)
|
||||
|
||||
|
67
examples/hello-app-redis/docker-compose.yaml
Normal file
@ -0,0 +1,67 @@
|
||||
---
|
||||
version: '3'
|
||||
volumes:
|
||||
redis-node1-data:
|
||||
redis-node2-data:
|
||||
redis-node3-data:
|
||||
redis-node4-data:
|
||||
redis-node5-data:
|
||||
redis-data:
|
||||
services:
|
||||
web:
|
||||
image: gcr.io/google-samples/hello-app-redis:1.0
|
||||
depends_on:
|
||||
- redis-cluster
|
||||
ports:
|
||||
- "${HOST_PORT:-8080}:8080"
|
||||
redis-node1:
|
||||
image: docker.io/bitnami/redis-cluster:6.2
|
||||
volumes:
|
||||
- redis-node1-data:/bitnami/redis/data
|
||||
environment:
|
||||
- ALLOW_EMPTY_PASSWORD=yes
|
||||
- REDIS_NODES=redis-node1 redis-node2 redis-node3 redis-node4 redis-node5 redis-cluster
|
||||
redis-node2:
|
||||
image: docker.io/bitnami/redis-cluster:6.2
|
||||
volumes:
|
||||
- redis-node2-data:/bitnami/redis/data
|
||||
environment:
|
||||
- ALLOW_EMPTY_PASSWORD=yes
|
||||
- REDIS_NODES=redis-node1 redis-node2 redis-node3 redis-node4 redis-node5 redis-cluster
|
||||
redis-node3:
|
||||
image: docker.io/bitnami/redis-cluster:6.2
|
||||
volumes:
|
||||
- redis-node3-data:/bitnami/redis/data
|
||||
environment:
|
||||
- ALLOW_EMPTY_PASSWORD=yes
|
||||
- REDIS_NODES=redis-node1 redis-node2 redis-node3 redis-node4 redis-node5 redis-cluster
|
||||
redis-node4:
|
||||
image: docker.io/bitnami/redis-cluster:6.2
|
||||
volumes:
|
||||
- redis-node4-data:/bitnami/redis/data
|
||||
environment:
|
||||
- ALLOW_EMPTY_PASSWORD=yes
|
||||
- REDIS_NODES=redis-node1 redis-node2 redis-node3 redis-node4 redis-node5 redis-cluster
|
||||
redis-node5:
|
||||
image: docker.io/bitnami/redis-cluster:6.2
|
||||
volumes:
|
||||
- redis-node5-data:/bitnami/redis/data
|
||||
environment:
|
||||
- ALLOW_EMPTY_PASSWORD=yes
|
||||
- REDIS_NODES=redis-node1 redis-node2 redis-node3 redis-node4 redis-node5 redis-cluster
|
||||
|
||||
redis-cluster:
|
||||
image: docker.io/bitnami/redis-cluster:6.2
|
||||
volumes:
|
||||
- redis-data:/bitnami/redis/data
|
||||
depends_on:
|
||||
- redis-node1
|
||||
- redis-node2
|
||||
- redis-node3
|
||||
- redis-node4
|
||||
- redis-node5
|
||||
environment:
|
||||
- ALLOW_EMPTY_PASSWORD=yes
|
||||
- REDIS_NODES=redis-node1 redis-node2 redis-node3 redis-node4 redis-node5 redis-cluster
|
||||
- REDIS_CLUSTER_CREATOR=yes
|
||||
|
examples/hello-app/README.md (new file, 10 lines)
@@ -0,0 +1,10 @@
# GCR Hello App

A small ~2MB image. Type

```
podman-compose up
```

then open your browser on [http://localhost:8080/](http://localhost:8080/)
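Or check from the command line; the output below is what this sample image typically prints (shown as a sketch, the hostname will differ):

```
$ curl http://localhost:8080/
Hello, world!
Version: 1.0.0
Hostname: <container id>
```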
examples/hello-app/docker-compose.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
---
version: '3'
services:
  web:
    image: gcr.io/google-samples/hello-app:1.0
    ports:
      - "${HOST_PORT:-8080}:8080"
examples/hello-python/Dockerfile (new file, 12 lines)
@@ -0,0 +1,12 @@
FROM python:3.9-alpine

WORKDIR /usr/src/app

COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

CMD [ "python", "-m", "app.web" ]
EXPOSE 8080
examples/hello-python/README.md (new file, 8 lines)
@@ -0,0 +1,8 @@
# Simple Python Demo
## A Redis counter

```
podman-compose up -d
curl localhost:8080/
curl localhost:8080/hello.json
```
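For reference, the handlers in `app/web.py` below return a plain-text counter and a JSON counter, so the two `curl` calls should print something like this (a sketch; the number keeps increasing on each request):

```
counter=1
{"counter": 2}
```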
0
examples/hello-python/app/__init__.py
Normal file
0
examples/hello-python/app/__init__.py
Normal file
39
examples/hello-python/app/web.py
Normal file
39
examples/hello-python/app/web.py
Normal file
@ -0,0 +1,39 @@
|
||||
# pylint: disable=import-error
|
||||
# pylint: disable=unused-import
|
||||
import os
|
||||
import asyncio # noqa: F401
|
||||
|
||||
import aioredis
|
||||
from aiohttp import web
|
||||
|
||||
REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
|
||||
REDIS_PORT = int(os.environ.get("REDIS_PORT", "6379"))
|
||||
REDIS_DB = int(os.environ.get("REDIS_DB", "0"))
|
||||
|
||||
redis = aioredis.from_url(f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}")
|
||||
app = web.Application()
|
||||
routes = web.RouteTableDef()
|
||||
|
||||
|
||||
@routes.get("/")
|
||||
async def hello(request): # pylint: disable=unused-argument
|
||||
counter = await redis.incr("mycounter")
|
||||
return web.Response(text=f"counter={counter}")
|
||||
|
||||
|
||||
@routes.get("/hello.json")
|
||||
async def hello_json(request): # pylint: disable=unused-argument
|
||||
counter = await redis.incr("mycounter")
|
||||
data = {"counter": counter}
|
||||
return web.json_response(data)
|
||||
|
||||
|
||||
app.add_routes(routes)
|
||||
|
||||
|
||||
def main():
|
||||
web.run_app(app, port=8080)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
21
examples/hello-python/docker-compose.yaml
Normal file
21
examples/hello-python/docker-compose.yaml
Normal file
@ -0,0 +1,21 @@
|
||||
---
|
||||
version: '3'
|
||||
volumes:
|
||||
redis:
|
||||
services:
|
||||
redis:
|
||||
read_only: true
|
||||
image: docker.io/redis:alpine
|
||||
command: ["redis-server", "--appendonly", "yes", "--notify-keyspace-events", "Ex"]
|
||||
volumes:
|
||||
- redis:/data
|
||||
web:
|
||||
read_only: true
|
||||
build:
|
||||
context: .
|
||||
image: hello-py-aioweb
|
||||
ports:
|
||||
- 8080:8080
|
||||
environment:
|
||||
REDIS_HOST: redis
|
||||
|
3
examples/hello-python/requirements.txt
Normal file
3
examples/hello-python/requirements.txt
Normal file
@ -0,0 +1,3 @@
|
||||
aiohttp
|
||||
aioredis
|
||||
# aioredis[hiredis]
|
71
examples/nodeproj/.eslintrc.json
Normal file
71
examples/nodeproj/.eslintrc.json
Normal file
@ -0,0 +1,71 @@
|
||||
{
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
},
|
||||
"settings": {
|
||||
"import/resolver": {
|
||||
"node": {
|
||||
"extensions": [".js", ".mjs", ".ts", ".cjs"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 2020,
|
||||
"sourceType": "module",
|
||||
"allowImportExportEverywhere": true
|
||||
},
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:import/errors",
|
||||
"plugin:import/warnings",
|
||||
"plugin:import/typescript",
|
||||
"plugin:promise/recommended",
|
||||
"google",
|
||||
"plugin:security/recommended"
|
||||
],
|
||||
"plugins": ["promise", "security", "import"],
|
||||
"overrides": [
|
||||
{
|
||||
"files": "public/**/*.min.js",
|
||||
"env": {
|
||||
"browser": true,
|
||||
"node": false,
|
||||
"es6": false
|
||||
},
|
||||
"parserOptions": {
|
||||
"sourceType": "script"
|
||||
},
|
||||
"extends": ["plugin:compat/recommended"],
|
||||
"plugins": [],
|
||||
"rules": {
|
||||
"no-var": ["off"]
|
||||
}
|
||||
}
|
||||
],
|
||||
"rules": {
|
||||
"security/detect-non-literal-fs-filename":["off"],
|
||||
"security/detect-object-injection":["off"],
|
||||
"camelcase": ["off"],
|
||||
"no-console": ["off"],
|
||||
"require-jsdoc": ["off"],
|
||||
"one-var": ["off"],
|
||||
"guard-for-in": ["off"],
|
||||
"max-len": [
|
||||
"warn",
|
||||
{
|
||||
"ignoreComments": true,
|
||||
"ignoreTrailingComments": true,
|
||||
"ignoreUrls": true,
|
||||
"code": 200
|
||||
}
|
||||
],
|
||||
"indent": ["warn", 4],
|
||||
"no-unused-vars": ["warn"],
|
||||
"no-extra-semi": ["warn"],
|
||||
"linebreak-style": ["error", "unix"],
|
||||
"quotes": ["warn", "double"],
|
||||
"semi": ["error", "always"]
|
||||
}
|
||||
}
|
5
examples/nodeproj/.gitignore
vendored
Normal file
5
examples/nodeproj/.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
local.env
|
||||
.env
|
||||
*.pid
|
||||
node_modules
|
||||
|
1
examples/nodeproj/.home/.gitignore
vendored
Normal file
1
examples/nodeproj/.home/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
*
|
examples/nodeproj/README.md (new file, 16 lines)
@@ -0,0 +1,16 @@
# How to run the example

```
cp example.local.env local.env
cp example.env .env
cat local.env
cat .env
echo "UID=$UID" >> .env
cat .env
podman-compose build
podman-compose run --rm --no-deps init
podman-compose up
```
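Once the stack is up, the `web` service from the compose file below listens on `${WEB_LISTEN_PORT:-3000}` and serves a `/healthz` route (see `lib/commands/web.js`), so a quick check could look like this (a sketch, assuming the default port):

```
curl http://localhost:3000/healthz
# ok@<timestamp>
```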
12
examples/nodeproj/containers/node16-runtime/Dockerfile
Normal file
12
examples/nodeproj/containers/node16-runtime/Dockerfile
Normal file
@ -0,0 +1,12 @@
|
||||
FROM registry.fedoraproject.org/fedora-minimal:35
|
||||
ARG NODE_VER=16
|
||||
# microdnf -y module enable nodejs:${NODE_VER}
|
||||
RUN \
|
||||
echo -e "[nodejs]\nname=nodejs\nstream=${NODE_VER}\nprofiles=\nstate=enabled\n" > /etc/dnf/modules.d/nodejs.module && \
|
||||
microdnf -y install shadow-utils nodejs zopfli findutils busybox && \
|
||||
microdnf clean all
|
||||
RUN adduser -d /app app && mkdir -p /app/code/.home && chown app:app -R /app/code && chmod 711 /app /app/code/.home && usermod -d /app/code/.home app
|
||||
ENV XDG_CONFIG_HOME=/app/code/.home
|
||||
ENV HOME=/app/code/.home
|
||||
WORKDIR /app/code
|
||||
|
48
examples/nodeproj/docker-compose.yml
Normal file
48
examples/nodeproj/docker-compose.yml
Normal file
@ -0,0 +1,48 @@
|
||||
version: '3'
|
||||
volumes:
|
||||
redis:
|
||||
services:
|
||||
redis:
|
||||
read_only: true
|
||||
image: docker.io/redis:alpine
|
||||
command: ["redis-server", "--appendonly", "yes", "--notify-keyspace-events", "Ex"]
|
||||
volumes:
|
||||
- redis:/data
|
||||
tmpfs:
|
||||
- /tmp
|
||||
- /var/run
|
||||
- /run
|
||||
init:
|
||||
read_only: true
|
||||
#userns_mode: keep-id
|
||||
user: ${UID:-1000}
|
||||
build:
|
||||
context: ./containers/${NODE_IMG:-node16-runtime}
|
||||
image: ${NODE_IMG:-node16-runtime}
|
||||
env_file:
|
||||
- local.env
|
||||
volumes:
|
||||
- .:/app/code
|
||||
command: ["/bin/sh", "-c", "mkdir -p ~/; [ -d ./node_modules ] && echo '** node_modules exists' || npm install"]
|
||||
tmpfs:
|
||||
- /tmp
|
||||
- /run
|
||||
task:
|
||||
extends:
|
||||
service: init
|
||||
command: ["npm", "run", "cli", "--", "task"]
|
||||
links:
|
||||
- redis
|
||||
depends_on:
|
||||
- redis
|
||||
web:
|
||||
extends:
|
||||
service: init
|
||||
command: ["npm", "run", "cli", "--", "web"]
|
||||
ports:
|
||||
- ${WEB_LISTEN_PORT:-3000}:3000
|
||||
depends_on:
|
||||
- redis
|
||||
links:
|
||||
- mongo
|
||||
|
3
examples/nodeproj/example.env
Normal file
3
examples/nodeproj/example.env
Normal file
@ -0,0 +1,3 @@
|
||||
WEB_LISTEN_PORT=3000
|
||||
# pass UID= your IDE user
|
||||
|
2
examples/nodeproj/example.local.env
Normal file
2
examples/nodeproj/example.local.env
Normal file
@ -0,0 +1,2 @@
|
||||
REDIS_HOST=redis
|
||||
|
6
examples/nodeproj/index.js
Normal file
6
examples/nodeproj/index.js
Normal file
@ -0,0 +1,6 @@
|
||||
#! /usr/bin/env node
|
||||
"use strict";
|
||||
import {start} from "./lib";
|
||||
|
||||
start();
|
||||
|
14
examples/nodeproj/jsconfig.json
Normal file
14
examples/nodeproj/jsconfig.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2020",
|
||||
"module": "es2020",
|
||||
"moduleResolution": "node",
|
||||
"allowSyntheticDefaultImports": true
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"include": [
|
||||
"lib/**/*.js"
|
||||
]
|
||||
}
|
examples/nodeproj/lib/commands/task.js (new file, 31 lines)
@@ -0,0 +1,31 @@
"use strict";
import {proj} from "../proj";

async function loop() {
    const popped = await proj.predis.blpop("queue", 5);
    const task_desc_s = popped[1];
    let task_desc;
    try {
        task_desc = JSON.parse(task_desc_s);
    } catch (e) {
        console.error(e);
    }
    console.info("got task " + task_desc.func);
    const func = task_desc.func;
    const args = task_desc.args;
    if (typeof proj.tasks[func] != "function") {
        console.log(`task ${func} not found`);
        process.exit(-1);
    }
    try {
        await proj.tasks[func](...args);
    } catch (e) {
        console.error(e);
    }
}

export async function start() {
    while (true) {
        await loop();
    }
}
21
examples/nodeproj/lib/commands/web.js
Normal file
21
examples/nodeproj/lib/commands/web.js
Normal file
@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
import {proj} from "../proj";
|
||||
|
||||
import http from "http";
|
||||
import express from "express";
|
||||
|
||||
|
||||
export async function start() {
|
||||
const app = express();
|
||||
const server = http.createServer(app);
|
||||
|
||||
// Routing
|
||||
app.use(express.static(proj.config.basedir + "/public"));
|
||||
app.get("/healthz", function(req, res) {
|
||||
res.send("ok@"+Date.now());
|
||||
});
|
||||
|
||||
server.listen(proj.config.LISTEN_PORT, proj.config.LISTEN_HOST, function() {
|
||||
console.warn(`listening at port ${proj.config.LISTEN_PORT}`);
|
||||
});
|
||||
}
|
24
examples/nodeproj/package.json
Normal file
24
examples/nodeproj/package.json
Normal file
@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "nodeproj",
|
||||
"version": "0.0.1",
|
||||
"description": "nodejs example project",
|
||||
"exports": {
|
||||
".": "./index.js",
|
||||
"./lib": "./lib"
|
||||
},
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"cli": "nodemon -w lib -w index.js --es-module-specifier-resolution=node ./index.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"express": "~4.16.4",
|
||||
"redis": "^3.1.2"
|
||||
},
|
||||
"private": true,
|
||||
"author": "",
|
||||
"license": "proprietary",
|
||||
"devDependencies": {
|
||||
"nodemon": "^2.0.14"
|
||||
}
|
||||
}
|
18
examples/nodeproj/public/index.html
Normal file
18
examples/nodeproj/public/index.html
Normal file
@ -0,0 +1,18 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Vote</title>
|
||||
<link rel="stylesheet" href="https://unpkg.com/browse/normalize.css@8.0.1/normalize.css">
|
||||
<link rel="stylesheet" href="styles.css">
|
||||
</head>
|
||||
<body>
|
||||
<h1>This is a Heading</h1>
|
||||
<p>This is a paragraph.</p>
|
||||
</body>
|
||||
<script type="text/javascript" src="main.css"></script>
|
||||
<script type="text/javascript">
|
||||
//<![CDATA[
|
||||
console.log("loaded");
|
||||
//]]>
|
||||
</script>
|
||||
</html>
|
24
examples/wordpress/docker-compose.yaml
Normal file
24
examples/wordpress/docker-compose.yaml
Normal file
@ -0,0 +1,24 @@
|
||||
---
|
||||
volumes:
|
||||
db_data:
|
||||
services:
|
||||
wordpress:
|
||||
image: docker.io/library/wordpress:latest
|
||||
ports:
|
||||
- 8080:80
|
||||
environment:
|
||||
- WORDPRESS_DB_HOST=db
|
||||
- WORDPRESS_DB_USER=wordpress
|
||||
- WORDPRESS_DB_PASSWORD=password
|
||||
- WORDPRESS_DB_NAME=wordpress
|
||||
db:
|
||||
image: docker.io/library/mariadb:10.6.4-focal
|
||||
command: '--default-authentication-plugin=mysql_native_password'
|
||||
volumes:
|
||||
- db_data:/var/lib/mysql
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=somewordpress
|
||||
- MYSQL_DATABASE=wordpress
|
||||
- MYSQL_USER=wordpress
|
||||
- MYSQL_PASSWORD=password
|
||||
|
podman_compose.py (2864 changed lines; diff suppressed because it is too large)
168
pytests/test_can_merge_build.py
Normal file
168
pytests/test_can_merge_build.py
Normal file
@ -0,0 +1,168 @@
|
||||
import copy
|
||||
import os
|
||||
import argparse
|
||||
import yaml
|
||||
from podman_compose import normalize_service, PodmanCompose
|
||||
|
||||
|
||||
test_cases_simple = [
|
||||
({"test": "test"}, {"test": "test"}),
|
||||
({"build": "."}, {"build": {"context": "."}}),
|
||||
({"build": "./dir-1"}, {"build": {"context": "./dir-1"}}),
|
||||
({"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
|
||||
(
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
|
||||
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_normalize_service_simple():
|
||||
for test_case, expected in copy.deepcopy(test_cases_simple):
|
||||
test_original = copy.deepcopy(test_case)
|
||||
test_case = normalize_service(test_case)
|
||||
test_result = expected == test_case
|
||||
if not test_result:
|
||||
print("test: ", test_original)
|
||||
print("expected: ", expected)
|
||||
print("actual: ", test_case)
|
||||
assert test_result
|
||||
|
||||
|
||||
test_cases_sub_dir = [
|
||||
({"test": "test"}, {"test": "test"}),
|
||||
({"build": "."}, {"build": {"context": "./sub_dir/."}}),
|
||||
({"build": "./dir-1"}, {"build": {"context": "./sub_dir/dir-1"}}),
|
||||
({"build": {"context": "./dir-1"}}, {"build": {"context": "./sub_dir/dir-1"}}),
|
||||
(
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{"build": {"context": "./sub_dir", "dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"context": "./dir-1", "dockerfile": "dockerfile-1"}},
|
||||
{"build": {"context": "./sub_dir/dir-1", "dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_normalize_service_with_sub_dir():
|
||||
for test_case, expected in copy.deepcopy(test_cases_sub_dir):
|
||||
test_original = copy.deepcopy(test_case)
|
||||
test_case = normalize_service(test_case, sub_dir="./sub_dir")
|
||||
test_result = expected == test_case
|
||||
if not test_result:
|
||||
print("test: ", test_original)
|
||||
print("expected: ", expected)
|
||||
print("actual: ", test_case)
|
||||
assert test_result
|
||||
|
||||
|
||||
test_cases_merges = [
|
||||
({}, {}, {}),
|
||||
({}, {"test": "test"}, {"test": "test"}),
|
||||
({"test": "test"}, {}, {"test": "test"}),
|
||||
({"test": "test-1"}, {"test": "test-2"}, {"test": "test-2"}),
|
||||
({}, {"build": "."}, {"build": {"context": "."}}),
|
||||
({"build": "."}, {}, {"build": {"context": "."}}),
|
||||
({"build": "./dir-1"}, {"build": "./dir-2"}, {"build": {"context": "./dir-2"}}),
|
||||
({}, {"build": {"context": "./dir-1"}}, {"build": {"context": "./dir-1"}}),
|
||||
({"build": {"context": "./dir-1"}}, {}, {"build": {"context": "./dir-1"}}),
|
||||
(
|
||||
{"build": {"context": "./dir-1"}},
|
||||
{"build": {"context": "./dir-2"}},
|
||||
{"build": {"context": "./dir-2"}},
|
||||
),
|
||||
(
|
||||
{},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
{},
|
||||
{"build": {"dockerfile": "dockerfile-1"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"context": "./dir-2"}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-2"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1", "context": "./dir-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "context": "./dir-2"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1"]}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV2=2"]}},
|
||||
{"build": {"dockerfile": "./dockerfile-1", "args": ["ENV1=1", "ENV2=2"]}},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test__parse_compose_file_when_multiple_composes() -> None:
|
||||
for test_input, test_override, expected_result in copy.deepcopy(test_cases_merges):
|
||||
compose_test_1 = {"services": {"test-service": test_input}}
|
||||
compose_test_2 = {"services": {"test-service": test_override}}
|
||||
dump_yaml(compose_test_1, "test-compose-1.yaml")
|
||||
dump_yaml(compose_test_2, "test-compose-2.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(podman_compose, ["test-compose-1.yaml", "test-compose-2.yaml"])
|
||||
|
||||
podman_compose._parse_compose_file() # pylint: disable=protected-access
|
||||
|
||||
actual_compose = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual_compose = podman_compose.services["test-service"]
|
||||
if actual_compose != expected_result:
|
||||
print("compose: ", test_input)
|
||||
print("override: ", test_override)
|
||||
print("expected: ", expected_result)
|
||||
print("actual: ", actual_compose)
|
||||
|
||||
compose_expected = expected_result
|
||||
|
||||
assert compose_expected == actual_compose
|
||||
|
||||
|
||||
def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:
|
||||
podman_compose.global_args = argparse.Namespace()
|
||||
podman_compose.global_args.file = file_names
|
||||
podman_compose.global_args.project_name = None
|
||||
podman_compose.global_args.env_file = None
|
||||
podman_compose.global_args.profile = []
|
||||
podman_compose.global_args.in_pod = True
|
||||
podman_compose.global_args.no_normalize = True
|
||||
|
||||
|
||||
def dump_yaml(compose: dict, name: str) -> None:
|
||||
with open(name, "w", encoding="utf-8") as outfile:
|
||||
yaml.safe_dump(compose, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def test_clean_test_yamls() -> None:
|
||||
test_files = ["test-compose-1.yaml", "test-compose-2.yaml"]
|
||||
for file in test_files:
|
||||
if os.path.exists(file):
|
||||
os.remove(file)
|
122
pytests/test_can_merge_cmd_ent.py
Normal file
122
pytests/test_can_merge_cmd_ent.py
Normal file
@ -0,0 +1,122 @@
|
||||
import copy
|
||||
import os
|
||||
import argparse
|
||||
import yaml
|
||||
from podman_compose import normalize_service, PodmanCompose
|
||||
|
||||
test_keys = ["command", "entrypoint"]
|
||||
|
||||
test_cases_normalise_pre_merge = [
|
||||
({"$$$": []}, {"$$$": []}),
|
||||
({"$$$": ["sh"]}, {"$$$": ["sh"]}),
|
||||
({"$$$": ["sh", "-c", "date"]}, {"$$$": ["sh", "-c", "date"]}),
|
||||
({"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
|
||||
(
|
||||
{"$$$": "bash -c 'sleep infinity'"},
|
||||
{"$$$": ["bash", "-c", "sleep infinity"]},
|
||||
),
|
||||
]
|
||||
|
||||
test_cases_merges = [
|
||||
({}, {"$$$": []}, {"$$$": []}),
|
||||
({"$$$": []}, {}, {"$$$": []}),
|
||||
({"$$$": []}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": "sh-2"}, {"$$$": []}, {"$$$": []}),
|
||||
({}, {"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sh"}, {}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sh-1"}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": ["sh-1"]}, {"$$$": "sh-2"}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": "sh-1"}, {"$$$": ["sh-2"]}, {"$$$": ["sh-2"]}),
|
||||
({"$$$": "sh-1"}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
|
||||
({"$$$": ["sh-1"]}, {"$$$": ["sh-2", "sh-3"]}, {"$$$": ["sh-2", "sh-3"]}),
|
||||
({"$$$": ["sh-1", "sh-2"]}, {"$$$": ["sh-3", "sh-4"]}, {"$$$": ["sh-3", "sh-4"]}),
|
||||
({}, {"$$$": ["sh-3", "sh 4"]}, {"$$$": ["sh-3", "sh 4"]}),
|
||||
({"$$$": "sleep infinity"}, {"$$$": "sh"}, {"$$$": ["sh"]}),
|
||||
({"$$$": "sh"}, {"$$$": "sleep infinity"}, {"$$$": ["sleep", "infinity"]}),
|
||||
(
|
||||
{},
|
||||
{"$$$": "bash -c 'sleep infinity'"},
|
||||
{"$$$": ["bash", "-c", "sleep infinity"]},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def template_to_expression(base, override, expected, key):
|
||||
base_copy = copy.deepcopy(base)
|
||||
override_copy = copy.deepcopy(override)
|
||||
expected_copy = copy.deepcopy(expected)
|
||||
|
||||
expected_copy[key] = expected_copy.pop("$$$")
|
||||
if "$$$" in base:
|
||||
base_copy[key] = base_copy.pop("$$$")
|
||||
if "$$$" in override:
|
||||
override_copy[key] = override_copy.pop("$$$")
|
||||
return base_copy, override_copy, expected_copy
|
||||
|
||||
|
||||
def test_normalize_service():
|
||||
for test_input_template, expected_template in test_cases_normalise_pre_merge:
|
||||
for key in test_keys:
|
||||
test_input, _, expected = template_to_expression(
|
||||
test_input_template, {}, expected_template, key
|
||||
)
|
||||
test_input = normalize_service(test_input)
|
||||
test_result = expected == test_input
|
||||
if not test_result:
|
||||
print("base_template: ", test_input_template)
|
||||
print("expected: ", expected)
|
||||
print("actual: ", test_input)
|
||||
assert test_result
|
||||
|
||||
|
||||
def test__parse_compose_file_when_multiple_composes() -> None:
|
||||
for base_template, override_template, expected_template in copy.deepcopy(
|
||||
test_cases_merges
|
||||
):
|
||||
for key in test_keys:
|
||||
base, override, expected = template_to_expression(
|
||||
base_template, override_template, expected_template, key
|
||||
)
|
||||
compose_test_1 = {"services": {"test-service": base}}
|
||||
compose_test_2 = {"services": {"test-service": override}}
|
||||
dump_yaml(compose_test_1, "test-compose-1.yaml")
|
||||
dump_yaml(compose_test_2, "test-compose-2.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(podman_compose, ["test-compose-1.yaml", "test-compose-2.yaml"])
|
||||
|
||||
podman_compose._parse_compose_file() # pylint: disable=protected-access
|
||||
|
||||
actual = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual = podman_compose.services["test-service"]
|
||||
if actual != expected:
|
||||
print("compose: ", base)
|
||||
print("override: ", override)
|
||||
print("result: ", expected)
|
||||
|
||||
assert expected == actual
|
||||
|
||||
|
||||
def set_args(podman_compose: PodmanCompose, file_names: list[str]) -> None:
|
||||
podman_compose.global_args = argparse.Namespace()
|
||||
podman_compose.global_args.file = file_names
|
||||
podman_compose.global_args.project_name = None
|
||||
podman_compose.global_args.env_file = None
|
||||
podman_compose.global_args.profile = []
|
||||
podman_compose.global_args.in_pod = True
|
||||
podman_compose.global_args.no_normalize = None
|
||||
|
||||
|
||||
def dump_yaml(compose: dict, name: str) -> None:
|
||||
with open(name, "w", encoding="utf-8") as outfile:
|
||||
yaml.safe_dump(compose, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def test_clean_test_yamls() -> None:
|
||||
test_files = ["test-compose-1.yaml", "test-compose-2.yaml"]
|
||||
for file in test_files:
|
||||
if os.path.exists(file):
|
||||
os.remove(file)
|
298
pytests/test_normalize_final_build.py
Normal file
298
pytests/test_normalize_final_build.py
Normal file
@ -0,0 +1,298 @@
|
||||
# pylint: disable=protected-access
|
||||
|
||||
import argparse
|
||||
import copy
|
||||
import os
|
||||
import yaml
|
||||
from podman_compose import (
|
||||
normalize_service,
|
||||
normalize,
|
||||
normalize_final,
|
||||
normalize_service_final,
|
||||
PodmanCompose,
|
||||
)
|
||||
|
||||
cwd = os.path.abspath(".")
|
||||
test_cases_simple_normalization = [
|
||||
({"image": "test-image"}, {"image": "test-image"}),
|
||||
(
|
||||
{"build": "."},
|
||||
{
|
||||
"build": {"context": cwd, "dockerfile": "Dockerfile"},
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "../relative"},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "../relative")),
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "./relative"},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "./relative")),
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "/workspace/absolute"},
|
||||
{
|
||||
"build": {
|
||||
"context": "/workspace/absolute",
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {
|
||||
"context": ".",
|
||||
},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "Dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {"context": "../", "dockerfile": "test-dockerfile"},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "../")),
|
||||
"dockerfile": "test-dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"build": {"context": ".", "dockerfile": "./dev/test-dockerfile"},
|
||||
},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./dev/test-dockerfile",
|
||||
},
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
#
|
||||
# [service.build] is normalised after merges
|
||||
#
|
||||
def test_normalize_service_final_returns_absolute_path_in_context() -> None:
|
||||
project_dir = cwd
|
||||
for test_input, expected_service in copy.deepcopy(test_cases_simple_normalization):
|
||||
actual_service = normalize_service_final(test_input, project_dir)
|
||||
assert expected_service == actual_service
|
||||
|
||||
|
||||
def test_normalize_returns_absolute_path_in_context() -> None:
|
||||
project_dir = cwd
|
||||
for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
|
||||
compose_test = {"services": {"test-service": test_input}}
|
||||
compose_expected = {"services": {"test-service": expected_result}}
|
||||
actual_compose = normalize_final(compose_test, project_dir)
|
||||
assert compose_expected == actual_compose
|
||||
|
||||
|
||||
#
|
||||
# running full parse over single compose files
|
||||
#
|
||||
def test__parse_compose_file_when_single_compose() -> None:
|
||||
for test_input, expected_result in copy.deepcopy(test_cases_simple_normalization):
|
||||
compose_test = {"services": {"test-service": test_input}}
|
||||
dump_yaml(compose_test, "test-compose.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(podman_compose, ["test-compose.yaml"], no_normalize=None)
|
||||
|
||||
podman_compose._parse_compose_file()
|
||||
|
||||
actual_compose = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual_compose = podman_compose.services["test-service"]
|
||||
if actual_compose != expected_result:
|
||||
print("compose: ", test_input)
|
||||
print("result: ", expected_result)
|
||||
|
||||
assert expected_result == actual_compose
|
||||
|
||||
|
||||
test_cases_with_merges = [
|
||||
(
|
||||
{},
|
||||
{"build": "."},
|
||||
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": "."},
|
||||
{},
|
||||
{"build": {"context": cwd, "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": "./relative"},
|
||||
{
|
||||
"build": {
|
||||
"context": os.path.normpath(os.path.join(cwd, "./relative")),
|
||||
"dockerfile": "Dockerfile",
|
||||
}
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": "./relative"},
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": "./relative"},
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "Dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{},
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{},
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "test-dockerfile-1"}},
|
||||
{"build": {"dockerfile": "test-dockerfile-2"}},
|
||||
{"build": {"context": cwd, "dockerfile": "test-dockerfile-2"}},
|
||||
),
|
||||
(
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "test-dockerfile"}},
|
||||
{"build": "/workspace/absolute"},
|
||||
{"build": {"context": "/workspace/absolute", "dockerfile": "test-dockerfile"}},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./test-dockerfile-1"}},
|
||||
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV1=1"]}},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./test-dockerfile-2",
|
||||
"args": ["ENV1=1"],
|
||||
}
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./test-dockerfile-2"}},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./test-dockerfile-2",
|
||||
"args": ["ENV1=1"],
|
||||
}
|
||||
},
|
||||
),
|
||||
(
|
||||
{"build": {"dockerfile": "./test-dockerfile-1", "args": ["ENV1=1"]}},
|
||||
{"build": {"dockerfile": "./test-dockerfile-2", "args": ["ENV2=2"]}},
|
||||
{
|
||||
"build": {
|
||||
"context": cwd,
|
||||
"dockerfile": "./test-dockerfile-2",
|
||||
"args": ["ENV1=1", "ENV2=2"],
|
||||
}
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
#
|
||||
# running full parse over merged
|
||||
#
|
||||
def test__parse_compose_file_when_multiple_composes() -> None:
|
||||
for test_input, test_override, expected_result in copy.deepcopy(
|
||||
test_cases_with_merges
|
||||
):
|
||||
compose_test_1 = {"services": {"test-service": test_input}}
|
||||
compose_test_2 = {"services": {"test-service": test_override}}
|
||||
dump_yaml(compose_test_1, "test-compose-1.yaml")
|
||||
dump_yaml(compose_test_2, "test-compose-2.yaml")
|
||||
|
||||
podman_compose = PodmanCompose()
|
||||
set_args(
|
||||
podman_compose,
|
||||
["test-compose-1.yaml", "test-compose-2.yaml"],
|
||||
no_normalize=None,
|
||||
)
|
||||
|
||||
podman_compose._parse_compose_file()
|
||||
|
||||
actual_compose = {}
|
||||
if podman_compose.services:
|
||||
podman_compose.services["test-service"].pop("_deps")
|
||||
actual_compose = podman_compose.services["test-service"]
|
||||
if actual_compose != expected_result:
|
||||
print("compose: ", test_input)
|
||||
print("override: ", test_override)
|
||||
print("result: ", expected_result)
|
||||
compose_expected = expected_result
|
||||
|
||||
assert compose_expected == actual_compose
|
||||
|
||||
|
||||
def set_args(
|
||||
podman_compose: PodmanCompose, file_names: list[str], no_normalize: bool
|
||||
) -> None:
|
||||
podman_compose.global_args = argparse.Namespace()
|
||||
podman_compose.global_args.file = file_names
|
||||
podman_compose.global_args.project_name = None
|
||||
podman_compose.global_args.env_file = None
|
||||
podman_compose.global_args.profile = []
|
||||
podman_compose.global_args.in_pod = True
|
||||
podman_compose.global_args.no_normalize = no_normalize
|
||||
|
||||
|
||||
def dump_yaml(compose: dict, name: str) -> None:
|
||||
# Path(Path.cwd()/"subdirectory").mkdir(parents=True, exist_ok=True)
|
||||
with open(name, "w", encoding="utf-8") as outfile:
|
||||
yaml.safe_dump(compose, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def test_clean_test_yamls() -> None:
|
||||
test_files = ["test-compose-1.yaml", "test-compose-2.yaml", "test-compose.yaml"]
|
||||
for file in test_files:
|
||||
if os.path.exists(file):
|
||||
os.remove(file)
|
21
pytests/test_volumes.py
Normal file
21
pytests/test_volumes.py
Normal file
@ -0,0 +1,21 @@
|
||||
# pylint: disable=redefined-outer-name
|
||||
import pytest
|
||||
|
||||
from podman_compose import parse_short_mount
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def multi_propagation_mount_str():
|
||||
return "/foo/bar:/baz:U,Z"
|
||||
|
||||
|
||||
def test_parse_short_mount_multi_propagation(multi_propagation_mount_str):
|
||||
expected = {
|
||||
"type": "bind",
|
||||
"source": "/foo/bar",
|
||||
"target": "/baz",
|
||||
"bind": {
|
||||
"propagation": "U,Z",
|
||||
},
|
||||
}
|
||||
assert parse_short_mount(multi_propagation_mount_str, "/") == expected
|
@ -3,3 +3,7 @@ universal = 1
|
||||
|
||||
[metadata]
|
||||
version = attr: podman_compose.__version__
|
||||
|
||||
[flake8]
|
||||
# The GitHub editor is 127 chars wide
|
||||
max-line-length=127
|
50
setup.py
50
setup.py
@ -2,43 +2,51 @@ import os
|
||||
from setuptools import setup
|
||||
|
||||
try:
|
||||
readme = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
|
||||
except:
|
||||
readme = ''
|
||||
README = open(
|
||||
os.path.join(os.path.dirname(__file__), "README.md"), encoding="utf-8"
|
||||
).read()
|
||||
except: # noqa: E722 # pylint: disable=bare-except
|
||||
README = ""
|
||||
|
||||
setup(
|
||||
name='podman-compose',
|
||||
name="podman-compose",
|
||||
description="A script to run docker-compose.yml using podman",
|
||||
long_description=readme,
|
||||
long_description_content_type='text/markdown',
|
||||
long_description=README,
|
||||
long_description_content_type="text/markdown",
|
||||
classifiers=[
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Intended Audience :: Developers",
|
||||
"Operating System :: OS Independent",
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Topic :: Software Development :: Build Tools",
|
||||
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
|
||||
],
|
||||
keywords='podman, podman-compose',
|
||||
author='Muayyad Alsadi',
|
||||
author_email='alsadi@gmail.com',
|
||||
url='https://github.com/containers/podman-compose',
|
||||
py_modules=['podman_compose'],
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'podman-compose = podman_compose:main'
|
||||
]
|
||||
},
|
||||
keywords="podman, podman-compose",
|
||||
author="Muayyad Alsadi",
|
||||
author_email="alsadi@gmail.com",
|
||||
url="https://github.com/containers/podman-compose",
|
||||
py_modules=["podman_compose"],
|
||||
entry_points={"console_scripts": ["podman-compose = podman_compose:main"]},
|
||||
include_package_data=True,
|
||||
license='GPL-2.0-only',
|
||||
license="GPL-2.0-only",
|
||||
install_requires=[
|
||||
'pyyaml',
|
||||
'python-dotenv',
|
||||
"pyyaml",
|
||||
"python-dotenv",
|
||||
],
|
||||
extras_require={
|
||||
"devel": [
|
||||
"flake8",
|
||||
"black",
|
||||
"pylint",
|
||||
"pre-commit",
|
||||
]
|
||||
}
|
||||
# test_suite='tests',
|
||||
# tests_require=[
|
||||
# 'coverage',
|
||||
|
@ -6,3 +6,4 @@ coverage
|
||||
pytest-cov
|
||||
pytest
|
||||
tox
|
||||
black
|
||||
|
tests/build_fail/README.md (new file, 22 lines)
@@ -0,0 +1,22 @@
# Test podman-compose with build (fail scenario)

```shell
podman-compose build || echo $?
```

The expected output would be something like:

```
STEP 1/3: FROM busybox
STEP 2/3: RUN this_command_does_not_exist
/bin/sh: this_command_does_not_exist: not found
Error: building at STEP "RUN this_command_does_not_exist": while running runtime: exit status 127

exit code: 127
```

Expected `podman-compose` exit code:

```shell
echo $?
127
```
3
tests/build_fail/context/Dockerfile
Normal file
3
tests/build_fail/context/Dockerfile
Normal file
@ -0,0 +1,3 @@
|
||||
FROM busybox
|
||||
RUN this_command_does_not_exist
|
||||
CMD ["sh"]
|
5
tests/build_fail/docker-compose.yml
Normal file
5
tests/build_fail/docker-compose.yml
Normal file
@ -0,0 +1,5 @@
|
||||
version: "3"
|
||||
services:
|
||||
test:
|
||||
build: ./context
|
||||
image: build-fail-img
|
26
tests/conftest.py
Normal file
26
tests/conftest.py
Normal file
@ -0,0 +1,26 @@
|
||||
"""conftest.py
|
||||
|
||||
Defines global pytest fixtures available to all tests.
|
||||
"""
|
||||
# pylint: disable=redefined-outer-name
|
||||
from pathlib import Path
|
||||
import os
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def base_path():
|
||||
"""Returns the base path for the project"""
|
||||
return Path(__file__).parent.parent
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_path(base_path):
|
||||
"""Returns the path to the tests directory"""
|
||||
return os.path.join(base_path, "tests")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def podman_compose_path(base_path):
|
||||
"""Returns the path to the podman compose script"""
|
||||
return os.path.join(base_path, "podman_compose.py")
|
tests/env-file-tests/README.md (new file, 9 lines)
@@ -0,0 +1,9 @@
Running either of the following commands should always print `podman-rocks-123`:

```
podman-compose -f project/container-compose.yaml --env-file env-files/project-1.env up
```

```
podman-compose -f $(pwd)/project/container-compose.yaml --env-file $(pwd)/env-files/project-1.env up
```
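The value comes from `env-files/project-1.env` and is surfaced by the `env | grep ZZ` command in `project/container-compose.yaml` (both shown below), so the container log should include a line like:

```
ZZVAR1=podman-rocks-123
```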
1
tests/env-file-tests/env-files/project-1.env
Normal file
1
tests/env-file-tests/env-files/project-1.env
Normal file
@ -0,0 +1 @@
|
||||
ZZVAR1=podman-rocks-123
|
9
tests/env-file-tests/project/container-compose.yaml
Normal file
9
tests/env-file-tests/project/container-compose.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
services:
|
||||
app:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "sh", "-c", "env | grep ZZ"]
|
||||
tmpfs:
|
||||
- /run
|
||||
- /tmp
|
||||
environment:
|
||||
ZZVAR1: $ZZVAR1
|
7
tests/extends_w_empty_service/common-services.yml
Normal file
7
tests/extends_w_empty_service/common-services.yml
Normal file
@ -0,0 +1,7 @@
|
||||
services:
|
||||
webapp_default:
|
||||
|
||||
webapp_special:
|
||||
image: busybox
|
||||
volumes:
|
||||
- "/data"
|
10
tests/extends_w_empty_service/docker-compose.yml
Normal file
10
tests/extends_w_empty_service/docker-compose.yml
Normal file
@ -0,0 +1,10 @@
|
||||
version: "3"
|
||||
services:
|
||||
web:
|
||||
image: busybox
|
||||
extends:
|
||||
file: common-services.yml
|
||||
service: webapp_default
|
||||
environment:
|
||||
- DEBUG=1
|
||||
cpu_shares: 5
|
8
tests/extends_w_file_subdir/docker-compose.yml
Normal file
8
tests/extends_w_file_subdir/docker-compose.yml
Normal file
@ -0,0 +1,8 @@
|
||||
version: "3"
|
||||
services:
|
||||
web:
|
||||
extends:
|
||||
file: sub/docker-compose.yml
|
||||
service: webapp
|
||||
environment:
|
||||
- DEBUG=1
|
12
tests/extends_w_file_subdir/sub/docker-compose.yml
Normal file
12
tests/extends_w_file_subdir/sub/docker-compose.yml
Normal file
@ -0,0 +1,12 @@
|
||||
version: "3"
|
||||
services:
|
||||
webapp:
|
||||
build:
|
||||
context: docker/example
|
||||
dockerfile: Dockerfile
|
||||
image: localhost/subdir_test:me
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
- "/data"
|
||||
|
@ -0,0 +1 @@
|
||||
FROM busybox as base
|
7
tests/include/docker-compose.base.yaml
Normal file
7
tests/include/docker-compose.base.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
version: '3.6'
|
||||
|
||||
services:
|
||||
web:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", ".", "-p", "8003"]
|
||||
|
4
tests/include/docker-compose.yaml
Normal file
4
tests/include/docker-compose.yaml
Normal file
@ -0,0 +1,4 @@
|
||||
version: '3.6'
|
||||
|
||||
include:
|
||||
- docker-compose.base.yaml
|
7
tests/nethost/docker-compose.yaml
Normal file
7
tests/nethost/docker-compose.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
version: '3'
|
||||
services:
|
||||
web:
|
||||
image: busybox
|
||||
command: httpd -f -p 8123 -h /etc/
|
||||
network_mode: host
|
||||
|
16
tests/netprio/docker-compose.yaml
Normal file
16
tests/netprio/docker-compose.yaml
Normal file
@ -0,0 +1,16 @@
|
||||
---
|
||||
# https://github.com/compose-spec/compose-spec/blob/master/spec.md#priority
|
||||
services:
|
||||
app:
|
||||
image: busybox
|
||||
command: top
|
||||
networks:
|
||||
app_net_1:
|
||||
app_net_2:
|
||||
priority: 1000
|
||||
app_net_3:
|
||||
priority: 100
|
||||
networks:
|
||||
app_net_1:
|
||||
app_net_2:
|
||||
app_net_3:
|
@ -11,7 +11,7 @@ services:
|
||||
- ./test1.txt:/var/www/html/index.txt:ro,z
|
||||
web2:
|
||||
image: busybox
|
||||
hostname: web1
|
||||
hostname: web2
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
|
||||
working_dir: /var/www/html
|
||||
ports:
|
||||
|
@ -28,4 +28,18 @@ services:
|
||||
- 8002:8001
|
||||
volumes:
|
||||
- ./test2.txt:/var/www/html/index.txt:ro,z
|
||||
web3:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
|
||||
working_dir: /var/www/html
|
||||
networks:
|
||||
net1:
|
||||
aliases:
|
||||
- alias11
|
||||
- alias12
|
||||
net2:
|
||||
aliases:
|
||||
- alias21
|
||||
volumes:
|
||||
- ./test2.txt:/var/www/html/index.txt:ro,z
|
||||
|
||||
|
24
tests/profile/docker-compose.yml
Normal file
24
tests/profile/docker-compose.yml
Normal file
@ -0,0 +1,24 @@
|
||||
version: "3"
|
||||
services:
|
||||
default-service:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
|
||||
tmpfs:
|
||||
- /run
|
||||
- /tmp
|
||||
service-1:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
|
||||
tmpfs:
|
||||
- /run
|
||||
- /tmp
|
||||
profiles:
|
||||
- profile-1
|
||||
service-2:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/etc/", "-p", "8000"]
|
||||
tmpfs:
|
||||
- /run
|
||||
- /tmp
|
||||
profiles:
|
||||
- profile-2
|
@ -1,3 +1,7 @@
|
||||
---
|
||||
# echo "sec" | podman secret create my_secret -
|
||||
# echo "sec2" | podman secret create my_secret_2 -
|
||||
# echo "sec3" | podman secret create my_secret_3 -
|
||||
version: "3.8"
|
||||
services:
|
||||
test:
|
||||
|
84
tests/test_podman_compose.py
Normal file
84
tests/test_podman_compose.py
Normal file
@ -0,0 +1,84 @@
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
|
||||
|
||||
def capture(command):
|
||||
proc = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
out, err = proc.communicate()
|
||||
return out, err, proc.returncode
|
||||
|
||||
|
||||
def test_podman_compose_extends_w_file_subdir():
|
||||
"""
|
||||
Test that podman-compose can execute podman-compose -f <file> up with an extended file which
includes a build context
|
||||
:return:
|
||||
"""
|
||||
main_path = Path(__file__).parent.parent
|
||||
|
||||
command_up = [
|
||||
"python3",
|
||||
str(main_path.joinpath("podman_compose.py")),
|
||||
"-f",
|
||||
str(main_path.joinpath("tests", "extends_w_file_subdir", "docker-compose.yml")),
|
||||
"up",
|
||||
"-d",
|
||||
]
|
||||
|
||||
command_check_container = [
|
||||
"podman",
|
||||
"container",
|
||||
"ps",
|
||||
"--all",
|
||||
"--format",
|
||||
'"{{.Image}}"',
|
||||
]
|
||||
|
||||
command_down = [
|
||||
"podman",
|
||||
"rmi",
|
||||
"--force",
|
||||
"localhost/subdir_test:me",
|
||||
"docker.io/library/busybox",
|
||||
]
|
||||
|
||||
out, _, returncode = capture(command_up)
|
||||
assert 0 == returncode
|
||||
# check container was created and exists
|
||||
out, _, returncode = capture(command_check_container)
|
||||
assert 0 == returncode
|
||||
assert out == b'"localhost/subdir_test:me"\n'
|
||||
out, _, returncode = capture(command_down)
|
||||
# cleanup test image(tags)
|
||||
assert 0 == returncode
|
||||
# check that the container does not exist anymore
|
||||
out, _, returncode = capture(command_check_container)
|
||||
assert 0 == returncode
|
||||
assert out == b""
|
||||
|
||||
|
||||
def test_podman_compose_extends_w_empty_service():
|
||||
"""
|
||||
Test that podman-compose can execute podman-compose -f <file> up with an extended file which
includes an empty service (e.g. if the file is used as a placeholder for more complex configurations).
|
||||
:return:
|
||||
"""
|
||||
main_path = Path(__file__).parent.parent
|
||||
|
||||
command_up = [
|
||||
"python3",
|
||||
str(main_path.joinpath("podman_compose.py")),
|
||||
"-f",
|
||||
str(
|
||||
main_path.joinpath("tests", "extends_w_empty_service", "docker-compose.yml")
|
||||
),
|
||||
"up",
|
||||
"-d",
|
||||
]
|
||||
|
||||
_, _, returncode = capture(command_up)
|
||||
assert 0 == returncode
|
78
tests/test_podman_compose_config.py
Normal file
78
tests/test_podman_compose_config.py
Normal file
@ -0,0 +1,78 @@
|
||||
"""
|
||||
test_podman_compose_config.py
|
||||
|
||||
Tests the podman-compose config command which is used to return defined compose services.
|
||||
"""
|
||||
# pylint: disable=redefined-outer-name
|
||||
import os
|
||||
from test_podman_compose import capture
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def profile_compose_file(test_path):
|
||||
""" "Returns the path to the `profile` compose file used for this test module"""
|
||||
return os.path.join(test_path, "profile", "docker-compose.yml")
|
||||
|
||||
|
||||
def test_config_no_profiles(podman_compose_path, profile_compose_file):
|
||||
"""
|
||||
Tests podman-compose config command without profile enablement.
|
||||
|
||||
:param podman_compose_path: The fixture used to specify the path to the podman compose file.
|
||||
:param profile_compose_file: The fixture used to specify the path to the "profile" compose used in the test.
|
||||
"""
|
||||
config_cmd = ["python3", podman_compose_path, "-f", profile_compose_file, "config"]
|
||||
|
||||
out, _, return_code = capture(config_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
string_output = out.decode("utf-8")
|
||||
assert "default-service" in string_output
|
||||
assert "service-1" not in string_output
|
||||
assert "service-2" not in string_output
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"profiles, expected_services",
|
||||
[
|
||||
(
|
||||
["--profile", "profile-1", "config"],
|
||||
{"default-service": True, "service-1": True, "service-2": False},
|
||||
),
|
||||
(
|
||||
["--profile", "profile-2", "config"],
|
||||
{"default-service": True, "service-1": False, "service-2": True},
|
||||
),
|
||||
(
|
||||
["--profile", "profile-1", "--profile", "profile-2", "config"],
|
||||
{"default-service": True, "service-1": True, "service-2": True},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_config_profiles(
|
||||
podman_compose_path, profile_compose_file, profiles, expected_services
|
||||
):
|
||||
"""
|
||||
Tests the podman-compose config command with the given profiles enabled.
|
||||
:param podman_compose_path: The fixture used to specify the path to the podman compose file.
|
||||
:param profile_compose_file: The fixture used to specify the path to the "profile" compose used in the test.
|
||||
:param profiles: The enabled profiles for the parameterized test.
|
||||
:param expected_services: Dictionary used to model the expected "enabled" services in the profile.
|
||||
Key = service name, Value = True if the service is enabled, otherwise False.
|
||||
"""
|
||||
config_cmd = ["python3", podman_compose_path, "-f", profile_compose_file]
|
||||
config_cmd.extend(profiles)
|
||||
|
||||
out, _, return_code = capture(config_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
actual_output = out.decode("utf-8")
|
||||
|
||||
assert len(expected_services) == 3
|
||||
|
||||
actual_services = {}
|
||||
for service, _ in expected_services.items():
|
||||
actual_services[service] = service in actual_output
|
||||
|
||||
assert expected_services == actual_services
|
71
tests/test_podman_compose_include.py
Normal file
71
tests/test_podman_compose_include.py
Normal file
@ -0,0 +1,71 @@
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
|
||||
|
||||
def capture(command):
|
||||
proc = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
out, err = proc.communicate()
|
||||
return out, err, proc.returncode
|
||||
|
||||
|
||||
def test_podman_compose_include():
|
||||
"""
|
||||
Test that podman-compose can execute podman-compose -f <file> up with include
|
||||
:return:
|
||||
"""
|
||||
main_path = Path(__file__).parent.parent
|
||||
|
||||
command_up = [
|
||||
"python3",
|
||||
str(main_path.joinpath("podman_compose.py")),
|
||||
"-f",
|
||||
str(main_path.joinpath("tests", "include", "docker-compose.yaml")),
|
||||
"up",
|
||||
"-d",
|
||||
]
|
||||
|
||||
command_check_container = [
|
||||
"podman",
|
||||
"ps",
|
||||
"-a",
|
||||
"--filter",
|
||||
"label=io.podman.compose.project=include",
|
||||
"--format",
|
||||
'"{{.Image}}"',
|
||||
]
|
||||
|
||||
command_container_id = [
|
||||
"podman",
|
||||
"ps",
|
||||
"-a",
|
||||
"--filter",
|
||||
"label=io.podman.compose.project=include",
|
||||
"--format",
|
||||
'"{{.ID}}"',
|
||||
]
|
||||
|
||||
command_down = ["podman", "rm", "--force", "CONTAINER_ID"]
|
||||
|
||||
out, _, returncode = capture(command_up)
|
||||
assert 0 == returncode
|
||||
out, _, returncode = capture(command_check_container)
|
||||
assert 0 == returncode
|
||||
assert out == b'"docker.io/library/busybox:latest"\n'
|
||||
# Get container ID to remove it
|
||||
out, _, returncode = capture(command_container_id)
|
||||
assert 0 == returncode
|
||||
assert out != b""
|
||||
container_id = out.decode().strip().replace('"', "")
|
||||
command_down[3] = container_id
|
||||
out, _, returncode = capture(command_down)
|
||||
# cleanup test image(tags)
|
||||
assert 0 == returncode
|
||||
assert out != b""
|
||||
# check that the container does not exist anymore
|
||||
out, _, returncode = capture(command_check_container)
|
||||
assert 0 == returncode
|
||||
assert out == b""
|
89
tests/test_podman_compose_up_down.py
Normal file
89
tests/test_podman_compose_up_down.py
Normal file
@ -0,0 +1,89 @@
|
||||
"""
|
||||
test_podman_compose_up_down.py
|
||||
|
||||
Tests the podman compose up and down commands used to create and remove services.
|
||||
"""
|
||||
# pylint: disable=redefined-outer-name
|
||||
import os
|
||||
from test_podman_compose import capture
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def profile_compose_file(test_path):
|
||||
""" "Returns the path to the `profile` compose file used for this test module"""
|
||||
return os.path.join(test_path, "profile", "docker-compose.yml")
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def teardown(podman_compose_path, profile_compose_file):
|
||||
"""
|
||||
Ensures that the services within the "profile compose file" are removed between each test case.
|
||||
|
||||
:param podman_compose_path: The path to the podman compose script.
|
||||
:param profile_compose_file: The path to the compose file used for this test module.
|
||||
"""
|
||||
# run the test case
|
||||
yield
|
||||
|
||||
down_cmd = [
|
||||
"python3",
|
||||
podman_compose_path,
|
||||
"--profile",
|
||||
"profile-1",
|
||||
"--profile",
|
||||
"profile-2",
|
||||
"-f",
|
||||
profile_compose_file,
|
||||
"down",
|
||||
]
|
||||
capture(down_cmd)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"profiles, expected_services",
|
||||
[
|
||||
(
|
||||
["--profile", "profile-1", "up", "-d"],
|
||||
{"default-service": True, "service-1": True, "service-2": False},
|
||||
),
|
||||
(
|
||||
["--profile", "profile-2", "up", "-d"],
|
||||
{"default-service": True, "service-1": False, "service-2": True},
|
||||
),
|
||||
(
|
||||
["--profile", "profile-1", "--profile", "profile-2", "up", "-d"],
|
||||
{"default-service": True, "service-1": True, "service-2": True},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_up(podman_compose_path, profile_compose_file, profiles, expected_services):
|
||||
up_cmd = [
|
||||
"python3",
|
||||
podman_compose_path,
|
||||
"-f",
|
||||
profile_compose_file,
|
||||
]
|
||||
up_cmd.extend(profiles)
|
||||
|
||||
out, _, return_code = capture(up_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
check_cmd = [
|
||||
"podman",
|
||||
"container",
|
||||
"ps",
|
||||
"--format",
|
||||
'"{{.Names}}"',
|
||||
]
|
||||
out, _, return_code = capture(check_cmd)
|
||||
assert return_code == 0
|
||||
|
||||
assert len(expected_services) == 3
|
||||
actual_output = out.decode("utf-8")
|
||||
|
||||
actual_services = {}
|
||||
for service, _ in expected_services.items():
|
||||
actual_services[service] = service in actual_output
|
||||
|
||||
assert expected_services == actual_services
|
9
tests/testlogs/docker-compose.yml
Normal file
9
tests/testlogs/docker-compose.yml
Normal file
@ -0,0 +1,9 @@
|
||||
version: "3"
|
||||
services:
|
||||
loop1:
|
||||
image: busybox
|
||||
command: ["/bin/sh", "-c", "for i in `seq 1 10000`; do echo \"loop1: $$i\"; sleep 1; done"]
|
||||
loop2:
|
||||
image: busybox
|
||||
command: ["/bin/sh", "-c", "for i in `seq 1 10000`; do echo \"loop2: $$i\"; sleep 3; done"]
|
||||
|
15
tests/uidmaps/docker-compose.yml
Normal file
15
tests/uidmaps/docker-compose.yml
Normal file
@ -0,0 +1,15 @@
|
||||
version: "3.7"
|
||||
services:
|
||||
touch:
|
||||
image: busybox
|
||||
command: 'touch /mnt/test'
|
||||
volumes:
|
||||
- ./:/mnt
|
||||
user: 999:999
|
||||
x-podman:
|
||||
uidmaps:
|
||||
- "0:1:1"
|
||||
- "999:0:1"
|
||||
gidmaps:
|
||||
- "0:1:1"
|
||||
- "999:0:1"
|
@ -4,6 +4,7 @@ services:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8000"]
|
||||
working_dir: /var/www/html
|
||||
restart: always
|
||||
volumes:
|
||||
- /var/www/html
|
||||
tmpfs:
|
||||
@ -12,6 +13,7 @@ services:
|
||||
web1:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8001"]
|
||||
restart: unless-stopped
|
||||
working_dir: /var/www/html
|
||||
volumes:
|
||||
- myvol1:/var/www/html:ro,z
|
||||
@ -32,6 +34,7 @@ services:
|
||||
- data3:/var/www/html_data3
|
||||
|
||||
volumes:
|
||||
myvol1:
|
||||
myvol2:
|
||||
labels:
|
||||
mylabel: myval
|
||||
|
7
tests/volumes_merge/docker-compose.override.yaml
Normal file
7
tests/volumes_merge/docker-compose.override.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
version: "3"
|
||||
services:
|
||||
web:
|
||||
volumes:
|
||||
- ./override.txt:/var/www/html/index.html:ro,z
|
||||
- ./override.txt:/var/www/html/index2.html:z
|
||||
- ./override.txt:/var/www/html/index3.html
|
11
tests/volumes_merge/docker-compose.yaml
Normal file
11
tests/volumes_merge/docker-compose.yaml
Normal file
@ -0,0 +1,11 @@
|
||||
version: "3"
|
||||
services:
|
||||
web:
|
||||
image: busybox
|
||||
command: ["/bin/busybox", "httpd", "-f", "-h", "/var/www/html", "-p", "8080"]
|
||||
ports:
|
||||
- 8080:8080
|
||||
volumes:
|
||||
- ./index.txt:/var/www/html/index.html:ro,z
|
||||
- ./index.txt:/var/www/html/index2.html
|
||||
- ./index.txt:/var/www/html/index3.html:ro
|
1
tests/volumes_merge/index.txt
Normal file
1
tests/volumes_merge/index.txt
Normal file
@ -0,0 +1 @@
|
||||
The file from docker-compose.yaml
|
1
tests/volumes_merge/override.txt
Normal file
1
tests/volumes_merge/override.txt
Normal file
@ -0,0 +1 @@
|
||||
The file from docker-compose.override.yaml
|