Merge branch 'dev-2' into 'main'
Add a lot of things ! See merge request odoo-openupgrade-wizard/odoo-openupgrade-wizard!2
This commit is contained in:
commit
c5da069c42
5
.gitignore
vendored
5
.gitignore
vendored
|
|
@ -1,8 +1,9 @@
|
||||||
env
|
env
|
||||||
|
.gitlab-ci-venv
|
||||||
__pycache__
|
__pycache__
|
||||||
.tox
|
.tox
|
||||||
.coverage
|
.coverage
|
||||||
.pytest_cache
|
.pytest_cache
|
||||||
/tests/output_01/*
|
/tests/data/output/*
|
||||||
/tests/output_02/*
|
|
||||||
log/
|
log/
|
||||||
|
_auto_generated_odoo.cfg
|
||||||
|
|
|
||||||
102
.gitlab-ci.yml
102
.gitlab-ci.yml
|
|
@ -1,67 +1,63 @@
|
||||||
---
|
image: docker:19.03
|
||||||
image: python
|
|
||||||
|
|
||||||
cache:
|
services:
|
||||||
key: one-key-to-rule-them-all
|
- docker:dind
|
||||||
paths:
|
|
||||||
- .venv
|
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- prepare
|
|
||||||
- linting
|
- linting
|
||||||
- tests
|
- tests
|
||||||
|
|
||||||
|
# TODO, fix me : call all the pre-commit stuff instead.
|
||||||
install_tools:
|
black:
|
||||||
stage: prepare
|
stage: linting
|
||||||
|
image: python
|
||||||
script:
|
script:
|
||||||
- python -m venv .venv
|
# Install pipx
|
||||||
- source .venv/bin/activate
|
- pip install --user pipx
|
||||||
|
- python -m pipx ensurepath
|
||||||
|
- source ~/.profile
|
||||||
|
|
||||||
|
# Install black
|
||||||
|
- pipx install black
|
||||||
|
- black --version
|
||||||
|
|
||||||
|
# Log
|
||||||
|
|
||||||
|
# Call black Check
|
||||||
|
- black --check .
|
||||||
|
|
||||||
|
pytest:
|
||||||
|
stage: tests
|
||||||
|
script:
|
||||||
|
# Install the the version 3.8. (the version 3.9 is the latest available)
|
||||||
|
# however, docker 19.03 comes with python3.8 and docker 20.10 comes with python3.10
|
||||||
|
- apk add python3==~3.8 python3-dev==~3.8
|
||||||
|
- apk add gcc g++ libffi-dev
|
||||||
|
- apk add git
|
||||||
|
- python3 -m venv /.gitlab-ci-venv
|
||||||
|
- source /.gitlab-ci-venv/bin/activate
|
||||||
|
- pip install --upgrade pip
|
||||||
- pip install poetry
|
- pip install poetry
|
||||||
- poetry --version
|
- poetry --version
|
||||||
- poetry install -v
|
- poetry install -v
|
||||||
- echo $PATH
|
- echo $PATH
|
||||||
- echo $PYTHONPATH
|
- echo $PYTHONPATH
|
||||||
|
|
||||||
black:
|
|
||||||
stage: linting
|
|
||||||
script:
|
|
||||||
# Intall pipx to install black
|
|
||||||
# otherwise, it fails.
|
|
||||||
# TODO, check with Coop It Easy
|
|
||||||
- pip install --user pipx
|
|
||||||
- python -m pipx ensurepath
|
|
||||||
- source ~/.profile
|
|
||||||
- pipx install black
|
|
||||||
# Classic CI
|
|
||||||
- black --version
|
|
||||||
- black --check .
|
|
||||||
|
|
||||||
pylint:
|
|
||||||
stage: linting
|
|
||||||
script:
|
|
||||||
- source .venv/bin/activate
|
|
||||||
- pylint --version
|
|
||||||
# - pylint --disable fixme ociedoo
|
|
||||||
# - pylint --disable fixme tests
|
|
||||||
|
|
||||||
pytest:
|
|
||||||
stage: tests
|
|
||||||
image: python
|
|
||||||
cache: {}
|
|
||||||
script:
|
|
||||||
- pip install poetry
|
|
||||||
- poetry --version
|
|
||||||
- poetry install -v
|
|
||||||
- poetry run pytest --version
|
- poetry run pytest --version
|
||||||
- poetry run pytest --cov odoo_openupgrade_wizard -v
|
|
||||||
|
|
||||||
tox:
|
- poetry run pytest --verbosity=2 --exitfirst --cov odoo_openupgrade_wizard
|
||||||
stage: tests
|
tests/cli_01_init_test.py
|
||||||
image: themattrix/tox
|
tests/cli_02_get_code_test.py
|
||||||
cache: {}
|
tests/cli_03_docker_build_test.py
|
||||||
script:
|
tests/cli_04_run_test.py
|
||||||
- pip install poetry tox
|
tests/cli_05_execute_script_python_test.py
|
||||||
- tox --version
|
tests/cli_06_execute_script_sql_test.py
|
||||||
- poetry --version
|
tests/cli_07_upgrade_test.py
|
||||||
- tox
|
tests/cli_08_estimate_workload_test.py
|
||||||
|
|
||||||
|
# Disabled test on gitlab-ci :
|
||||||
|
# The following tests should work locally but doesn't on gitlab-ci
|
||||||
|
# because calling OdooRPC on mounted container
|
||||||
|
# in a container (gitlab) doesn't work.
|
||||||
|
|
||||||
|
# tests/cli_20_install_from_csv_test.py
|
||||||
|
# tests/cli_21_generate_module_analysis_test.py
|
||||||
|
|
|
||||||
|
|
@ -34,3 +34,8 @@ repos:
|
||||||
# rev: v1.0.0
|
# rev: v1.0.0
|
||||||
# hooks:
|
# hooks:
|
||||||
# - id: check-gitlab-ci
|
# - id: check-gitlab-ci
|
||||||
|
# - repo: https://github.com/kadrach/pre-commit-gitlabci-lint
|
||||||
|
# rev: 22d0495c9894e8b27cc37c2ed5d9a6b46385a44c
|
||||||
|
# hooks:
|
||||||
|
# - id: gitlabci-lint
|
||||||
|
# # args: ["https://custom.gitlab.host.com"]
|
||||||
|
|
|
||||||
8
CONTRIBUTORS.md
Normal file
8
CONTRIBUTORS.md
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
# Developers
|
||||||
|
|
||||||
|
* Sylvain LE GAL from GRAP (http://www.grap.coop)
|
||||||
|
|
||||||
|
# Reviewers
|
||||||
|
|
||||||
|
* Sébastien BEAU, from Akretion (https://akretion.com)
|
||||||
|
* Rémy TAYMANS, from Coop It Easy (https://coopiteasy.be/)
|
||||||
96
DEVELOP.md
96
DEVELOP.md
|
|
@ -1,21 +1,63 @@
|
||||||
# Requirements
|
# Tools to understand
|
||||||
|
|
||||||
TODO (poetry, etc...)
|
The library is using many tools. It is recommanded to understand that tools
|
||||||
|
to contribute to that project.
|
||||||
|
|
||||||
|
* Docker (https://www.docker.com/)
|
||||||
|
* Dind (Docker In Docker) for running docker in gitlab-ci. (https://docs.gitlab.com/ee/ci/docker/using_docker_build.html#use-docker-in-docker)
|
||||||
|
* Gitlab CI (https://docs.gitlab.com/ee/ci/quick_start/index.html)
|
||||||
|
* openupgrade project (https://github.com/oca/openupgrade) and related openupgradelib (https://github.com/oca/openupgradelib)
|
||||||
|
* poetry (https://python-poetry.org/)
|
||||||
|
* odoorpc (https://github.com/OCA/odoorpc)
|
||||||
|
* git-aggregator (https://github.com/acsone/git-aggregator)
|
||||||
|
* click-odoo (https://github.com/acsone/click-odoo)
|
||||||
|
|
||||||
|
|
||||||
|
# Extra Developper Requirements
|
||||||
|
|
||||||
|
If you want to use this library without installing anything in your
|
||||||
|
system, execute the following steps, otherwise, go to 'Installation' part.
|
||||||
|
|
||||||
|
1. Run a docker container:
|
||||||
|
|
||||||
|
``docker run -it ubuntu:focal``
|
||||||
|
|
||||||
|
2. Execute the following commnands
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
apt-get update
|
||||||
|
apt-get install git python3 python3-pip python3-venv
|
||||||
|
|
||||||
|
python3 -m pip install --user pipx
|
||||||
|
python3 -m pipx ensurepath
|
||||||
|
|
||||||
|
su root
|
||||||
|
|
||||||
|
pipx install virtualenv
|
||||||
|
pipx install poetry
|
||||||
|
```
|
||||||
|
|
||||||
# Installation
|
# Installation
|
||||||
|
|
||||||
```
|
```
|
||||||
git clone https://gitlab.com/odoo-openupgrade-wizard/odoo-openupgrade-wizard/
|
git clone https://gitlab.com/odoo-openupgrade-wizard/odoo-openupgrade-wizard/
|
||||||
cd odoo-openupgrade-wizard
|
cd odoo-openupgrade-wizard
|
||||||
virtualenv env --python=python3.X
|
virtualenv env --python=python3
|
||||||
. ./env/bin/activate
|
. ./env/bin/activate
|
||||||
poetry install
|
poetry install
|
||||||
```
|
```
|
||||||
Note : ``python3.X`` should be >= to ``python3.6``
|
|
||||||
|
|
||||||
|
|
||||||
``odoo-openupgrade-wizard`` commands are now available in your virutalenv.
|
``odoo-openupgrade-wizard`` commands are now available in your virutalenv.
|
||||||
|
|
||||||
|
# Add python dependencies
|
||||||
|
|
||||||
|
If you add new dependencies, you have to:
|
||||||
|
|
||||||
|
- add the reference in the file ``pyproject.toml``
|
||||||
|
|
||||||
|
- run the following command in your virtualenv : ``poetry update``
|
||||||
|
|
||||||
# Run tests
|
# Run tests
|
||||||
|
|
||||||
## Via pytest
|
## Via pytest
|
||||||
|
|
@ -24,8 +66,19 @@ This will run tests only for the current ``python3.X`` version.
|
||||||
|
|
||||||
(in your virtualenv)
|
(in your virtualenv)
|
||||||
```
|
```
|
||||||
poetry run pytest --cov odoo_openupgrade_wizard -v
|
poetry run pytest --cov odoo_openupgrade_wizard --verbosity=2 --exitfirst
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Debug Test
|
||||||
|
```
|
||||||
|
poetry run pytest --verbosity=2 --exitfirst --cov odoo_openupgrade_wizard\
|
||||||
|
tests/cli_01_init_test.py\
|
||||||
|
tests/cli_02_get_code_test.py\
|
||||||
|
tests/cli_03_docker_build_test.py\
|
||||||
|
tests/cli_04_run_test.py\
|
||||||
|
tests/cli_06_execute_script_sql_test.py
|
||||||
|
```
|
||||||
|
|
||||||
## Via Tox
|
## Via Tox
|
||||||
|
|
||||||
This will run tests for all the python versions put in the ``tox.ini`` folder.
|
This will run tests for all the python versions put in the ``tox.ini`` folder.
|
||||||
|
|
@ -37,4 +90,33 @@ tox
|
||||||
|
|
||||||
Note : you should have all the python versions available in your local system.
|
Note : you should have all the python versions available in your local system.
|
||||||
|
|
||||||
# Structure of the project
|
|
||||||
|
```
|
||||||
|
sudo apt-get install python3.6 python3.6-distutils
|
||||||
|
sudo apt-get install python3.7 python3.7-distutils
|
||||||
|
sudo apt-get install python3.8 python3.8-distutils
|
||||||
|
sudo apt-get install python3.9 python3.9-distutils
|
||||||
|
```
|
||||||
|
|
||||||
|
## Via Gitlab Runner locally
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
# Install tools
|
||||||
|
pipx install gitlabci-local
|
||||||
|
|
||||||
|
# Run new available command
|
||||||
|
gitlabci-local
|
||||||
|
```
|
||||||
|
|
||||||
|
# Réferences
|
||||||
|
|
||||||
|
- how to install gitlab runner locally:
|
||||||
|
|
||||||
|
https://docs.gitlab.com/runner/install/linux-manually.html
|
||||||
|
|
||||||
|
- Check your CI locally. (French)
|
||||||
|
|
||||||
|
https://blog.stephane-robert.info/post/gitlab-valider-ci-yml/
|
||||||
|
|
||||||
|
https://blog.callr.tech/building-docker-images-with-gitlab-ci-best-practices/
|
||||||
|
|
|
||||||
40
INTERNAL_NOTES.md
Normal file
40
INTERNAL_NOTES.md
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
# TODO
|
||||||
|
|
||||||
|
* with coop it easy :
|
||||||
|
- short_help of group decorator ? seems useless...
|
||||||
|
|
||||||
|
* add constrains on ``--step`` option.
|
||||||
|
|
||||||
|
* revert : set 777 to log and filestore to be able to write on this folder
|
||||||
|
inside the containers. TODO, ask to coop it easy or commown for better alternative.
|
||||||
|
|
||||||
|
* allow to call odoo-bin shell, via : https://github.com/d11wtq/dockerpty
|
||||||
|
(see https://github.com/docker/docker-py/issues/247)
|
||||||
|
|
||||||
|
|
||||||
|
# List of the series of odoo
|
||||||
|
# python version is defined, based on the OCA CI.
|
||||||
|
# https://github.com/OCA/oca-addons-repo-template/blob/master/src/.github/workflows/%7B%25%20if%20ci%20%3D%3D%20'GitHub'%20%25%7Dtest.yml%7B%25%20endif%20%25%7D.jinja
|
||||||
|
|
||||||
|
|
||||||
|
# tips
|
||||||
|
```
|
||||||
|
# execute sql request in postgres docker
|
||||||
|
docker exec db psql --username=odoo --dbname=test_v12 -c "update res_partner set ""email"" = 'bib@bqsdfqsdf.txt';"
|
||||||
|
```
|
||||||
|
|
||||||
|
# TODO Nice To have
|
||||||
|
|
||||||
|
- Fix gitlabci-local. For the time being, it is not possible to debug
|
||||||
|
locally. (there are extra bugs locally that doesn't occures on gitlab,
|
||||||
|
in ``cli_B_03_run_test.py``...
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Try gitlab runner
|
||||||
|
|
||||||
|
curl -LJO "https://gitlab-runner-downloads.s3.amazonaws.com/latest/deb/gitlab-runner_amd64.deb"
|
||||||
|
|
||||||
|
sudo dpkg -i gitlab-runner_amd64.deb
|
||||||
|
|
||||||
|
(https://docs.gitlab.com/runner/install/linux-manually.html)
|
||||||
227
README.md
227
README.md
|
|
@ -1,3 +1,11 @@
|
||||||
|
[](https://gitlab.com/odoo-openupgrade-wizard/odoo-openupgrade-wizard/-/pipelines)
|
||||||
|
|
||||||
|
[](https://codecov.io/gl/odoo-openupgrade-wizard/odoo-openupgrade-wizard)
|
||||||
|
|
||||||
|
[](https://www.gnu.org/licenses/agpl-3.0)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# odoo-openupgrade-wizard
|
# odoo-openupgrade-wizard
|
||||||
|
|
||||||
Odoo Openupgrade Wizard is a tool that helps developpers to make major
|
Odoo Openupgrade Wizard is a tool that helps developpers to make major
|
||||||
|
|
@ -11,14 +19,26 @@ this tool is useful for complex migrations:
|
||||||
It will create a migration environment (with all the code available)
|
It will create a migration environment (with all the code available)
|
||||||
and provides helpers to run (and replay) migrations until it works.
|
and provides helpers to run (and replay) migrations until it works.
|
||||||
|
|
||||||
## Commands
|
* To develop and contribute to the library, refer to the ``DEVELOP.md`` file.
|
||||||
|
* Refer to the ``ROADMAP.md`` file to see the current limitation, bugs, and task to do.
|
||||||
|
* See authors in the ``CONTRIBUTORS.md`` file.
|
||||||
|
|
||||||
### ``odoo-openupgrade-wizard init``
|
# Installation
|
||||||
|
|
||||||
|
``pipx install odoo-openupgrade-wizard``.
|
||||||
|
|
||||||
|
|
||||||
|
# Usage
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## ``odoo-openupgrade-wizard init``
|
||||||
|
|
||||||
```
|
```
|
||||||
odoo-openupgrade-wizard init\
|
odoo-openupgrade-wizard init\
|
||||||
--initial-version=10.0\
|
--initial-release=10.0\
|
||||||
--final-version=12.0\
|
--final-release=12.0\
|
||||||
|
--project-name=my-customer-10-12\
|
||||||
--extra-repository=OCA/web,OCA/server-tools
|
--extra-repository=OCA/web,OCA/server-tools
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -27,17 +47,10 @@ This will generate the following structure :
|
||||||
|
|
||||||
```
|
```
|
||||||
config.yml
|
config.yml
|
||||||
|
modules.csv
|
||||||
log/
|
log/
|
||||||
2022_03_25__23_12_41__init.log
|
2022_03_25__23_12_41__init.log
|
||||||
...
|
...
|
||||||
repos/
|
|
||||||
10.0.yml
|
|
||||||
11.0.yml
|
|
||||||
12.0.yml
|
|
||||||
requirements/
|
|
||||||
10.0_requirements.txt
|
|
||||||
11.0_requirements.txt
|
|
||||||
12.0_requirements.txt
|
|
||||||
scripts/
|
scripts/
|
||||||
step_1__update__10.0/
|
step_1__update__10.0/
|
||||||
pre-migration.sql
|
pre-migration.sql
|
||||||
|
|
@ -52,31 +65,195 @@ scripts/
|
||||||
pre-migration.sql
|
pre-migration.sql
|
||||||
post-migration.py
|
post-migration.py
|
||||||
src/
|
src/
|
||||||
|
env_10.0/
|
||||||
|
debian_requirements.txt
|
||||||
|
Dockerfile
|
||||||
|
odoo.cfg
|
||||||
|
python_requirements.txt
|
||||||
|
repos.yml
|
||||||
|
src/
|
||||||
|
odoo/
|
||||||
|
openupgrade/
|
||||||
|
env_11.0/
|
||||||
|
...
|
||||||
|
env_12.0/
|
||||||
|
...
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
* ``log/`` will contains all the log of the ``odoo-openupgrade-wizard``
|
* ``config.xml`` is the main configuration file of your project.
|
||||||
|
|
||||||
|
* ``modules.csv`` file is an optional file. You can fill it with the list
|
||||||
|
of your modules installed on your production. The first column of this
|
||||||
|
file should contain the technical name of the module.
|
||||||
|
|
||||||
|
* ``log`` folder will contains all the log of the ``odoo-openupgrade-wizard``
|
||||||
and the logs of the odoo instance that will be executed.
|
and the logs of the odoo instance that will be executed.
|
||||||
|
|
||||||
|
* ``scripts`` folder contains a folder per migration step. In each step folder:
|
||||||
|
- ``pre-migration.sql`` can contains extra SQL queries you want to execute
|
||||||
|
before beginning the step.
|
||||||
|
- ``post-migration.py`` can contains extra python command to execute
|
||||||
|
after the execution of the step. (the orm will be available)
|
||||||
|
Script will be executed with ``click-odoo`` command. All the ORM is available
|
||||||
|
via the ``env`` variable.
|
||||||
|
|
||||||
* ``repos/`` contains a file per version of odoo, that enumerates the
|
* ``src`` folder contains a folder per Odoo version. In each environment folder:
|
||||||
list of the repositories to use to run each odoo instance.
|
|
||||||
|
- ``repos.yml`` enumerates the list of the repositories to use to run the odoo instance.
|
||||||
The syntax should respect the ``gitaggregate`` command.
|
The syntax should respect the ``gitaggregate`` command.
|
||||||
(See : https://pypi.org/project/git-aggregator/)
|
(See : https://pypi.org/project/git-aggregator/)
|
||||||
Repo files are pre-generated. You can update them with your custom settings.
|
Repo files are pre-generated. You can update them with your custom settings.
|
||||||
(custom branches, extra PRs, git shallow options, etc...)
|
(custom branches, extra PRs, git shallow options, etc...)
|
||||||
|
|
||||||
* ``requirements/`` contains a file per version of odoo, that enumerates the
|
- ``python_requirements.txt`` enumerates the list of extra python librairies
|
||||||
list of extra python librairies required to run each odoo instance.
|
required to run the odoo instance.
|
||||||
The syntax should respect the ``pip install -r`` command.
|
The syntax should respect the ``pip install -r`` command.
|
||||||
(See : https://pip.pypa.io/en/stable/reference/requirements-file-format/)
|
(See : https://pip.pypa.io/en/stable/reference/requirements-file-format/)
|
||||||
|
|
||||||
* ``scripts`` contains a folder per migration step. In each step folder:
|
- ``debian_requirements.txt`` enumerates the list of extra system librairies
|
||||||
- ``pre-migration.sql`` can contains extra SQL queries you want to execute
|
required to run the odoo instance.
|
||||||
before beginning the step.
|
|
||||||
- ``post-migration.py`` can contains extra python command to execute
|
|
||||||
after the execution of the step. (the orm will be available)
|
|
||||||
|
|
||||||
# TODO
|
- ``odoo.cfg`` file. Add here extra configuration required for your custom modules.
|
||||||
|
the classical keys (``db_host``, ``db_port``, etc...) are automatically autogenerated.
|
||||||
|
|
||||||
* with coop it easy :
|
At this step, you should change the autogenerated files.
|
||||||
- short_help of group decorator ? seems useless...
|
You can use default files, if you have a very simple odoo instance without custom code,
|
||||||
|
extra repositories, or dependencies...
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## ``odoo-openupgrade-wizard get-code``
|
||||||
|
|
||||||
|
```
|
||||||
|
odoo-openupgrade-wizard get-code
|
||||||
|
```
|
||||||
|
|
||||||
|
This command will simply get all the Odoo code required to run all the steps
|
||||||
|
for your migration with the ``gitaggregate`` tools.
|
||||||
|
|
||||||
|
The code is defined in the ``repos.yml`` of each sub folders.
|
||||||
|
|
||||||
|
**Note**
|
||||||
|
|
||||||
|
* This step could take a big while !
|
||||||
|
|
||||||
|
**Optional arguments**
|
||||||
|
|
||||||
|
if you want to update the code of some given releases, you can provide an extra parameter:
|
||||||
|
|
||||||
|
```
|
||||||
|
odoo-openupgrade-wizard get-code --releases 10.0,11.0
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## ``odoo-openupgrade-wizard docker-build``
|
||||||
|
|
||||||
|
This will build local docker images that will be used in the following steps.
|
||||||
|
|
||||||
|
This script will pull official odoo docker images, defined in the ``Dockerfile`` of
|
||||||
|
each folder, and build a custom images on top the official one, installing inside
|
||||||
|
custom librairies defined in ``debian_requirements.txt``, ``python_requirements.txt``.
|
||||||
|
|
||||||
|
At this end of this step executing the following command should show a docker image per version.
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
$ docker images --filter "reference=odoo-openupgrade-wizard-*"
|
||||||
|
|
||||||
|
REPOSITORY TAG IMAGE ID CREATED SIZE
|
||||||
|
odoo-openupgrade-wizard-image---my-customer-10-12---12.0 latest ef664c366208 2 weeks ago 1.39GB
|
||||||
|
odoo-openupgrade-wizard-image---my-customer-10-12---11.0 latest 24e283fe4ae4 2 weeks ago 1.16GB
|
||||||
|
odoo-openupgrade-wizard-image---my-customer-10-12---10.0 latest 9d94dce2bd4e 2 weeks ago 924MB
|
||||||
|
```
|
||||||
|
|
||||||
|
**Optional arguments**
|
||||||
|
|
||||||
|
* if you want to (re)build an image for some given releases, you can provide
|
||||||
|
an extra parameter: ``--releases 10.0,12.0``
|
||||||
|
|
||||||
|
**Note**
|
||||||
|
|
||||||
|
* This step could take a big while also !
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## ``odoo-openupgrade-wizard run``
|
||||||
|
|
||||||
|
```
|
||||||
|
odoo-openupgrade-wizard run\
|
||||||
|
--step 1\
|
||||||
|
--database DB_NAME
|
||||||
|
```
|
||||||
|
|
||||||
|
Run an Odoo instance with the environment defined by the step argument.
|
||||||
|
|
||||||
|
The database will be created, if it doesn't exists.
|
||||||
|
|
||||||
|
if ``stop-after-init`` is disabled, the odoo instance will be available
|
||||||
|
at your host, at the following url : http://localhost:9069
|
||||||
|
(Port depends on your ``host_odoo_xmlrpc_port`` setting of your ``config.yml`` file)
|
||||||
|
|
||||||
|
**Optional arguments**
|
||||||
|
|
||||||
|
* You can add ``--init-modules=purchase,sale`` to install modules.
|
||||||
|
|
||||||
|
* You can add ``stop-after-init`` flag to turn off the process at the end
|
||||||
|
of the installation.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## ``odoo-openupgrade-wizard install-from-csv``
|
||||||
|
|
||||||
|
```
|
||||||
|
odoo-openupgrade-wizard install-from-csv\
|
||||||
|
--database DB_NAME
|
||||||
|
```
|
||||||
|
|
||||||
|
Install the list of the modules defined in your ``modules.csv`` files on the
|
||||||
|
given database.
|
||||||
|
|
||||||
|
The database will be created, if it doesn't exists.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## ``odoo-openupgrade-wizard upgrade``
|
||||||
|
|
||||||
|
```
|
||||||
|
odoo-openupgrade-wizard upgrade\
|
||||||
|
--database DB_NAME
|
||||||
|
```
|
||||||
|
|
||||||
|
Realize an upgrade of the database from the initial release to
|
||||||
|
the final release, following the different steps.
|
||||||
|
|
||||||
|
For each step, it will :
|
||||||
|
|
||||||
|
1. Execute the ``pre-migration.sql`` of the step.
|
||||||
|
2. Realize an "update all" (in an upgrade or update context)
|
||||||
|
3. Execute the scripts via XML-RPC (via ``odoorpc``) defined in
|
||||||
|
the ``post-migration.py`` file.
|
||||||
|
|
||||||
|
**Optional arguments**
|
||||||
|
|
||||||
|
* You can add ``--first-step=2`` to start at the second step.
|
||||||
|
|
||||||
|
* You can add ``--last-step=3`` to end at the third step.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## ``odoo-openupgrade-wizard generate-module-analysis``
|
||||||
|
|
||||||
|
```
|
||||||
|
odoo-openupgrade-wizard generate-module-analysis\
|
||||||
|
--database DB_NAME
|
||||||
|
--step 2
|
||||||
|
--modules MODULE_LIST
|
||||||
|
```
|
||||||
|
|
||||||
|
Realize an analyze between the target version (in parameter via the step argument)
|
||||||
|
and the previous version. It will generate analysis_file.txt files present
|
||||||
|
in OpenUpgrade project.
|
||||||
|
You can also use this fonction to analyze differences for custom / OCA modules
|
||||||
|
between several versions, in case of refactoring.
|
||||||
|
|
|
||||||
45
ROADMAP.md
Normal file
45
ROADMAP.md
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
# Python Version
|
||||||
|
|
||||||
|
* py310 is not available, due to dependencies to ``odoorpc`` that raise an error :
|
||||||
|
``ERROR tests/cli_A_init_test.py - AttributeError: module 'collections' has no attribute 'MutableMapping'``
|
||||||
|
Follow bug : https://stackoverflow.com/questions/69512672/getting-attributeerror-module-collections-has-no-attribute-mutablemapping-w
|
||||||
|
|
||||||
|
# openUpgradelib Versions
|
||||||
|
|
||||||
|
* ``openupgradelib`` requires a new feature psycopg2.sql since
|
||||||
|
(21 Aug 2019)
|
||||||
|
https://github.com/OCA/openupgradelib/commit/7408580e4469ba4b0cabb923da7facd71567a2fb
|
||||||
|
so we pin openupgradelib==2.0.0 (21 Jul 2018)
|
||||||
|
|
||||||
|
The python version in the Odoo:12 docker image is : ``Python 3.5.3 (default, Apr 5 2021, 09:00:41)`` that is very old.
|
||||||
|
|
||||||
|
|
||||||
|
- https://github.com/OCA/openupgradelib/issues/248
|
||||||
|
- https://github.com/OCA/openupgradelib/issues/288
|
||||||
|
- https://github.com/OCA/openupgradelib.git@ed01555b8ae20f66b3af178c8ecaf6edd110ce75#egg=openupgradelib
|
||||||
|
|
||||||
|
TODO : Fix via another way (other way than pining ``openuppgradelib`` version) the problem of old odoo versions. (it makes the upgrade failing for old revision (V8, etc...))
|
||||||
|
|
||||||
|
# Gitlab-CI
|
||||||
|
|
||||||
|
* for the time being, Features requiring ``odoorpc`` are failing in gitlab-CI.
|
||||||
|
Tests are working locally but there is a network problem. For that reason, tests witch names
|
||||||
|
begins by ``cli_2`` like (``cli_20_install_from_csv_test.py``) are disabled in ``.gitlab-ci.yml``.
|
||||||
|
|
||||||
|
TODO : work with Pierrick Brun, to run gitlab-runner on Akretion CI (without docker), to see if it is
|
||||||
|
fixing the problem.
|
||||||
|
|
||||||
|
# Features Work In Progress
|
||||||
|
|
||||||
|
* Add a tools to analyze workload.
|
||||||
|
|
||||||
|
# Possible Improvments
|
||||||
|
|
||||||
|
* select ``without-demo all`` depending on if the database
|
||||||
|
is created or not (and if current database contains demo data, checking if base.user_demo exists ?)
|
||||||
|
|
||||||
|
# Other points not in the scope of GRAP work
|
||||||
|
|
||||||
|
* Allow to use custom docker images.
|
||||||
|
|
||||||
|
* Check if there are default values for containers, limiting ressources.
|
||||||
|
|
@ -1,25 +1,38 @@
|
||||||
import datetime
|
import datetime
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import click
|
import click
|
||||||
import yaml
|
import yaml
|
||||||
|
from click_loglevel import LogLevel
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
|
|
||||||
import odoo_openupgrade_wizard
|
import odoo_openupgrade_wizard
|
||||||
|
from odoo_openupgrade_wizard.cli_docker_build import docker_build
|
||||||
|
from odoo_openupgrade_wizard.cli_estimate_workload import estimate_workload
|
||||||
|
from odoo_openupgrade_wizard.cli_execute_script_python import (
|
||||||
|
execute_script_python,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.cli_execute_script_sql import execute_script_sql
|
||||||
|
from odoo_openupgrade_wizard.cli_generate_module_analysis import (
|
||||||
|
generate_module_analysis,
|
||||||
|
)
|
||||||
from odoo_openupgrade_wizard.cli_get_code import get_code
|
from odoo_openupgrade_wizard.cli_get_code import get_code
|
||||||
from odoo_openupgrade_wizard.cli_init import init
|
from odoo_openupgrade_wizard.cli_init import init
|
||||||
|
from odoo_openupgrade_wizard.cli_install_from_csv import install_from_csv
|
||||||
|
from odoo_openupgrade_wizard.cli_run import run
|
||||||
|
from odoo_openupgrade_wizard.cli_upgrade import upgrade
|
||||||
from odoo_openupgrade_wizard.tools_system import ensure_folder_exists
|
from odoo_openupgrade_wizard.tools_system import ensure_folder_exists
|
||||||
|
|
||||||
|
|
||||||
@click.group()
|
@click.group()
|
||||||
@click.version_option(version=odoo_openupgrade_wizard.__version__)
|
@click.version_option(version=odoo_openupgrade_wizard.__version__)
|
||||||
@click.option(
|
@click.option(
|
||||||
"-ef",
|
|
||||||
"--env-folder",
|
"--env-folder",
|
||||||
default="./",
|
default="./",
|
||||||
type=click.Path(
|
type=click.Path(
|
||||||
exists=True,
|
exists=True,
|
||||||
dir_okay=True,
|
|
||||||
file_okay=False,
|
file_okay=False,
|
||||||
writable=True,
|
writable=True,
|
||||||
resolve_path=True,
|
resolve_path=True,
|
||||||
|
|
@ -29,19 +42,23 @@ from odoo_openupgrade_wizard.tools_system import ensure_folder_exists
|
||||||
" use current folder (./).",
|
" use current folder (./).",
|
||||||
)
|
)
|
||||||
@click.option(
|
@click.option(
|
||||||
"-fs",
|
|
||||||
"--filestore-folder",
|
"--filestore-folder",
|
||||||
type=click.Path(dir_okay=True, file_okay=False, resolve_path=True),
|
type=click.Path(
|
||||||
|
exists=True, file_okay=False, writable=True, resolve_path=True
|
||||||
|
),
|
||||||
help="Folder that contains the Odoo filestore of the database(s)"
|
help="Folder that contains the Odoo filestore of the database(s)"
|
||||||
" to migrate. Let empty to use the subfolder 'filestore' of the"
|
" to migrate. Let empty to use the subfolder 'filestore' of the"
|
||||||
" environment folder.",
|
" environment folder.",
|
||||||
)
|
)
|
||||||
|
@click.option("-l", "--log-level", type=LogLevel(), default=logging.INFO)
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
def main(ctx, env_folder, filestore_folder):
|
def main(ctx, env_folder, filestore_folder, log_level):
|
||||||
"""
|
"""
|
||||||
Provides a command set to perform odoo Community Edition migrations.
|
Provides a command set to perform odoo Community Edition migrations.
|
||||||
"""
|
"""
|
||||||
date_begin = datetime.datetime.now()
|
date_begin = datetime.datetime.now()
|
||||||
|
logger.remove()
|
||||||
|
logger.add(sys.stderr, level=log_level)
|
||||||
logger.debug("Beginning script '%s' ..." % (ctx.invoked_subcommand))
|
logger.debug("Beginning script '%s' ..." % (ctx.invoked_subcommand))
|
||||||
if not isinstance(ctx.obj, dict):
|
if not isinstance(ctx.obj, dict):
|
||||||
ctx.obj = {}
|
ctx.obj = {}
|
||||||
|
|
@ -49,6 +66,10 @@ def main(ctx, env_folder, filestore_folder):
|
||||||
# Define all the folder required by the tools
|
# Define all the folder required by the tools
|
||||||
env_folder_path = Path(env_folder)
|
env_folder_path = Path(env_folder)
|
||||||
src_folder_path = env_folder_path / Path("./src/")
|
src_folder_path = env_folder_path / Path("./src/")
|
||||||
|
# Note: postgres folder should be a subfolder, because
|
||||||
|
# the parent folder will contain a .gitignore file
|
||||||
|
# that the postgres docker image doesn't like
|
||||||
|
postgres_folder_path = env_folder_path / Path("./postgres_data/data")
|
||||||
script_folder_path = env_folder_path / Path("./scripts/")
|
script_folder_path = env_folder_path / Path("./scripts/")
|
||||||
log_folder_path = env_folder_path / Path("./log/")
|
log_folder_path = env_folder_path / Path("./log/")
|
||||||
if not filestore_folder:
|
if not filestore_folder:
|
||||||
|
|
@ -57,39 +78,46 @@ def main(ctx, env_folder, filestore_folder):
|
||||||
filestore_folder_path = Path(filestore_folder)
|
filestore_folder_path = Path(filestore_folder)
|
||||||
|
|
||||||
# ensure log folder exists
|
# ensure log folder exists
|
||||||
ensure_folder_exists(log_folder_path)
|
ensure_folder_exists(log_folder_path, mode="777", git_ignore_content=True)
|
||||||
|
|
||||||
# Create log file
|
# Create log file
|
||||||
log_file_path = log_folder_path / Path(
|
log_prefix = "{}__{}".format(
|
||||||
"{}__{}.log".format(
|
|
||||||
date_begin.strftime("%Y_%m_%d__%H_%M_%S"), ctx.invoked_subcommand
|
date_begin.strftime("%Y_%m_%d__%H_%M_%S"), ctx.invoked_subcommand
|
||||||
)
|
)
|
||||||
)
|
log_file_path = log_folder_path / Path(log_prefix + ".log")
|
||||||
logger.add(log_file_path)
|
logger.add(log_file_path)
|
||||||
|
|
||||||
config_file_path = env_folder_path / Path("config.yml")
|
config_file_path = env_folder_path / Path("config.yml")
|
||||||
|
module_file_path = env_folder_path / Path("modules.csv")
|
||||||
|
|
||||||
# Add all global values in the context
|
# Add all global values in the context
|
||||||
ctx.obj["env_folder_path"] = env_folder_path
|
ctx.obj["env_folder_path"] = env_folder_path
|
||||||
ctx.obj["src_folder_path"] = src_folder_path
|
ctx.obj["src_folder_path"] = src_folder_path
|
||||||
|
ctx.obj["postgres_folder_path"] = postgres_folder_path
|
||||||
ctx.obj["script_folder_path"] = script_folder_path
|
ctx.obj["script_folder_path"] = script_folder_path
|
||||||
ctx.obj["log_folder_path"] = log_folder_path
|
ctx.obj["log_folder_path"] = log_folder_path
|
||||||
|
ctx.obj["log_prefix"] = log_prefix
|
||||||
ctx.obj["filestore_folder_path"] = filestore_folder_path
|
ctx.obj["filestore_folder_path"] = filestore_folder_path
|
||||||
|
|
||||||
ctx.obj["config_file_path"] = config_file_path
|
ctx.obj["config_file_path"] = config_file_path
|
||||||
|
ctx.obj["module_file_path"] = module_file_path
|
||||||
|
|
||||||
# Load the main configuration file
|
# Load the main configuration file
|
||||||
if config_file_path.exists():
|
if config_file_path.exists():
|
||||||
with open(config_file_path) as file:
|
with open(config_file_path) as file:
|
||||||
config = yaml.safe_load(file)
|
config = yaml.safe_load(file)
|
||||||
# for step in config["migration_steps"]:
|
|
||||||
# step["local_path"] = src_folder_path / Path(
|
|
||||||
# "env_%s" % step["version"]
|
|
||||||
# )
|
|
||||||
ctx.obj["config"] = config
|
ctx.obj["config"] = config
|
||||||
elif ctx.invoked_subcommand != "init":
|
elif ctx.invoked_subcommand != "init":
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
main.add_command(init)
|
main.add_command(docker_build)
|
||||||
|
main.add_command(estimate_workload)
|
||||||
|
main.add_command(execute_script_python)
|
||||||
|
main.add_command(execute_script_sql)
|
||||||
|
main.add_command(generate_module_analysis)
|
||||||
main.add_command(get_code)
|
main.add_command(get_code)
|
||||||
|
main.add_command(init)
|
||||||
|
main.add_command(install_from_csv)
|
||||||
|
main.add_command(run)
|
||||||
|
main.add_command(upgrade)
|
||||||
|
|
|
||||||
30
odoo_openupgrade_wizard/cli_docker_build.py
Normal file
30
odoo_openupgrade_wizard/cli_docker_build.py
Normal file
|
|
@ -0,0 +1,30 @@
|
||||||
|
import click
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
|
get_odoo_versions_from_options,
|
||||||
|
releases_options,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_docker import build_image
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import (
|
||||||
|
get_docker_image_tag,
|
||||||
|
get_odoo_env_path,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@releases_options
|
||||||
|
@click.pass_context
|
||||||
|
def docker_build(ctx, releases):
|
||||||
|
"""Build Odoo Docker Images. (One image per release)"""
|
||||||
|
|
||||||
|
for odoo_version in get_odoo_versions_from_options(ctx, releases):
|
||||||
|
logger.info(
|
||||||
|
"Building Odoo docker image for release '%s'. "
|
||||||
|
"This can take a while..." % (odoo_version["release"])
|
||||||
|
)
|
||||||
|
image = build_image(
|
||||||
|
get_odoo_env_path(ctx, odoo_version),
|
||||||
|
get_docker_image_tag(ctx, odoo_version),
|
||||||
|
)
|
||||||
|
logger.info("Docker Image build. '%s'" % image[0].tags[0])
|
||||||
37
odoo_openupgrade_wizard/cli_estimate_workload.py
Normal file
37
odoo_openupgrade_wizard/cli_estimate_workload.py
Normal file
|
|
@ -0,0 +1,37 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard import templates
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo_module import Analysis
|
||||||
|
from odoo_openupgrade_wizard.tools_system import (
|
||||||
|
ensure_file_exists_from_template,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@click.option(
|
||||||
|
"--analysis-file-path",
|
||||||
|
type=click.Path(
|
||||||
|
dir_okay=False,
|
||||||
|
),
|
||||||
|
default="./analysis.html",
|
||||||
|
)
|
||||||
|
@click.pass_context
|
||||||
|
def estimate_workload(ctx, analysis_file_path):
|
||||||
|
# Analyse
|
||||||
|
analysis = Analysis(ctx)
|
||||||
|
|
||||||
|
# Make some clean to display properly
|
||||||
|
analysis.modules = sorted(analysis.modules)
|
||||||
|
|
||||||
|
# Render html file
|
||||||
|
# TODO, make
|
||||||
|
ensure_file_exists_from_template(
|
||||||
|
Path(analysis_file_path),
|
||||||
|
templates.ANALYSIS_TEMPLATE,
|
||||||
|
ctx=ctx,
|
||||||
|
analysis=analysis,
|
||||||
|
current_date=datetime.now().strftime("%d/%m/%Y %H:%M:%S"),
|
||||||
|
)
|
||||||
32
odoo_openupgrade_wizard/cli_execute_script_python.py
Normal file
32
odoo_openupgrade_wizard/cli_execute_script_python.py
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
|
database_option_required,
|
||||||
|
get_migration_step_from_options,
|
||||||
|
step_option,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import execute_click_odoo_python_files
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@step_option
|
||||||
|
@database_option_required
|
||||||
|
@click.option(
|
||||||
|
"--script-file-path",
|
||||||
|
multiple=True,
|
||||||
|
type=click.Path(
|
||||||
|
exists=True,
|
||||||
|
dir_okay=False,
|
||||||
|
),
|
||||||
|
help="List of python files that will be executed, replacing the default"
|
||||||
|
" scripts placed in the migration step folder.",
|
||||||
|
)
|
||||||
|
@click.pass_context
|
||||||
|
def execute_script_python(ctx, step, database, script_file_path):
|
||||||
|
migration_step = get_migration_step_from_options(ctx, step)
|
||||||
|
|
||||||
|
execute_click_odoo_python_files(
|
||||||
|
ctx, database, migration_step, [Path(x) for x in script_file_path]
|
||||||
|
)
|
||||||
34
odoo_openupgrade_wizard/cli_execute_script_sql.py
Normal file
34
odoo_openupgrade_wizard/cli_execute_script_sql.py
Normal file
|
|
@ -0,0 +1,34 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
|
database_option_required,
|
||||||
|
get_migration_step_from_options,
|
||||||
|
step_option,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import (
|
||||||
|
execute_sql_files_pre_migration,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@step_option
|
||||||
|
@database_option_required
|
||||||
|
@click.option(
|
||||||
|
"--script-file-path",
|
||||||
|
multiple=True,
|
||||||
|
type=click.Path(
|
||||||
|
exists=True,
|
||||||
|
dir_okay=False,
|
||||||
|
),
|
||||||
|
help="List of SQL files that will be executed, replacing the default"
|
||||||
|
" scripts placed in the migration step folder.",
|
||||||
|
)
|
||||||
|
@click.pass_context
|
||||||
|
def execute_script_sql(ctx, step, database, script_file_path):
|
||||||
|
migration_step = get_migration_step_from_options(ctx, step)
|
||||||
|
|
||||||
|
execute_sql_files_pre_migration(
|
||||||
|
ctx, database, migration_step, [Path(x) for x in script_file_path]
|
||||||
|
)
|
||||||
147
odoo_openupgrade_wizard/cli_generate_module_analysis.py
Normal file
147
odoo_openupgrade_wizard/cli_generate_module_analysis.py
Normal file
|
|
@ -0,0 +1,147 @@
|
||||||
|
import click
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
|
database_option,
|
||||||
|
get_migration_steps_from_options,
|
||||||
|
step_option,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.configuration_version_dependant import (
|
||||||
|
generate_analysis_files,
|
||||||
|
generate_records,
|
||||||
|
get_installable_odoo_modules,
|
||||||
|
get_upgrade_analysis_module,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import (
|
||||||
|
get_odoo_env_path,
|
||||||
|
kill_odoo,
|
||||||
|
run_odoo,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo_instance import OdooInstance
|
||||||
|
from odoo_openupgrade_wizard.tools_system import ensure_folder_writable
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@step_option
|
||||||
|
@database_option
|
||||||
|
@click.option(
|
||||||
|
"-m",
|
||||||
|
"--modules",
|
||||||
|
type=str,
|
||||||
|
help="Coma-separated list of modules to analysis."
|
||||||
|
" Let empty to analyse all the Odoo modules.",
|
||||||
|
)
|
||||||
|
@click.pass_context
|
||||||
|
def generate_module_analysis(ctx, step, database, modules):
|
||||||
|
|
||||||
|
migration_steps = get_migration_steps_from_options(ctx, step - 1, step)
|
||||||
|
|
||||||
|
initial_step = migration_steps[0].copy()
|
||||||
|
final_step = migration_steps[1].copy()
|
||||||
|
|
||||||
|
alternative_xml_rpc_port = ctx.obj["config"]["odoo_host_xmlrpc_port"] + 10
|
||||||
|
|
||||||
|
if not database:
|
||||||
|
database = "%s__analysis__" % (
|
||||||
|
ctx.obj["config"]["project_name"].replace("-", "_"),
|
||||||
|
)
|
||||||
|
|
||||||
|
initial_database = "%s_%s" % (
|
||||||
|
database,
|
||||||
|
str(initial_step["release"]).replace(".", ""),
|
||||||
|
)
|
||||||
|
final_database = "%s_%s" % (
|
||||||
|
database,
|
||||||
|
str(final_step["release"]).replace(".", ""),
|
||||||
|
)
|
||||||
|
|
||||||
|
modules = modules and modules.split(",") or []
|
||||||
|
|
||||||
|
# Force to be in openupgrade mode
|
||||||
|
initial_step["action"] = final_step["action"] = "upgrade"
|
||||||
|
|
||||||
|
try:
|
||||||
|
# INITIAL : Run odoo and install analysis module
|
||||||
|
run_odoo(
|
||||||
|
ctx,
|
||||||
|
initial_step,
|
||||||
|
database=initial_database,
|
||||||
|
detached_container=False,
|
||||||
|
stop_after_init=True,
|
||||||
|
init=get_upgrade_analysis_module(initial_step),
|
||||||
|
)
|
||||||
|
|
||||||
|
# INITIAL : Run odoo for odoorpc
|
||||||
|
initial_container = run_odoo(
|
||||||
|
ctx,
|
||||||
|
initial_step,
|
||||||
|
database=initial_database,
|
||||||
|
detached_container=True,
|
||||||
|
)
|
||||||
|
# INITIAL : install modules to analyse and generate records
|
||||||
|
initial_instance = OdooInstance(ctx, initial_database)
|
||||||
|
initial_modules = (
|
||||||
|
modules
|
||||||
|
and modules
|
||||||
|
or get_installable_odoo_modules(initial_instance, initial_step)
|
||||||
|
)
|
||||||
|
initial_instance.install_modules(initial_modules)
|
||||||
|
generate_records(initial_instance, initial_step)
|
||||||
|
|
||||||
|
# FINAL : Run odoo and install analysis module
|
||||||
|
run_odoo(
|
||||||
|
ctx,
|
||||||
|
final_step,
|
||||||
|
database=final_database,
|
||||||
|
detached_container=False,
|
||||||
|
stop_after_init=True,
|
||||||
|
init=get_upgrade_analysis_module(final_step),
|
||||||
|
alternative_xml_rpc_port=alternative_xml_rpc_port,
|
||||||
|
)
|
||||||
|
|
||||||
|
# name of the first odoo instance inside the second odoo instance
|
||||||
|
odoo_initial_host_name = "odoo_initial_instance"
|
||||||
|
|
||||||
|
# FINAL : Run odoo for odoorpc and install modules to analyse
|
||||||
|
run_odoo(
|
||||||
|
ctx,
|
||||||
|
final_step,
|
||||||
|
database=final_database,
|
||||||
|
detached_container=True,
|
||||||
|
alternative_xml_rpc_port=alternative_xml_rpc_port,
|
||||||
|
links={initial_container.name: odoo_initial_host_name},
|
||||||
|
)
|
||||||
|
|
||||||
|
# FINAL : install modules to analyse and generate records
|
||||||
|
final_instance = OdooInstance(
|
||||||
|
ctx,
|
||||||
|
final_database,
|
||||||
|
alternative_xml_rpc_port=alternative_xml_rpc_port,
|
||||||
|
)
|
||||||
|
final_modules = (
|
||||||
|
modules
|
||||||
|
and modules
|
||||||
|
or get_installable_odoo_modules(final_instance, final_step)
|
||||||
|
)
|
||||||
|
final_instance.install_modules(final_modules)
|
||||||
|
generate_records(final_instance, final_step)
|
||||||
|
|
||||||
|
# Make writable files and directories for "other"
|
||||||
|
# group to make possible to write analysis files
|
||||||
|
# for docker container user
|
||||||
|
ensure_folder_writable(
|
||||||
|
get_odoo_env_path(ctx, {"release": final_step["release"]}) / "src"
|
||||||
|
)
|
||||||
|
|
||||||
|
generate_analysis_files(
|
||||||
|
final_instance,
|
||||||
|
final_step,
|
||||||
|
odoo_initial_host_name,
|
||||||
|
initial_database,
|
||||||
|
)
|
||||||
|
|
||||||
|
except (KeyboardInterrupt, SystemExit):
|
||||||
|
logger.info("Received Keyboard Interrupt or System Exiting...")
|
||||||
|
finally:
|
||||||
|
kill_odoo(ctx, initial_step)
|
||||||
|
kill_odoo(ctx, final_step)
|
||||||
|
|
@ -1,22 +1,20 @@
|
||||||
import click
|
import click
|
||||||
|
|
||||||
from odoo_openupgrade_wizard.configuration_version_dependant import (
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
get_odoo_env_path,
|
get_odoo_versions_from_options,
|
||||||
|
releases_options,
|
||||||
)
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import get_odoo_env_path
|
||||||
from odoo_openupgrade_wizard.tools_system import git_aggregate
|
from odoo_openupgrade_wizard.tools_system import git_aggregate
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
@click.command()
|
||||||
|
@releases_options
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
def get_code(ctx):
|
def get_code(ctx, releases):
|
||||||
"""
|
"""Get code by running gitaggregate command for each release"""
|
||||||
Build OpenUpgrade Wizard Environment:
|
|
||||||
- gitaggregate all the repositories
|
|
||||||
"""
|
|
||||||
|
|
||||||
# TODO, make it modular.
|
for odoo_version in get_odoo_versions_from_options(ctx, releases):
|
||||||
# For exemple, possibility to aggregate only 9.0 and 11.0 releaase
|
|
||||||
for odoo_version in ctx.obj["config"]["odoo_versions"]:
|
|
||||||
folder_path = get_odoo_env_path(ctx, odoo_version)
|
folder_path = get_odoo_env_path(ctx, odoo_version)
|
||||||
repo_file_path = folder_path / "repos.yml"
|
repo_file_path = folder_path / "repos.yml"
|
||||||
git_aggregate(folder_path, repo_file_path)
|
git_aggregate(folder_path, repo_file_path)
|
||||||
|
|
|
||||||
|
|
@ -4,10 +4,10 @@ import click
|
||||||
|
|
||||||
from odoo_openupgrade_wizard import templates
|
from odoo_openupgrade_wizard import templates
|
||||||
from odoo_openupgrade_wizard.configuration_version_dependant import (
|
from odoo_openupgrade_wizard.configuration_version_dependant import (
|
||||||
get_odoo_env_path,
|
|
||||||
get_odoo_versions,
|
get_odoo_versions,
|
||||||
get_release_options,
|
get_release_options,
|
||||||
)
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import get_odoo_env_path
|
||||||
from odoo_openupgrade_wizard.tools_system import (
|
from odoo_openupgrade_wizard.tools_system import (
|
||||||
ensure_file_exists_from_template,
|
ensure_file_exists_from_template,
|
||||||
ensure_folder_exists,
|
ensure_folder_exists,
|
||||||
|
|
@ -15,6 +15,16 @@ from odoo_openupgrade_wizard.tools_system import (
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
@click.command()
|
||||||
|
@click.option(
|
||||||
|
"--project-name",
|
||||||
|
required=True,
|
||||||
|
prompt=True,
|
||||||
|
type=str,
|
||||||
|
help="Name of your project without spaces neither special"
|
||||||
|
" chars or uppercases. exemple 'my-customer-9-12'."
|
||||||
|
" This will be used to tag with a friendly"
|
||||||
|
" name the odoo docker images.",
|
||||||
|
)
|
||||||
@click.option(
|
@click.option(
|
||||||
"--initial-release",
|
"--initial-release",
|
||||||
required=True,
|
required=True,
|
||||||
|
|
@ -35,9 +45,10 @@ from odoo_openupgrade_wizard.tools_system import (
|
||||||
"Ex: 'OCA/web,OCA/server-tools,GRAP/grap-odoo-incubator'",
|
"Ex: 'OCA/web,OCA/server-tools,GRAP/grap-odoo-incubator'",
|
||||||
)
|
)
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
def init(ctx, initial_release, final_release, extra_repository_list):
|
def init(
|
||||||
"""
|
ctx, project_name, initial_release, final_release, extra_repository_list
|
||||||
Initialize OpenUpgrade Wizard Environment based on the initial and
|
):
|
||||||
|
"""Initialize OpenUpgrade Wizard Environment based on the initial and
|
||||||
the final release of Odoo you want to migrate.
|
the final release of Odoo you want to migrate.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
@ -71,28 +82,28 @@ def init(ctx, initial_release, final_release, extra_repository_list):
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add all upgrade steps
|
# Add all upgrade steps
|
||||||
count = 2
|
step_nbr = 2
|
||||||
for odoo_version in odoo_versions[1:]:
|
for odoo_version in odoo_versions[1:]:
|
||||||
steps.append(
|
steps.append(
|
||||||
{
|
{
|
||||||
"name": count,
|
"name": step_nbr,
|
||||||
"action": "upgrade",
|
"action": "upgrade",
|
||||||
"release": odoo_version["release"],
|
"release": odoo_version["release"],
|
||||||
"complete_name": "step_%s__upgrade__%s"
|
"complete_name": "step_%s__upgrade__%s"
|
||||||
% (str(count).rjust(2, "0"), odoo_version["release"]),
|
% (str(step_nbr).rjust(2, "0"), odoo_version["release"]),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
count += 1
|
step_nbr += 1
|
||||||
|
|
||||||
# add final update step
|
# add final update step
|
||||||
count += 1
|
if len(odoo_versions) > 1:
|
||||||
steps.append(
|
steps.append(
|
||||||
{
|
{
|
||||||
"name": count,
|
"name": step_nbr,
|
||||||
"action": "update",
|
"action": "update",
|
||||||
"release": odoo_versions[-1]["release"],
|
"release": odoo_versions[-1]["release"],
|
||||||
"complete_name": "step_%s__update__%s"
|
"complete_name": "step_%s__update__%s"
|
||||||
% (str(count).rjust(2, "0"), odoo_versions[-1]["release"]),
|
% (str(step_nbr).rjust(2, "0"), odoo_versions[-1]["release"]),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -100,17 +111,40 @@ def init(ctx, initial_release, final_release, extra_repository_list):
|
||||||
ensure_folder_exists(ctx.obj["src_folder_path"])
|
ensure_folder_exists(ctx.obj["src_folder_path"])
|
||||||
|
|
||||||
# 4. ensure filestore folder exists
|
# 4. ensure filestore folder exists
|
||||||
ensure_folder_exists(ctx.obj["filestore_folder_path"])
|
ensure_folder_exists(
|
||||||
|
ctx.obj["filestore_folder_path"], mode="777", git_ignore_content=True
|
||||||
|
)
|
||||||
|
|
||||||
# 5. ensure main configuration file exists
|
# 5. ensure postgres data folder exists
|
||||||
|
ensure_folder_exists(
|
||||||
|
ctx.obj["postgres_folder_path"].parent,
|
||||||
|
mode="777",
|
||||||
|
git_ignore_content=True,
|
||||||
|
)
|
||||||
|
ensure_folder_exists(
|
||||||
|
ctx.obj["postgres_folder_path"],
|
||||||
|
mode="777",
|
||||||
|
)
|
||||||
|
|
||||||
|
# 6. ensure main configuration file exists
|
||||||
ensure_file_exists_from_template(
|
ensure_file_exists_from_template(
|
||||||
ctx.obj["config_file_path"],
|
ctx.obj["config_file_path"],
|
||||||
templates.CONFIG_YML_TEMPLATE,
|
templates.CONFIG_YML_TEMPLATE,
|
||||||
|
project_name=project_name,
|
||||||
steps=steps,
|
steps=steps,
|
||||||
odoo_versions=odoo_versions,
|
odoo_versions=odoo_versions,
|
||||||
)
|
)
|
||||||
|
|
||||||
# 6. Create one folder per version and add files
|
# 7. Ensure module list file exists
|
||||||
|
ensure_file_exists_from_template(
|
||||||
|
ctx.obj["module_file_path"],
|
||||||
|
templates.MODULES_CSV_TEMPLATE,
|
||||||
|
project_name=project_name,
|
||||||
|
steps=steps,
|
||||||
|
odoo_versions=odoo_versions,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 8. Create one folder per version and add files
|
||||||
for odoo_version in odoo_versions:
|
for odoo_version in odoo_versions:
|
||||||
# Create main path for each version
|
# Create main path for each version
|
||||||
path_version = get_odoo_env_path(ctx, odoo_version)
|
path_version = get_odoo_env_path(ctx, odoo_version)
|
||||||
|
|
@ -150,7 +184,12 @@ def init(ctx, initial_release, final_release, extra_repository_list):
|
||||||
odoo_version=odoo_version,
|
odoo_version=odoo_version,
|
||||||
)
|
)
|
||||||
|
|
||||||
# 6. Create one folder per step and add files
|
# Create 'src' folder that will contain all the odoo code
|
||||||
|
ensure_folder_exists(
|
||||||
|
path_version / Path("src"), git_ignore_content=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# 9. Create one folder per step and add files
|
||||||
ensure_folder_exists(ctx.obj["script_folder_path"])
|
ensure_folder_exists(ctx.obj["script_folder_path"])
|
||||||
|
|
||||||
for step in steps:
|
for step in steps:
|
||||||
|
|
|
||||||
75
odoo_openupgrade_wizard/cli_install_from_csv.py
Normal file
75
odoo_openupgrade_wizard/cli_install_from_csv.py
Normal file
|
|
@ -0,0 +1,75 @@
|
||||||
|
import click
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
|
database_option,
|
||||||
|
get_migration_step_from_options,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import (
|
||||||
|
get_odoo_modules_from_csv,
|
||||||
|
kill_odoo,
|
||||||
|
run_odoo,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo_instance import OdooInstance
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import ensure_database
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@database_option
|
||||||
|
@click.pass_context
|
||||||
|
def install_from_csv(ctx, database):
|
||||||
|
migration_step = get_migration_step_from_options(ctx, 1)
|
||||||
|
ensure_database(ctx, database, state="present")
|
||||||
|
|
||||||
|
# Get modules list from the CSV file
|
||||||
|
module_names = get_odoo_modules_from_csv(ctx.obj["module_file_path"])
|
||||||
|
module_names.sort()
|
||||||
|
logger.info("Found %d modules." % (len(module_names)))
|
||||||
|
logger.debug(module_names)
|
||||||
|
|
||||||
|
try:
|
||||||
|
logger.info("Install 'base' module on %s database ..." % (database))
|
||||||
|
run_odoo(
|
||||||
|
ctx,
|
||||||
|
migration_step,
|
||||||
|
database=database,
|
||||||
|
detached_container=True,
|
||||||
|
init="base",
|
||||||
|
)
|
||||||
|
odoo_instance = OdooInstance(ctx, database)
|
||||||
|
|
||||||
|
default_country_code = ctx.obj["config"].get(
|
||||||
|
"odoo_default_country_code", False
|
||||||
|
)
|
||||||
|
if "account" in module_names and default_country_code:
|
||||||
|
# Then, set correct country to the company of the current user
|
||||||
|
# Otherwise, due to poor design of Odoo, when installing account
|
||||||
|
# the US localization will be installed.
|
||||||
|
# (l10n_us + l10n_generic_coa)
|
||||||
|
|
||||||
|
countries = odoo_instance.browse_by_search(
|
||||||
|
"res.country",
|
||||||
|
[("code", "=", default_country_code)],
|
||||||
|
)
|
||||||
|
if len(countries) != 1:
|
||||||
|
raise Exception(
|
||||||
|
"Unable to find a country, based on the code %s."
|
||||||
|
" countries found : %s "
|
||||||
|
% (
|
||||||
|
default_country_code,
|
||||||
|
", ".join([x.name for x in countries]),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
"Configuring country of the main company with #%d - %s"
|
||||||
|
% (countries[0].id, countries[0].name)
|
||||||
|
)
|
||||||
|
odoo_instance.env.user.company_id.country_id = countries[0].id
|
||||||
|
|
||||||
|
# Install modules
|
||||||
|
odoo_instance.install_modules(module_names)
|
||||||
|
|
||||||
|
except (KeyboardInterrupt, SystemExit):
|
||||||
|
logger.info("Received Keyboard Interrupt or System Exiting...")
|
||||||
|
finally:
|
||||||
|
kill_odoo(ctx, migration_step)
|
||||||
110
odoo_openupgrade_wizard/cli_options.py
Normal file
110
odoo_openupgrade_wizard/cli_options.py
Normal file
|
|
@ -0,0 +1,110 @@
|
||||||
|
import click
|
||||||
|
|
||||||
|
|
||||||
|
def releases_options(function):
|
||||||
|
function = click.option(
|
||||||
|
"-r",
|
||||||
|
"--releases",
|
||||||
|
type=str,
|
||||||
|
help="Coma-separated values of odoo releases for which"
|
||||||
|
" you want to perform the operation.",
|
||||||
|
)(function)
|
||||||
|
return function
|
||||||
|
|
||||||
|
|
||||||
|
def step_option(function):
|
||||||
|
function = click.option(
|
||||||
|
"-s",
|
||||||
|
"--step",
|
||||||
|
required=True,
|
||||||
|
prompt=True,
|
||||||
|
type=int,
|
||||||
|
help="Migration step for which you want to perform the operation.",
|
||||||
|
)(function)
|
||||||
|
return function
|
||||||
|
|
||||||
|
|
||||||
|
def first_step_option(function):
|
||||||
|
function = click.option(
|
||||||
|
"--first-step",
|
||||||
|
type=int,
|
||||||
|
help="First step for which to perform the operation",
|
||||||
|
)(function)
|
||||||
|
return function
|
||||||
|
|
||||||
|
|
||||||
|
def last_step_option(function):
|
||||||
|
function = click.option(
|
||||||
|
"--last-step",
|
||||||
|
type=int,
|
||||||
|
help="Last step for which to perform the operation",
|
||||||
|
)(function)
|
||||||
|
return function
|
||||||
|
|
||||||
|
|
||||||
|
def database_option(function):
|
||||||
|
function = click.option(
|
||||||
|
"-d",
|
||||||
|
"--database",
|
||||||
|
type=str,
|
||||||
|
help="Odoo Database for which you want to perform the operation.",
|
||||||
|
)(function)
|
||||||
|
return function
|
||||||
|
|
||||||
|
|
||||||
|
def database_option_required(function):
|
||||||
|
function = click.option(
|
||||||
|
"-d",
|
||||||
|
"--database",
|
||||||
|
required=True,
|
||||||
|
prompt=True,
|
||||||
|
type=str,
|
||||||
|
help="Odoo Database for which you want to perform the operation.",
|
||||||
|
)(function)
|
||||||
|
return function
|
||||||
|
|
||||||
|
|
||||||
|
def get_odoo_versions_from_options(ctx, releases_arg):
|
||||||
|
|
||||||
|
if not releases_arg:
|
||||||
|
return ctx.obj["config"]["odoo_versions"]
|
||||||
|
else:
|
||||||
|
odoo_versions = []
|
||||||
|
releases = [float(x) for x in releases_arg.split(",")]
|
||||||
|
for odoo_version in ctx.obj["config"]["odoo_versions"]:
|
||||||
|
if odoo_version["release"] in releases:
|
||||||
|
odoo_versions.append(odoo_version)
|
||||||
|
return odoo_versions
|
||||||
|
|
||||||
|
|
||||||
|
def get_migration_step_from_options(ctx, step_arg):
|
||||||
|
step = float(step_arg)
|
||||||
|
for migration_step in ctx.obj["config"]["migration_steps"]:
|
||||||
|
if migration_step["name"] == step:
|
||||||
|
return migration_step
|
||||||
|
raise ValueError(
|
||||||
|
"No migration step found in configuration for step %s" % step_arg
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_migration_steps_from_options(ctx, first_step_arg, last_step_arg):
|
||||||
|
result = []
|
||||||
|
if first_step_arg:
|
||||||
|
first_step = int(first_step_arg)
|
||||||
|
else:
|
||||||
|
first_step = ctx.obj["config"]["migration_steps"][0]["name"]
|
||||||
|
if last_step_arg:
|
||||||
|
last_step = int(last_step_arg)
|
||||||
|
else:
|
||||||
|
last_step = ctx.obj["config"]["migration_steps"][-1]["name"]
|
||||||
|
for migration_step in ctx.obj["config"]["migration_steps"]:
|
||||||
|
if migration_step["name"] in list(range(first_step, last_step + 1)):
|
||||||
|
result.append(migration_step.copy())
|
||||||
|
if result:
|
||||||
|
return result
|
||||||
|
|
||||||
|
raise ValueError(
|
||||||
|
"Unable to define steps in configuration"
|
||||||
|
" from options. (first step %s ; last step %s)"
|
||||||
|
% (first_step_arg, last_step_arg)
|
||||||
|
)
|
||||||
53
odoo_openupgrade_wizard/cli_run.py
Normal file
53
odoo_openupgrade_wizard/cli_run.py
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
import click
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
|
database_option,
|
||||||
|
get_migration_step_from_options,
|
||||||
|
step_option,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import kill_odoo, run_odoo
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import ensure_database
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@step_option
|
||||||
|
@database_option
|
||||||
|
@click.option(
|
||||||
|
"--stop-after-init",
|
||||||
|
is_flag=True,
|
||||||
|
default=False,
|
||||||
|
help="Stop after init. Mainly used"
|
||||||
|
" for test purpose, for commands that are using input()"
|
||||||
|
" function to stop.",
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"--init-modules",
|
||||||
|
type=str,
|
||||||
|
help="List of modules to install. Equivalent to -i odoo options.",
|
||||||
|
)
|
||||||
|
@click.pass_context
|
||||||
|
def run(ctx, step, database, stop_after_init, init_modules):
|
||||||
|
|
||||||
|
migration_step = get_migration_step_from_options(ctx, step)
|
||||||
|
ensure_database(ctx, database, state="present")
|
||||||
|
try:
|
||||||
|
run_odoo(
|
||||||
|
ctx,
|
||||||
|
migration_step,
|
||||||
|
database=database,
|
||||||
|
detached_container=not stop_after_init,
|
||||||
|
init=init_modules,
|
||||||
|
stop_after_init=stop_after_init,
|
||||||
|
)
|
||||||
|
if not stop_after_init:
|
||||||
|
logger.info(
|
||||||
|
"Odoo is available on your host at"
|
||||||
|
" http://localhost:%s"
|
||||||
|
% ctx.obj["config"]["odoo_host_xmlrpc_port"]
|
||||||
|
)
|
||||||
|
input("Press 'Enter' to kill the odoo container and exit ...")
|
||||||
|
except (KeyboardInterrupt, SystemExit):
|
||||||
|
logger.info("Received Keyboard Interrupt or System Exiting...")
|
||||||
|
finally:
|
||||||
|
kill_odoo(ctx, migration_step)
|
||||||
41
odoo_openupgrade_wizard/cli_upgrade.py
Normal file
41
odoo_openupgrade_wizard/cli_upgrade.py
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
import click
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli_options import (
|
||||||
|
database_option_required,
|
||||||
|
first_step_option,
|
||||||
|
get_migration_steps_from_options,
|
||||||
|
last_step_option,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import (
|
||||||
|
execute_click_odoo_python_files,
|
||||||
|
kill_odoo,
|
||||||
|
run_odoo,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@first_step_option
|
||||||
|
@last_step_option
|
||||||
|
@database_option_required
|
||||||
|
@click.pass_context
|
||||||
|
def upgrade(ctx, first_step, last_step, database):
|
||||||
|
|
||||||
|
migration_steps = get_migration_steps_from_options(
|
||||||
|
ctx, first_step, last_step
|
||||||
|
)
|
||||||
|
for migration_step in migration_steps:
|
||||||
|
try:
|
||||||
|
run_odoo(
|
||||||
|
ctx,
|
||||||
|
migration_step,
|
||||||
|
database=database,
|
||||||
|
detached_container=False,
|
||||||
|
update="all",
|
||||||
|
stop_after_init=True,
|
||||||
|
)
|
||||||
|
except (KeyboardInterrupt, SystemExit):
|
||||||
|
logger.info("Received Keyboard Interrupt or System Exiting...")
|
||||||
|
finally:
|
||||||
|
kill_odoo(ctx, migration_step)
|
||||||
|
execute_click_odoo_python_files(ctx, database, migration_step)
|
||||||
|
|
@ -1,33 +1,35 @@
|
||||||
# from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
# List of the series of odoo
|
|
||||||
# python version is defined, based on the OCA CI.
|
|
||||||
# https://github.com/OCA/oca-addons-repo-template/blob/master/src/.github/workflows/%7B%25%20if%20ci%20%3D%3D%20'GitHub'%20%25%7Dtest.yml%7B%25%20endif%20%25%7D.jinja
|
|
||||||
_ODOO_VERSION_TEMPLATES = [
|
_ODOO_VERSION_TEMPLATES = [
|
||||||
{
|
{
|
||||||
"release": 8.0,
|
"release": 8.0,
|
||||||
"python_major_version": "python2",
|
"python_major_version": "python2",
|
||||||
"python_libraries": ["openupgradelib"],
|
"python_libraries": [],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"release": 9.0,
|
"release": 9.0,
|
||||||
"python_major_version": "python2",
|
"python_major_version": "python2",
|
||||||
"python_libraries": ["openupgradelib"],
|
"python_libraries": ["openupgradelib==2.0.0"],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"release": 10.0,
|
"release": 10.0,
|
||||||
"python_major_version": "python2",
|
"python_major_version": "python2",
|
||||||
"python_libraries": ["openupgradelib"],
|
"python_libraries": ["openupgradelib==2.0.0"],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"release": 11.0,
|
"release": 11.0,
|
||||||
"python_major_version": "python3",
|
"python_major_version": "python3",
|
||||||
"python_libraries": ["openupgradelib"],
|
"python_libraries": ["openupgradelib==2.0.0"],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"release": 12.0,
|
"release": 12.0,
|
||||||
"python_major_version": "python3",
|
"python_major_version": "python3",
|
||||||
"python_libraries": ["openupgradelib"],
|
"python_libraries": [
|
||||||
|
"git+https://github.com/grap/openupgradelib.git"
|
||||||
|
"@2.0.1#egg=openupgradelib"
|
||||||
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"release": 13.0,
|
"release": 13.0,
|
||||||
|
|
@ -78,10 +80,151 @@ def get_odoo_versions(initial_release: float, final_release: float) -> list:
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
# def _get_repo_file(ctx, step):
|
def get_odoo_run_command(migration_step: dict) -> str:
|
||||||
# return ctx.obj["repo_folder_path"] / Path("%s.yml" % (step["version"]))
|
"""Return the name of the command to execute, depending on the migration
|
||||||
|
step. (odoo-bin, odoo.py, etc...)"""
|
||||||
|
if migration_step["release"] >= 10.0:
|
||||||
|
return "odoo-bin"
|
||||||
|
|
||||||
|
return "odoo.py"
|
||||||
|
|
||||||
|
|
||||||
def get_odoo_env_path(ctx, odoo_version):
|
def get_odoo_folder(migration_step: dict) -> str:
|
||||||
folder_name = "env_%s" % str(odoo_version["release"]).rjust(4, "0")
|
"""return the main odoo folder, depending on the migration step.
|
||||||
return ctx.obj["src_folder_path"] / folder_name
|
(./src/odoo, ./src/openupgrade, ...)"""
|
||||||
|
|
||||||
|
if migration_step["action"] == "update":
|
||||||
|
return "src/odoo"
|
||||||
|
|
||||||
|
if migration_step["release"] >= 14.0:
|
||||||
|
return "src/odoo"
|
||||||
|
|
||||||
|
return "src/openupgrade"
|
||||||
|
|
||||||
|
|
||||||
|
def get_base_module_folder(migration_step: dict) -> str:
|
||||||
|
"""return the name of the folder (odoo, openerp, etc...)
|
||||||
|
where the 'base' module is, depending on the migration_step"""
|
||||||
|
if migration_step["release"] >= 10.0:
|
||||||
|
return "odoo"
|
||||||
|
|
||||||
|
return "openerp"
|
||||||
|
|
||||||
|
|
||||||
|
def skip_addon_path(migration_step: dict, path: Path) -> bool:
|
||||||
|
"""return a boolean to indicate if the addon_path should be
|
||||||
|
remove (during the generation of the addons_path).
|
||||||
|
Note : if repo.yml contains both odoo and openupgrade repo
|
||||||
|
we skip one of them (before the V14 refactoring)"""
|
||||||
|
return (
|
||||||
|
str(path).endswith("/src/odoo")
|
||||||
|
or str(path).endswith("src/openupgrade")
|
||||||
|
) and migration_step["release"] < 14.0
|
||||||
|
|
||||||
|
|
||||||
|
def get_server_wide_modules_upgrade(migration_step: dict) -> list:
|
||||||
|
"""return a list of modules to load, depending on the migration step."""
|
||||||
|
if (
|
||||||
|
migration_step["release"] >= 14.0
|
||||||
|
and migration_step["action"] == "upgrade"
|
||||||
|
):
|
||||||
|
return ["openupgrade_framework"]
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
def get_upgrade_analysis_module(migration_step: dict) -> str:
|
||||||
|
"""return the upgrade_analysis module name"""
|
||||||
|
|
||||||
|
if migration_step["release"] >= 14.0:
|
||||||
|
# (Module in OCA/server-tools)
|
||||||
|
return "upgrade_analysis"
|
||||||
|
|
||||||
|
# (module in OCA/OpenUpgrade/odoo/addons/)
|
||||||
|
return "openupgrade_records"
|
||||||
|
|
||||||
|
|
||||||
|
def generate_records(odoo_instance, migration_step: dict):
|
||||||
|
logger.info(
|
||||||
|
"Generate Records in release %s ..."
|
||||||
|
" (It can take a while)" % (migration_step["release"])
|
||||||
|
)
|
||||||
|
if migration_step["release"] < 14.0:
|
||||||
|
wizard = odoo_instance.browse_by_create(
|
||||||
|
"openupgrade.generate.records.wizard", {}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
wizard = odoo_instance.browse_by_create(
|
||||||
|
"upgrade.generate.record.wizard", {}
|
||||||
|
)
|
||||||
|
wizard.generate()
|
||||||
|
|
||||||
|
|
||||||
|
def get_installable_odoo_modules(odoo_instance, migraton_step):
|
||||||
|
if migraton_step["release"] < 14.0:
|
||||||
|
# TODO, improve that algorithm, if possible
|
||||||
|
modules = odoo_instance.browse_by_search(
|
||||||
|
"ir.module.module",
|
||||||
|
[
|
||||||
|
("state", "!=", "uninstallable"),
|
||||||
|
("website", "not ilike", "github/OCA"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# We use here a new feature implemented in the upgrade_analysis
|
||||||
|
# in a wizard to install odoo modules
|
||||||
|
wizard = odoo_instance.browse_by_create("upgrade.install.wizard", {})
|
||||||
|
wizard.select_odoo_modules()
|
||||||
|
modules = wizard.module_ids
|
||||||
|
|
||||||
|
return modules.mapped("name")
|
||||||
|
|
||||||
|
|
||||||
|
def generate_analysis_files(
|
||||||
|
final_odoo_instance, final_step, initial_odoo_host, initial_database
|
||||||
|
):
|
||||||
|
logger.info(
|
||||||
|
"Generate analysis files for"
|
||||||
|
" the modules installed on %s ..." % (initial_database)
|
||||||
|
)
|
||||||
|
proxy_vals = {
|
||||||
|
"name": "Proxy to Previous Release",
|
||||||
|
"server": initial_odoo_host,
|
||||||
|
"port": "8069",
|
||||||
|
"database": initial_database,
|
||||||
|
"username": "admin",
|
||||||
|
"password": "admin",
|
||||||
|
}
|
||||||
|
if final_step["release"] < 14.0:
|
||||||
|
logger.info("> Create proxy ...")
|
||||||
|
proxy = final_odoo_instance.browse_by_create(
|
||||||
|
"openupgrade.comparison.config", proxy_vals
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info("> Create wizard ...")
|
||||||
|
wizard = final_odoo_instance.browse_by_create(
|
||||||
|
"openupgrade.analysis.wizard",
|
||||||
|
{
|
||||||
|
"server_config": proxy.id,
|
||||||
|
"write_files": True,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
logger.info("> Launch analysis. This can take a while ...")
|
||||||
|
wizard.get_communication()
|
||||||
|
|
||||||
|
else:
|
||||||
|
logger.info("> Create proxy ...")
|
||||||
|
proxy = final_odoo_instance.browse_by_create(
|
||||||
|
"upgrade.comparison.config", proxy_vals
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info("> Create wizard ...")
|
||||||
|
analysis = final_odoo_instance.browse_by_create(
|
||||||
|
"upgrade.analysis",
|
||||||
|
{
|
||||||
|
"config_id": proxy.id,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info("> Launch analysis. This can take a while ...")
|
||||||
|
analysis.analyze()
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,13 @@
|
||||||
CONFIG_YML_TEMPLATE = """odoo_versions:
|
CONFIG_YML_TEMPLATE = """
|
||||||
|
project_name: {{ project_name }}
|
||||||
|
|
||||||
|
postgres_image_name: postgres:13
|
||||||
|
postgres_container_name: {{project_name}}-db
|
||||||
|
|
||||||
|
odoo_host_xmlrpc_port: 9069
|
||||||
|
odoo_default_country_code: FR
|
||||||
|
|
||||||
|
odoo_versions:
|
||||||
{% for odoo_version in odoo_versions %}
|
{% for odoo_version in odoo_versions %}
|
||||||
- release: {{ odoo_version['release'] }}
|
- release: {{ odoo_version['release'] }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
|
@ -61,12 +70,20 @@ PYTHON_REQUIREMENTS_TXT_TEMPLATE = """
|
||||||
{%- for python_librairy in python_libraries -%}
|
{%- for python_librairy in python_libraries -%}
|
||||||
{{ python_librairy }}
|
{{ python_librairy }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
odoorpc
|
||||||
|
click-odoo
|
||||||
"""
|
"""
|
||||||
|
|
||||||
DEBIAN_REQUIREMENTS_TXT_TEMPLATE = ""
|
DEBIAN_REQUIREMENTS_TXT_TEMPLATE = """
|
||||||
|
git
|
||||||
|
"""
|
||||||
|
|
||||||
ODOO_CONFIG_TEMPLATE = ""
|
ODOO_CONFIG_TEMPLATE = ""
|
||||||
|
|
||||||
|
|
||||||
|
# Technical Notes:
|
||||||
|
# - We set apt-get update || true, because for some release (at least odoo:10)
|
||||||
|
# the command update fail, because of obsolete postgresql repository.
|
||||||
DOCKERFILE_TEMPLATE = """
|
DOCKERFILE_TEMPLATE = """
|
||||||
FROM odoo:{{ odoo_version['release'] }}
|
FROM odoo:{{ odoo_version['release'] }}
|
||||||
MAINTAINER GRAP, Coop It Easy
|
MAINTAINER GRAP, Coop It Easy
|
||||||
|
|
@ -81,12 +98,12 @@ COPY debian_requirements.txt /debian_requirements.txt
|
||||||
COPY python_requirements.txt /python_requirements.txt
|
COPY python_requirements.txt /python_requirements.txt
|
||||||
|
|
||||||
# 2. Install extra debian packages
|
# 2. Install extra debian packages
|
||||||
RUN apt-get update &&\
|
RUN apt-get update || true &&\
|
||||||
xargs apt-get install -y --no-install-recommends <debian_requirements.txt
|
xargs apt-get install -y --no-install-recommends <debian_requirements.txt
|
||||||
|
|
||||||
# 3. Install extra Python librairies
|
# 3. Install extra Python librairies
|
||||||
RUN {{ odoo_version["python_major_version"] }}\
|
RUN {{ odoo_version["python_major_version"] }}\
|
||||||
-m pip install -e python_requirements.txt
|
-m pip install -r python_requirements.txt
|
||||||
|
|
||||||
# Reset to odoo user to run the container
|
# Reset to odoo user to run the container
|
||||||
USER odoo
|
USER odoo
|
||||||
|
|
@ -95,6 +112,65 @@ USER odoo
|
||||||
PRE_MIGRATION_SQL_TEMPLATE = ""
|
PRE_MIGRATION_SQL_TEMPLATE = ""
|
||||||
|
|
||||||
POST_MIGRATION_PY_TEMPLATE = """
|
POST_MIGRATION_PY_TEMPLATE = """
|
||||||
def main(self, step):
|
import logging
|
||||||
pass
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
_logger.info("Executing post-migration.py script ...")
|
||||||
|
|
||||||
|
env = env # noqa: F821
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
GIT_IGNORE_CONTENT = """
|
||||||
|
*
|
||||||
|
!.gitignore
|
||||||
|
"""
|
||||||
|
|
||||||
|
MODULES_CSV_TEMPLATE = """
|
||||||
|
base,Base
|
||||||
|
account,Account Module
|
||||||
|
web_responsive,Web Responsive Module
|
||||||
|
"""
|
||||||
|
|
||||||
|
ANALYSIS_TEMPLATE = """
|
||||||
|
<html>
|
||||||
|
<body>
|
||||||
|
<h1>Migration Analysis</h1>
|
||||||
|
<table border="1" width="100%">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Initial Release</th>
|
||||||
|
<th>Final Release</th>
|
||||||
|
<th>Project Name</th>
|
||||||
|
<th>Analysis Date</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td>{{ ctx.obj["config"]["odoo_versions"][0]["release"] }}</td>
|
||||||
|
<td>{{ ctx.obj["config"]["odoo_versions"][-1]["release"] }}</td>
|
||||||
|
<td>{{ ctx.obj["config"]["project_name"] }}</td>
|
||||||
|
<td>{{ current_date }}</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<table border="1" width="100%">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th> - </th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{%- for odoo_module in analysis.modules -%}
|
||||||
|
<tr>
|
||||||
|
<td>{{odoo_module.name}} ({{odoo_module.module_type}})
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
"""
|
"""
|
||||||
|
|
|
||||||
90
odoo_openupgrade_wizard/tools_docker.py
Normal file
90
odoo_openupgrade_wizard/tools_docker.py
Normal file
|
|
@ -0,0 +1,90 @@
|
||||||
|
import docker
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
|
||||||
|
def get_docker_client():
|
||||||
|
return docker.from_env()
|
||||||
|
|
||||||
|
|
||||||
|
def build_image(path, tag):
|
||||||
|
logger.debug(
|
||||||
|
"Building image named based on %s/Dockerfile."
|
||||||
|
" This can take a big while ..." % (path)
|
||||||
|
)
|
||||||
|
debug_docker_command = "docker build %s --tag %s" % (path, tag)
|
||||||
|
logger.debug("DOCKER COMMAND:\n %s" % debug_docker_command)
|
||||||
|
docker_client = get_docker_client()
|
||||||
|
image = docker_client.images.build(
|
||||||
|
path=str(path),
|
||||||
|
tag=tag,
|
||||||
|
)
|
||||||
|
logger.debug("Image build.")
|
||||||
|
return image
|
||||||
|
|
||||||
|
|
||||||
|
def run_container(
|
||||||
|
image_name,
|
||||||
|
container_name,
|
||||||
|
command=None,
|
||||||
|
ports={},
|
||||||
|
volumes={},
|
||||||
|
environments={},
|
||||||
|
links={},
|
||||||
|
detach=False,
|
||||||
|
auto_remove=False,
|
||||||
|
):
|
||||||
|
client = get_docker_client()
|
||||||
|
|
||||||
|
logger.debug("Launching Docker container named %s ..." % (image_name))
|
||||||
|
debug_docker_command = "docker run --name %s\\\n" % (container_name)
|
||||||
|
|
||||||
|
for k, v in ports.items():
|
||||||
|
debug_docker_command += " --publish {k}:{v}\\\n".format(k=k, v=v)
|
||||||
|
for k, v in volumes.items():
|
||||||
|
debug_docker_command += " --volume {k}:{v}\\\n".format(
|
||||||
|
k=str(k), v=str(v)
|
||||||
|
)
|
||||||
|
for k, v in environments.items():
|
||||||
|
debug_docker_command += " --env {k}={v}\\\n".format(k=k, v=v)
|
||||||
|
for k, v in links.items():
|
||||||
|
debug_docker_command += " --link {k}:{v}\\\n".format(k=k, v=v)
|
||||||
|
if auto_remove:
|
||||||
|
debug_docker_command += " --rm"
|
||||||
|
if detach:
|
||||||
|
debug_docker_command += " --detach"
|
||||||
|
debug_docker_command += " %s" % (image_name)
|
||||||
|
if command:
|
||||||
|
debug_docker_command += " \\\n%s" % (command)
|
||||||
|
logger.debug("DOCKER COMMAND:\n %s" % debug_docker_command)
|
||||||
|
|
||||||
|
container = client.containers.run(
|
||||||
|
image_name,
|
||||||
|
name=container_name,
|
||||||
|
command=command,
|
||||||
|
ports={x: y for y, x in ports.items()},
|
||||||
|
volumes=[str(k) + ":" + str(v) for k, v in volumes.items()],
|
||||||
|
environment=environments,
|
||||||
|
links=links,
|
||||||
|
detach=detach,
|
||||||
|
auto_remove=auto_remove,
|
||||||
|
)
|
||||||
|
if detach:
|
||||||
|
logger.debug("Container %s launched." % image_name)
|
||||||
|
elif auto_remove:
|
||||||
|
logger.debug("Container closed.")
|
||||||
|
|
||||||
|
return container
|
||||||
|
|
||||||
|
|
||||||
|
def kill_container(container_name):
|
||||||
|
client = get_docker_client()
|
||||||
|
containers = client.containers.list(
|
||||||
|
all=True,
|
||||||
|
filters={"name": container_name},
|
||||||
|
)
|
||||||
|
for container in containers:
|
||||||
|
logger.debug(
|
||||||
|
"Stop container %s, based on image '%s'."
|
||||||
|
% (container.name, ",".join(container.image.tags))
|
||||||
|
)
|
||||||
|
container.stop()
|
||||||
326
odoo_openupgrade_wizard/tools_odoo.py
Normal file
326
odoo_openupgrade_wizard/tools_odoo.py
Normal file
|
|
@ -0,0 +1,326 @@
|
||||||
|
import configparser
|
||||||
|
import csv
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.configuration_version_dependant import (
|
||||||
|
get_base_module_folder,
|
||||||
|
get_odoo_folder,
|
||||||
|
get_odoo_run_command,
|
||||||
|
get_server_wide_modules_upgrade,
|
||||||
|
skip_addon_path,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_docker import kill_container, run_container
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import get_postgres_container
|
||||||
|
from odoo_openupgrade_wizard.tools_system import get_script_folder
|
||||||
|
|
||||||
|
|
||||||
|
def get_odoo_addons_path(ctx, root_path: Path, migration_step: dict) -> str:
|
||||||
|
odoo_version = get_odoo_version_from_migration_step(ctx, migration_step)
|
||||||
|
repo_file = get_odoo_env_path(ctx, odoo_version) / Path("repos.yml")
|
||||||
|
base_module_folder = get_base_module_folder(migration_step)
|
||||||
|
stream = open(repo_file, "r")
|
||||||
|
data = yaml.safe_load(stream)
|
||||||
|
data = data
|
||||||
|
|
||||||
|
addons_path = []
|
||||||
|
for key in data.keys():
|
||||||
|
path = root_path / Path(key)
|
||||||
|
if str(path).endswith(get_odoo_folder(migration_step)):
|
||||||
|
# Add two folder for odoo folder
|
||||||
|
addons_path.append(path / Path("addons"))
|
||||||
|
addons_path.append(
|
||||||
|
path / Path(base_module_folder) / Path("addons")
|
||||||
|
)
|
||||||
|
elif skip_addon_path(migration_step, path):
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
addons_path.append(path)
|
||||||
|
|
||||||
|
return addons_path
|
||||||
|
|
||||||
|
|
||||||
|
def get_odoo_env_path(ctx, odoo_version: dict) -> Path:
|
||||||
|
folder_name = "env_%s" % str(odoo_version["release"]).rjust(4, "0")
|
||||||
|
return ctx.obj["src_folder_path"] / folder_name
|
||||||
|
|
||||||
|
|
||||||
|
def get_docker_image_tag(ctx, odoo_version: dict) -> str:
|
||||||
|
"""Return a docker image tag, based on project name and odoo release"""
|
||||||
|
return "odoo-openupgrade-wizard-image__%s__%s" % (
|
||||||
|
ctx.obj["config"]["project_name"],
|
||||||
|
str(odoo_version["release"]).rjust(4, "0"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_docker_container_name(ctx, migration_step: dict) -> str:
|
||||||
|
"""Return a docker container name, based on project name,
|
||||||
|
odoo release and migration step"""
|
||||||
|
return "odoo-openupgrade-wizard-container__%s__%s__step-%s" % (
|
||||||
|
ctx.obj["config"]["project_name"],
|
||||||
|
str(migration_step["release"]).rjust(4, "0"),
|
||||||
|
str(migration_step["name"]).rjust(2, "0"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_odoo_version_from_migration_step(ctx, migration_step: dict) -> dict:
|
||||||
|
for odoo_version in ctx.obj["config"]["odoo_versions"]:
|
||||||
|
if odoo_version["release"] == migration_step["release"]:
|
||||||
|
return odoo_version
|
||||||
|
# TODO, improve exception
|
||||||
|
raise Exception
|
||||||
|
|
||||||
|
|
||||||
|
def generate_odoo_command(
|
||||||
|
ctx,
|
||||||
|
migration_step: dict,
|
||||||
|
database: str,
|
||||||
|
update: str,
|
||||||
|
init: str,
|
||||||
|
stop_after_init: bool,
|
||||||
|
shell: bool,
|
||||||
|
demo: bool,
|
||||||
|
) -> str:
|
||||||
|
database_cmd = database and "--database %s" % database or ""
|
||||||
|
update_cmd = update and "--update %s" % update or ""
|
||||||
|
init_cmd = init and "--init %s" % init or ""
|
||||||
|
stop_after_init_cmd = stop_after_init and "--stop-after-init" or ""
|
||||||
|
shell_cmd = shell and "shell" or ""
|
||||||
|
demo_cmd = not demo and "--without-demo all" or ""
|
||||||
|
command = (
|
||||||
|
Path("/odoo_env")
|
||||||
|
/ Path(get_odoo_folder(migration_step))
|
||||||
|
/ Path(get_odoo_run_command(migration_step))
|
||||||
|
)
|
||||||
|
result = (
|
||||||
|
f" {command}"
|
||||||
|
f" {shell_cmd}"
|
||||||
|
f" --config /odoo_env/_auto_generated_odoo.cfg"
|
||||||
|
f" {demo_cmd}"
|
||||||
|
f" {database_cmd}"
|
||||||
|
f" {update_cmd}"
|
||||||
|
f" {init_cmd}"
|
||||||
|
f" {stop_after_init_cmd}"
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def generate_odoo_config_file(ctx, migration_step, log_file):
|
||||||
|
"""Create a config file name _auto_generated_odoo.cfg
|
||||||
|
in the according environment (defined by migration_step)
|
||||||
|
This configuration file is a merge of the odoo.cfg file that can
|
||||||
|
contain custom values, and the values required to run the docker container.
|
||||||
|
"""
|
||||||
|
odoo_version = get_odoo_version_from_migration_step(ctx, migration_step)
|
||||||
|
odoo_env_path = get_odoo_env_path(ctx, odoo_version)
|
||||||
|
|
||||||
|
custom_odoo_config_file = odoo_env_path / "odoo.cfg"
|
||||||
|
auto_generated_odoo_config_file = (
|
||||||
|
odoo_env_path / "_auto_generated_odoo.cfg"
|
||||||
|
)
|
||||||
|
|
||||||
|
parser = configparser.RawConfigParser()
|
||||||
|
# Read custom file
|
||||||
|
parser.read(custom_odoo_config_file)
|
||||||
|
|
||||||
|
# compute addons_path
|
||||||
|
addons_path = ",".join(
|
||||||
|
[
|
||||||
|
str(x)
|
||||||
|
for x in get_odoo_addons_path(
|
||||||
|
ctx, Path("/odoo_env"), migration_step
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# compute server wides modules
|
||||||
|
server_wide_modules = parser.get(
|
||||||
|
"options", "server_wide_modules", fallback=[]
|
||||||
|
)
|
||||||
|
server_wide_modules += get_server_wide_modules_upgrade(migration_step)
|
||||||
|
|
||||||
|
# Add required keys
|
||||||
|
if "options" not in parser:
|
||||||
|
parser.add_section("options")
|
||||||
|
parser.set("options", "db_host", "db")
|
||||||
|
parser.set("options", "db_port", 5432)
|
||||||
|
parser.set("options", "db_user", "odoo")
|
||||||
|
parser.set("options", "db_password", "odoo")
|
||||||
|
parser.set("options", "workers", 0)
|
||||||
|
parser.set("options", "data_dir", "/env/filestore/")
|
||||||
|
parser.set("options", "logfile", log_file)
|
||||||
|
parser.set("options", "addons_path", addons_path)
|
||||||
|
if server_wide_modules:
|
||||||
|
parser.set(
|
||||||
|
"options", "server_wide_modules", ",".join(server_wide_modules)
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.write(open(auto_generated_odoo_config_file, "w"))
|
||||||
|
|
||||||
|
|
||||||
|
def run_odoo(
|
||||||
|
ctx,
|
||||||
|
migration_step: dict,
|
||||||
|
detached_container: bool = False,
|
||||||
|
database: str = False,
|
||||||
|
update: str = False,
|
||||||
|
init: str = False,
|
||||||
|
stop_after_init: bool = False,
|
||||||
|
shell: bool = False,
|
||||||
|
demo: bool = False,
|
||||||
|
alternative_xml_rpc_port: int = False,
|
||||||
|
links: dict = {},
|
||||||
|
):
|
||||||
|
# Ensure that Postgres container exist
|
||||||
|
get_postgres_container(ctx)
|
||||||
|
logger.info(
|
||||||
|
"Launching Odoo Container (Release {release}) for {db_text}"
|
||||||
|
" in {action} mode. Demo Data is {demo_text}"
|
||||||
|
" {stop_text} {init_text} {update_text}".format(
|
||||||
|
release=migration_step["release"],
|
||||||
|
db_text=database and "database '%s'" % database or "any databases",
|
||||||
|
action=migration_step["action"] == "update"
|
||||||
|
and "regular"
|
||||||
|
or "OpenUpgrade",
|
||||||
|
demo_text=demo and "enabled" or "disabled",
|
||||||
|
stop_text=stop_after_init and " (stop-after-init)" or "",
|
||||||
|
init_text=init and " (Init : %s)" % init or "",
|
||||||
|
update_text=update and " (Update : %s)" % update or "",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
odoo_version = get_odoo_version_from_migration_step(ctx, migration_step)
|
||||||
|
env_path = ctx.obj["env_folder_path"]
|
||||||
|
odoo_env_path = get_odoo_env_path(ctx, odoo_version)
|
||||||
|
log_file = "/env/log/{}____{}.log".format(
|
||||||
|
ctx.obj["log_prefix"], migration_step["complete_name"]
|
||||||
|
)
|
||||||
|
generate_odoo_config_file(ctx, migration_step, log_file)
|
||||||
|
|
||||||
|
command = generate_odoo_command(
|
||||||
|
ctx,
|
||||||
|
migration_step,
|
||||||
|
database=database,
|
||||||
|
update=update,
|
||||||
|
init=init,
|
||||||
|
stop_after_init=stop_after_init,
|
||||||
|
shell=shell,
|
||||||
|
demo=demo,
|
||||||
|
)
|
||||||
|
|
||||||
|
host_xmlrpc_port = (
|
||||||
|
alternative_xml_rpc_port
|
||||||
|
and alternative_xml_rpc_port
|
||||||
|
or ctx.obj["config"]["odoo_host_xmlrpc_port"]
|
||||||
|
)
|
||||||
|
links.update({ctx.obj["config"]["postgres_container_name"]: "db"})
|
||||||
|
return run_container(
|
||||||
|
get_docker_image_tag(ctx, odoo_version),
|
||||||
|
get_docker_container_name(ctx, migration_step),
|
||||||
|
command=command,
|
||||||
|
ports={
|
||||||
|
host_xmlrpc_port: 8069,
|
||||||
|
},
|
||||||
|
volumes={
|
||||||
|
env_path: "/env/",
|
||||||
|
odoo_env_path: "/odoo_env/",
|
||||||
|
},
|
||||||
|
links=links,
|
||||||
|
detach=detached_container,
|
||||||
|
auto_remove=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def kill_odoo(ctx, migration_step: dict):
|
||||||
|
kill_container(get_docker_container_name(ctx, migration_step))
|
||||||
|
|
||||||
|
|
||||||
|
def execute_click_odoo_python_files(
|
||||||
|
ctx, database: str, migration_step: dict, python_files: list = []
|
||||||
|
):
|
||||||
|
|
||||||
|
if not python_files:
|
||||||
|
# Get post-migration python scripts to execute
|
||||||
|
script_folder = get_script_folder(ctx, migration_step)
|
||||||
|
python_files = [
|
||||||
|
Path("scripts") / Path(migration_step["complete_name"]) / Path(f)
|
||||||
|
for f in os.listdir(script_folder)
|
||||||
|
if os.path.isfile(os.path.join(script_folder, f))
|
||||||
|
and f[-3:] == ".py"
|
||||||
|
]
|
||||||
|
python_files = sorted(python_files)
|
||||||
|
|
||||||
|
# Prepare data information for docker
|
||||||
|
odoo_version = get_odoo_version_from_migration_step(ctx, migration_step)
|
||||||
|
links = {ctx.obj["config"]["postgres_container_name"]: "db"}
|
||||||
|
env_path = ctx.obj["env_folder_path"]
|
||||||
|
odoo_env_path = get_odoo_env_path(ctx, odoo_version)
|
||||||
|
|
||||||
|
# Generate odoo config file
|
||||||
|
log_file = "/env/log/{}____{}__post_migration.log".format(
|
||||||
|
ctx.obj["log_prefix"], migration_step["complete_name"]
|
||||||
|
)
|
||||||
|
generate_odoo_config_file(ctx, migration_step, log_file)
|
||||||
|
|
||||||
|
for python_file in python_files:
|
||||||
|
# TODO, check if we should set python2 for old version of Odoo
|
||||||
|
# or just 'python'
|
||||||
|
command = (
|
||||||
|
"click-odoo"
|
||||||
|
" --database {database}"
|
||||||
|
" --config /odoo_env/_auto_generated_odoo.cfg"
|
||||||
|
" /env/{python_file}"
|
||||||
|
).format(
|
||||||
|
database=database,
|
||||||
|
python_file=str(python_file),
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
logger.info(
|
||||||
|
"Executing script %s / %s"
|
||||||
|
% (migration_step["complete_name"], python_file)
|
||||||
|
)
|
||||||
|
run_container(
|
||||||
|
get_docker_image_tag(ctx, odoo_version),
|
||||||
|
get_docker_container_name(ctx, migration_step),
|
||||||
|
command=command,
|
||||||
|
ports={},
|
||||||
|
volumes={
|
||||||
|
env_path: "/env/",
|
||||||
|
odoo_env_path: "/odoo_env/",
|
||||||
|
},
|
||||||
|
links=links,
|
||||||
|
detach=False,
|
||||||
|
auto_remove=True,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
traceback.print_exc()
|
||||||
|
logger.error(
|
||||||
|
"An error occured. Exiting. %s\n%s"
|
||||||
|
% (e, traceback.print_exception(*sys.exc_info()))
|
||||||
|
)
|
||||||
|
raise e
|
||||||
|
finally:
|
||||||
|
kill_odoo(ctx, migration_step)
|
||||||
|
|
||||||
|
|
||||||
|
def get_odoo_modules_from_csv(module_file_path: Path) -> list:
|
||||||
|
logger.info("Reading '%s' file ..." % module_file_path)
|
||||||
|
module_names = []
|
||||||
|
csvfile = open(module_file_path, "r")
|
||||||
|
spamreader = csv.reader(csvfile, delimiter=",", quotechar='"')
|
||||||
|
for row in spamreader:
|
||||||
|
# Try to guess that a line is not correct
|
||||||
|
if not row:
|
||||||
|
continue
|
||||||
|
if not row[0]:
|
||||||
|
continue
|
||||||
|
if " " in row[0]:
|
||||||
|
continue
|
||||||
|
if any([x.isupper() for x in row[0]]):
|
||||||
|
continue
|
||||||
|
module_names.append(row[0])
|
||||||
|
return module_names
|
||||||
210
odoo_openupgrade_wizard/tools_odoo_instance.py
Normal file
210
odoo_openupgrade_wizard/tools_odoo_instance.py
Normal file
|
|
@ -0,0 +1,210 @@
|
||||||
|
import socket
|
||||||
|
import time
|
||||||
|
|
||||||
|
import odoorpc
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
# Wait for the launch of odoo instance 60 seconds
|
||||||
|
_ODOO_RPC_MAX_TRY = 60
|
||||||
|
|
||||||
|
# Timeout for odoorpc call is 24 hours
|
||||||
|
_ODOO_RPC_TIMEOUT = 86400
|
||||||
|
|
||||||
|
|
||||||
|
class OdooInstance:
|
||||||
|
|
||||||
|
env = False
|
||||||
|
version = False
|
||||||
|
|
||||||
|
def __init__(self, ctx, database, alternative_xml_rpc_port=False):
|
||||||
|
port = (
|
||||||
|
alternative_xml_rpc_port
|
||||||
|
and alternative_xml_rpc_port
|
||||||
|
or ctx.obj["config"]["odoo_host_xmlrpc_port"]
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
"Connect to Odoo database %s via odoorpc (Port %s)... "
|
||||||
|
% (database, port)
|
||||||
|
)
|
||||||
|
|
||||||
|
for x in range(1, _ODOO_RPC_MAX_TRY + 1):
|
||||||
|
# Connection
|
||||||
|
try:
|
||||||
|
rpc_connexion = odoorpc.ODOO(
|
||||||
|
"0.0.0.0",
|
||||||
|
"jsonrpc",
|
||||||
|
port=port,
|
||||||
|
timeout=_ODOO_RPC_TIMEOUT,
|
||||||
|
)
|
||||||
|
# connexion is OK
|
||||||
|
break
|
||||||
|
except (socket.gaierror, socket.error) as e:
|
||||||
|
if x < _ODOO_RPC_MAX_TRY:
|
||||||
|
logger.debug(
|
||||||
|
"%d/%d Unable to connect to the server."
|
||||||
|
" Retrying in 1 second ..." % (x, _ODOO_RPC_MAX_TRY)
|
||||||
|
)
|
||||||
|
time.sleep(1)
|
||||||
|
else:
|
||||||
|
logger.critical(
|
||||||
|
"%d/%d Unable to connect to the server."
|
||||||
|
% (x, _ODOO_RPC_MAX_TRY)
|
||||||
|
)
|
||||||
|
raise e
|
||||||
|
|
||||||
|
# Login
|
||||||
|
try:
|
||||||
|
rpc_connexion.login(
|
||||||
|
database,
|
||||||
|
"admin",
|
||||||
|
"admin",
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
"Unable to connect to http://localhost:%s"
|
||||||
|
" with login %s and password %s"
|
||||||
|
% (
|
||||||
|
port,
|
||||||
|
"admin",
|
||||||
|
"admin",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
raise e
|
||||||
|
|
||||||
|
self.env = rpc_connexion.env
|
||||||
|
self.version = rpc_connexion.version
|
||||||
|
|
||||||
|
def browse_by_search(
|
||||||
|
self, model_name, domain=False, order=False, limit=False
|
||||||
|
):
|
||||||
|
domain = domain or []
|
||||||
|
model = self.env[model_name]
|
||||||
|
return model.browse(model.search(domain, order=order, limit=limit))
|
||||||
|
|
||||||
|
def browse_by_create(self, model_name, vals):
|
||||||
|
model = self.env[model_name]
|
||||||
|
return model.browse(model.create(vals))
|
||||||
|
|
||||||
|
def check_modules_installed(self, module_names) -> bool:
|
||||||
|
if type(module_names) == str:
|
||||||
|
module_names = [module_names]
|
||||||
|
installed_module_ids = self.env["ir.module.module"].search(
|
||||||
|
[
|
||||||
|
("name", "in", module_names),
|
||||||
|
("state", "=", "installed"),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
return len(module_names) == len(installed_module_ids)
|
||||||
|
|
||||||
|
def check_models_present(
|
||||||
|
self, model_name, warning_if_not_found=True
|
||||||
|
) -> bool:
|
||||||
|
if self.env["ir.model"].search([("model", "=", model_name)]):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
if warning_if_not_found:
|
||||||
|
logger.warning(
|
||||||
|
"Model '%s' not found."
|
||||||
|
" Part of the script will be skipped." % (model_name)
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def install_modules(self, module_names):
    """Install the given Odoo modules on the connected database.

    Already-installed modules are skipped; modules in any state other
    than 'uninstalled' or 'installed' are reported as errors. A failing
    installation is retried up to 5 times, waiting ``2 * try * 60``
    seconds between attempts.

    :param module_names: a single module name or a list of names.
    :return: list of module names actually installed by this call.
    :raises Exception: re-raises the last install error after 5 retries.
    """
    if isinstance(module_names, str):
        module_names = [module_names]
    installed_modules = []
    total = len(module_names)
    for position, module_name in enumerate(module_names, start=1):
        prefix = "%d/%d" % (position, total)
        modules = self.browse_by_search(
            "ir.module.module", [("name", "=", module_name)]
        )
        if not len(modules):
            logger.error(
                "%s - Module '%s': Not found." % (prefix, module_name)
            )
            continue

        module = modules[0]
        if module.state == "installed":
            # Wording fix: was "still installed" (a gallicism for
            # "already installed").
            logger.info(
                "%s - Module %s already installed."
                " skipped." % (prefix, module_name)
            )
        elif module.state == "uninstalled":
            try_qty = 0
            installed = False
            while not installed:
                try_qty += 1
                logger.info(
                    "%s - Module '%s': Installing ... %s"
                    % (
                        prefix,
                        module_name,
                        "(try #%d)" % try_qty if try_qty != 1 else "",
                    )
                )
                try:
                    module.button_immediate_install()
                    installed = True
                    installed_modules.append(module_name)
                    # Let the server settle before the next module.
                    time.sleep(5)
                except Exception as e:
                    if try_qty <= 5:
                        # Back off progressively: 2, 4, 6... minutes.
                        sleeping_time = 2 * try_qty * 60
                        logger.warning(
                            "Error. Retrying in %d seconds.\n %s"
                            % (sleeping_time, e)
                        )
                        time.sleep(sleeping_time)
                    else:
                        logger.critical(
                            "Error after %d try. Exiting.\n %s"
                            % (try_qty, e)
                        )
                        raise e
        else:
            logger.error(
                "%s - Module '%s': In the %s state."
                " (Unable to install)"
                % (prefix, module_name, module.state)
            )
    return installed_modules
|
||||||
|
|
||||||
|
def uninstall_modules(self, module_names):
    """Uninstall the given Odoo modules on the connected database.

    Modules in an installable/upgradable state are uninstalled through
    the regular module wizard; other states are reported as errors.

    :param module_names: a single module name or a list of names.
    """
    if isinstance(module_names, str):
        module_names = [module_names]
    total = len(module_names)
    for position, module_name in enumerate(module_names, start=1):
        prefix = "%d/%d" % (position, total)
        modules = self.browse_by_search(
            "ir.module.module", [("name", "=", module_name)]
        )
        if not len(modules):
            logger.error(
                "%s - Module '%s': Not found." % (prefix, module_name)
            )
            continue
        module = modules[0]
        if module.state in (
            "installed",
            "to upgrade",
            "to update",
            "to remove",
        ):
            logger.info(
                "%s - Module '%s': Uninstalling .." % (prefix, module_name)
            )
            # Cancel any pending upgrade first, then schedule the
            # uninstall and run it via the base.module.upgrade wizard.
            module.button_upgrade_cancel()
            module.button_uninstall()
            wizard = self.browse_by_create("base.module.upgrade", {})
            wizard.upgrade_module()
        else:
            logger.error(
                "%s - Module '%s': In the %s state."
                " (Unable to uninstall)"
                % (prefix, module_name, module.state)
            )
|
||||||
102
odoo_openupgrade_wizard/tools_odoo_module.py
Normal file
102
odoo_openupgrade_wizard/tools_odoo_module.py
Normal file
|
|
@ -0,0 +1,102 @@
|
||||||
|
from functools import total_ordering
|
||||||
|
|
||||||
|
from git import Repo
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import (
|
||||||
|
get_odoo_addons_path,
|
||||||
|
get_odoo_env_path,
|
||||||
|
get_odoo_modules_from_csv,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Analysis(object):
    """Discover, from the project's CSV module list, the Odoo modules
    to analyse and the repository each one belongs to.

    Attributes:
        modules: list of OdooModule instances discovered for the
            initial release of the migration.
    """

    def __init__(self, ctx):
        # BUGFIX: ``modules`` used to be a class-level list, shared and
        # mutated across every Analysis instance. It must be per-instance.
        self.modules = []

        module_names = get_odoo_modules_from_csv(ctx.obj["module_file_path"])
        initial_release = ctx.obj["config"]["odoo_versions"][0]["release"]

        # Instanciate a new OdooModule per discovered module, skipping
        # names whose repository cannot be found and duplicates
        # (OdooModule.__eq__ compares against 'repo.module' strings).
        for module_name in module_names:
            repository_name = OdooModule.find_repository(
                ctx, module_name, initial_release
            )
            if (
                repository_name
                and "%s.%s" % (repository_name, module_name)
                not in self.modules
            ):
                logger.debug(
                    "Discovering module '%s' in %s for release %s"
                    % (module_name, repository_name, initial_release)
                )
                self.modules.append(
                    OdooModule(ctx, module_name, repository_name)
                )
|
||||||
|
|
||||||
|
|
||||||
|
@total_ordering
class OdooModule(object):
    """An Odoo module, identified by its name and by the repository
    ('organization/repository') that provides it.

    Modules order by type first (odoo < OCA < custom), then
    alphabetically by name. Equality also works against a plain
    'repository.module' string.
    """

    active = True
    name = False
    repository = False
    module_type = False
    unique_name = False

    @classmethod
    def find_repository(cls, ctx, module_name, current_release):
        """Return the 'org/repo' name of the git repository providing
        ``module_name`` for ``current_release``, or False when the
        module is not found in any addons path."""
        main_path = get_odoo_env_path(ctx, {"release": current_release})
        addons_path = get_odoo_addons_path(
            ctx, main_path, {"release": current_release, "action": "update"}
        )
        for addon_path in addons_path:
            if (addon_path / module_name).exists():
                # Walk up to the root of the git checkout: core odoo
                # addons live under odoo/odoo/addons or odoo/addons.
                if str(addon_path).endswith("odoo/odoo/addons"):
                    path = addon_path.parent.parent
                elif str(addon_path).endswith("odoo/addons"):
                    path = addon_path.parent
                else:
                    path = addon_path
                repo = Repo(str(path))
                # Derive 'org/repo' from the first remote's github URL.
                repository_name = repo.remotes[0].url.replace(
                    "https://github.com/", ""
                )
                return repository_name
        return False

    def __init__(self, ctx, module_name, repository_name):
        self.name = module_name
        self.repository = repository_name
        if repository_name == "odoo/odoo":
            self.module_type = "odoo"
        elif repository_name.startswith("OCA"):
            self.module_type = "OCA"
        else:
            self.module_type = "custom"
        self.unique_name = "%s.%s" % (repository_name, module_name)

    def __eq__(self, other):
        # A plain string compares against the unique name, so that
        # '"repo.module" in module_list' works in Analysis.
        if isinstance(other, str):
            return self.unique_name == other
        if isinstance(other, OdooModule):
            return self.unique_name == other.unique_name
        # BUGFIX: previously fell through and returned None implicitly;
        # NotImplemented lets Python try the reflected comparison.
        return NotImplemented

    def __lt__(self, other):
        if self.module_type != other.module_type:
            if self.module_type == "odoo":
                return True
            # BUGFIX: the original compared self.module_type twice
            # ("OCA" and "custom" simultaneously), which can never be
            # True, so OCA modules never sorted before custom ones.
            elif self.module_type == "OCA" and other.module_type == "custom":
                return True
            else:
                return False
        return self.name < other.name
|
||||||
152
odoo_openupgrade_wizard/tools_postgres.py
Normal file
152
odoo_openupgrade_wizard/tools_postgres.py
Normal file
|
|
@ -0,0 +1,152 @@
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.tools_docker import (
|
||||||
|
get_docker_client,
|
||||||
|
run_container,
|
||||||
|
)
|
||||||
|
from odoo_openupgrade_wizard.tools_system import get_script_folder
|
||||||
|
|
||||||
|
|
||||||
|
def get_postgres_container(ctx):
    """Return the project's postgres container, launching it if needed.

    :param ctx: click context carrying the project configuration.
    :return: a running docker container object for postgres.
    """
    client = get_docker_client()
    config = ctx.obj["config"]
    image_name = config["postgres_image_name"]
    container_name = config["postgres_container_name"]

    # Reuse the container when one with the expected name is running.
    existing = client.containers.list(filters={"name": container_name})
    if existing:
        return existing[0]

    logger.info("Launching Postgres Container. (Image %s)" % image_name)
    container = run_container(
        image_name,
        container_name,
        environments={
            "POSTGRES_USER": "odoo",
            "POSTGRES_PASSWORD": "odoo",
            "POSTGRES_DB": "postgres",
            "PGDATA": "/var/lib/postgresql/data/pgdata",
        },
        volumes={
            ctx.obj["env_folder_path"]: "/env/",
            ctx.obj[
                "postgres_folder_path"
            ]: "/var/lib/postgresql/data/pgdata/",
        },
        detach=True,
    )
    # TODO(review): the container is not ready to accept connections
    # immediately after start; replace this fixed sleep with a real
    # readiness probe (e.g. retrying pg_isready).
    time.sleep(5)
    return container
|
||||||
|
|
||||||
|
|
||||||
|
def execute_sql_file(ctx, database, sql_file):
    """Run a SQL file against ``database`` inside the postgres container.

    :param ctx: click context carrying the project configuration.
    :param database: name of the target database.
    :param sql_file: host path of the SQL file; must live under the env
        folder, which is the only directory mounted (as /env/) in the
        postgres container.
    :raises Exception: if the file is outside the env folder, or if
        psql exits with a non-zero code.
    """
    container = get_postgres_container(ctx)

    env_folder = str(ctx.obj["env_folder_path"])
    if env_folder not in str(sql_file):
        raise Exception(
            "The SQL file %s is not in the"
            " main folder %s available"
            " in the postgres container."
            % (sql_file, ctx.obj["env_folder_path"])
        )
    # Rebuild the path relative to the env folder so psql can resolve
    # it from inside the container mount point.
    relative_path = Path(str(sql_file).replace(env_folder, "."))
    container_path = Path("/env/") / relative_path

    docker_command = (
        "psql" " --username=odoo" " --dbname={database}" " --file {file_path}"
    ).format(database=database, file_path=container_path)
    logger.info(
        "Executing the script '%s' in postgres container"
        " on database %s" % (relative_path, database)
    )
    docker_result = container.exec_run(docker_command)
    if docker_result.exit_code != 0:
        raise Exception(
            "The script '%s' failed on database %s. Exit Code : %d"
            % (relative_path, database, docker_result.exit_code)
        )
|
||||||
|
|
||||||
|
|
||||||
|
def execute_sql_request(ctx, request, database="postgres"):
    """Execute a SQL request in the postgres container and parse rows.

    :param ctx: click context carrying the project configuration.
    :param request: SQL statement to execute.
    :param database: target database name (default 'postgres').
    :return: list of rows; each row is a list of stripped column
        strings ('|'-separated psql tuples-only output).
    :raises Exception: if psql exits with a non-zero code.
    """
    container = get_postgres_container(ctx)
    docker_command = (
        "psql"
        " --username=odoo"
        " --dbname={database}"
        " --tuples-only"
        ' --command "{request}"'
    ).format(database=database, request=request)
    logger.debug(
        "Executing the following command in postgres container"
        " on database %s \n %s" % (database, request)
    )
    docker_result = container.exec_run(docker_command)
    if docker_result.exit_code != 0:
        raise Exception(
            "Request %s failed on database %s. Exit Code : %d"
            % (request, database, docker_result.exit_code)
        )
    output_lines = docker_result.output.decode("utf-8").split("\n")
    # Each non-empty line is a row; columns are '|'-separated.
    return [
        [cell.strip() for cell in line.split("|")]
        for line in output_lines
        if line
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_database(ctx, database: str, state="present"):
    """
    - Connect to postgres container.
    - Check if the database exist.
    - if doesn't exists and state == 'present', create it.
    - if exists and state == 'absent', drop it.
    """
    request = "select datname FROM pg_database WHERE datistemplate = false;"
    existing = execute_sql_request(ctx, request)
    # Rows come back as single-column lists, hence the [database] probe.
    database_exists = [database] in existing

    if state == "present":
        if database_exists:
            return
        logger.info("Create database '%s' ..." % database)
        execute_sql_request(
            ctx,
            "CREATE DATABASE {database} owner odoo;".format(
                database=database
            ),
        )
    else:
        if not database_exists:
            return
        logger.info("Drop database '%s' ..." % database)
        execute_sql_request(
            ctx, "DROP DATABASE {database};".format(database=database)
        )
|
||||||
|
|
||||||
|
|
||||||
|
def execute_sql_files_pre_migration(
    ctx, database: str, migration_step: dict, sql_files: list = None
):
    """Execute SQL files on ``database`` before a migration step.

    :param ctx: click context carrying the project configuration.
    :param database: target database; created if it does not exist.
    :param migration_step: migration step dict (used to locate the
        step's script folder).
    :param sql_files: explicit list of SQL file paths to run; when
        None or empty, every '*.sql' file found in the step's script
        folder is run, in alphabetical order.
    """
    ensure_database(ctx, database, state="present")
    # BUGFIX: the default used to be a mutable ``[]`` (shared between
    # calls); ``None`` is the safe idiom and behaves identically here.
    if not sql_files:
        script_folder = get_script_folder(ctx, migration_step)
        sql_files = sorted(
            script_folder / Path(f)
            for f in os.listdir(script_folder)
            if os.path.isfile(os.path.join(script_folder, f))
            and f.endswith(".sql")
        )
    for sql_file in sql_files:
        execute_sql_file(ctx, database, sql_file)
|
||||||
|
|
@ -6,10 +6,27 @@ from git_aggregator import main as gitaggregate_cmd
|
||||||
from git_aggregator.utils import working_directory_keeper
|
from git_aggregator.utils import working_directory_keeper
|
||||||
from jinja2 import Template
|
from jinja2 import Template
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
from plumbum.cmd import mkdir
|
from plumbum.cmd import chmod, mkdir
|
||||||
|
from plumbum.commands.processes import ProcessExecutionError
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard import templates
|
||||||
|
|
||||||
|
|
||||||
def ensure_folder_exists(folder_path: Path, mode: str = False):
|
def get_script_folder(ctx, migration_step: dict) -> Path:
|
||||||
|
return ctx.obj["script_folder_path"] / migration_step["complete_name"]
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_folder_writable(folder_path: Path):
|
||||||
|
logger.info("Make writable the folder '%s'" % folder_path)
|
||||||
|
try:
|
||||||
|
chmod(["--silent", "--recursive", "o+w", str(folder_path)])
|
||||||
|
except ProcessExecutionError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_folder_exists(
|
||||||
|
folder_path: Path, mode: str = "755", git_ignore_content: bool = False
|
||||||
|
):
|
||||||
"""Create a local folder.
|
"""Create a local folder.
|
||||||
- directory is created if it doesn't exist.
|
- directory is created if it doesn't exist.
|
||||||
- mode is applied if defined.
|
- mode is applied if defined.
|
||||||
|
|
@ -17,11 +34,16 @@ def ensure_folder_exists(folder_path: Path, mode: str = False):
|
||||||
"""
|
"""
|
||||||
if not folder_path.exists():
|
if not folder_path.exists():
|
||||||
cmd = ["--parents", folder_path]
|
cmd = ["--parents", folder_path]
|
||||||
if mode:
|
cmd = ["--mode", mode] + cmd
|
||||||
cmd = ["--mode", "755"] + cmd
|
|
||||||
logger.info("Creating folder '%s' ..." % (folder_path))
|
logger.info("Creating folder '%s' ..." % (folder_path))
|
||||||
mkdir(cmd)
|
mkdir(cmd)
|
||||||
|
|
||||||
|
if git_ignore_content:
|
||||||
|
ensure_file_exists_from_template(
|
||||||
|
folder_path / Path(".gitignore"),
|
||||||
|
templates.GIT_IGNORE_CONTENT,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def ensure_file_exists_from_template(
|
def ensure_file_exists_from_template(
|
||||||
file_path: Path, template_name: str, **args
|
file_path: Path, template_name: str, **args
|
||||||
|
|
@ -58,7 +80,7 @@ def git_aggregate(folder_path: Path, config_path: Path):
|
||||||
do_push=False,
|
do_push=False,
|
||||||
expand_env=False,
|
expand_env=False,
|
||||||
env_file=None,
|
env_file=None,
|
||||||
force=False,
|
force=True,
|
||||||
)
|
)
|
||||||
with working_directory_keeper:
|
with working_directory_keeper:
|
||||||
os.chdir(folder_path)
|
os.chdir(folder_path)
|
||||||
|
|
|
||||||
314
poetry.lock
generated
314
poetry.lock
generated
|
|
@ -61,11 +61,11 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "certifi"
|
name = "certifi"
|
||||||
version = "2021.10.8"
|
version = "2022.5.18.1"
|
||||||
description = "Python package for providing Mozilla's CA Bundle."
|
description = "Python package for providing Mozilla's CA Bundle."
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "charset-normalizer"
|
name = "charset-normalizer"
|
||||||
|
|
@ -97,6 +97,17 @@ python-versions = "*"
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
click = "*"
|
click = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "click-loglevel"
|
||||||
|
version = "0.4.0.post1"
|
||||||
|
description = "Log level parameter type for Click"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = "~=3.6"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
click = ">=6.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "colorama"
|
name = "colorama"
|
||||||
version = "0.4.4"
|
version = "0.4.4"
|
||||||
|
|
@ -138,6 +149,23 @@ category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "docker"
|
||||||
|
version = "5.0.3"
|
||||||
|
description = "A Python library for the Docker Engine API."
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pywin32 = {version = "227", markers = "sys_platform == \"win32\""}
|
||||||
|
requests = ">=2.14.2,<2.18.0 || >2.18.0"
|
||||||
|
websocket-client = ">=0.32.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
ssh = ["paramiko (>=2.4.2)"]
|
||||||
|
tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=3.4.7)", "idna (>=2.0.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "dparse"
|
name = "dparse"
|
||||||
version = "0.5.1"
|
version = "0.5.1"
|
||||||
|
|
@ -180,6 +208,29 @@ colorama = "*"
|
||||||
kaptan = "*"
|
kaptan = "*"
|
||||||
requests = "*"
|
requests = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "gitdb"
|
||||||
|
version = "4.0.9"
|
||||||
|
description = "Git Object Database"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
smmap = ">=3.0.1,<6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "gitpython"
|
||||||
|
version = "3.1.20"
|
||||||
|
description = "Python Git Library"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
gitdb = ">=4.0.1,<5"
|
||||||
|
typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""}
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "idna"
|
name = "idna"
|
||||||
version = "3.3"
|
version = "3.3"
|
||||||
|
|
@ -190,7 +241,7 @@ python-versions = ">=3.5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "immutables"
|
name = "immutables"
|
||||||
version = "0.17"
|
version = "0.18"
|
||||||
description = "Immutable Collections"
|
description = "Immutable Collections"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -200,7 +251,7 @@ python-versions = ">=3.6"
|
||||||
typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""}
|
typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.910)", "pytest (>=6.2.4,<6.3.0)"]
|
test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (==0.942)", "pytest (>=6.2.4,<6.3.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "importlib-metadata"
|
name = "importlib-metadata"
|
||||||
|
|
@ -331,6 +382,14 @@ category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "odoorpc"
|
||||||
|
version = "0.8.0"
|
||||||
|
description = "OdooRPC is a Python package providing an easy way to pilot your Odoo servers through RPC."
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "packaging"
|
name = "packaging"
|
||||||
version = "21.3"
|
version = "21.3"
|
||||||
|
|
@ -460,7 +519,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pytest"
|
name = "pytest"
|
||||||
version = "7.1.1"
|
version = "7.1.2"
|
||||||
description = "pytest: simple powerful testing with Python"
|
description = "pytest: simple powerful testing with Python"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -496,7 +555,7 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pywin32"
|
name = "pywin32"
|
||||||
version = "303"
|
version = "227"
|
||||||
description = "Python for Window Extensions"
|
description = "Python for Window Extensions"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -561,6 +620,14 @@ category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "smmap"
|
||||||
|
version = "5.0.0"
|
||||||
|
description = "A pure Python implementation of a sliding window memory map manager"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml"
|
name = "toml"
|
||||||
version = "0.10.2"
|
version = "0.10.2"
|
||||||
|
|
@ -597,7 +664,7 @@ dev = ["packaging"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tox"
|
name = "tox"
|
||||||
version = "3.24.5"
|
version = "3.25.0"
|
||||||
description = "tox is a generic virtualenv management and test command line tool"
|
description = "tox is a generic virtualenv management and test command line tool"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -620,7 +687,7 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytes
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "typed-ast"
|
name = "typed-ast"
|
||||||
version = "1.5.2"
|
version = "1.5.4"
|
||||||
description = "a fork of Python 2 and 3 ast modules with type comment support"
|
description = "a fork of Python 2 and 3 ast modules with type comment support"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -649,7 +716,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "virtualenv"
|
name = "virtualenv"
|
||||||
version = "20.14.0"
|
version = "20.14.1"
|
||||||
description = "Virtual Python Environment builder"
|
description = "Virtual Python Environment builder"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -667,6 +734,19 @@ six = ">=1.9.0,<2"
|
||||||
docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
|
docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
|
||||||
testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
|
testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "websocket-client"
|
||||||
|
version = "1.3.1"
|
||||||
|
description = "WebSocket client for Python with low level API options"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
|
||||||
|
optional = ["python-socks", "wsaccel"]
|
||||||
|
test = ["websockets"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "win32-setctime"
|
name = "win32-setctime"
|
||||||
version = "1.1.0"
|
version = "1.1.0"
|
||||||
|
|
@ -701,7 +781,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "1.1"
|
lock-version = "1.1"
|
||||||
python-versions = "^3.6"
|
python-versions = "^3.6"
|
||||||
content-hash = "cff9b335836283e1c1f9185ba1b575de62e19d3d6409257a03836f9a8cfaaaf5"
|
content-hash = "c86a563043f2c105d46b393c93b6d10d67e35917d5dfbd0dd83daf42e62e3dcd"
|
||||||
|
|
||||||
[metadata.files]
|
[metadata.files]
|
||||||
aiocontextvars = [
|
aiocontextvars = [
|
||||||
|
|
@ -725,8 +805,8 @@ attrs = [
|
||||||
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
|
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
|
||||||
]
|
]
|
||||||
certifi = [
|
certifi = [
|
||||||
{file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
|
{file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"},
|
||||||
{file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
|
{file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"},
|
||||||
]
|
]
|
||||||
charset-normalizer = [
|
charset-normalizer = [
|
||||||
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
|
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
|
||||||
|
|
@ -739,6 +819,10 @@ click = [
|
||||||
click-default-group = [
|
click-default-group = [
|
||||||
{file = "click-default-group-1.2.2.tar.gz", hash = "sha256:d9560e8e8dfa44b3562fbc9425042a0fd6d21956fcc2db0077f63f34253ab904"},
|
{file = "click-default-group-1.2.2.tar.gz", hash = "sha256:d9560e8e8dfa44b3562fbc9425042a0fd6d21956fcc2db0077f63f34253ab904"},
|
||||||
]
|
]
|
||||||
|
click-loglevel = [
|
||||||
|
{file = "click-loglevel-0.4.0.post1.tar.gz", hash = "sha256:470bf1e208fe650cedacb23061e4e18d36df601ca9d8b79e6d8e8cdf1792ece1"},
|
||||||
|
{file = "click_loglevel-0.4.0.post1-py3-none-any.whl", hash = "sha256:f3449b5d28d6cba5bfbeed371ad59950aba035730d5cc28a32b4e7632e17ed6c"},
|
||||||
|
]
|
||||||
colorama = [
|
colorama = [
|
||||||
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
|
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
|
||||||
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
|
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
|
||||||
|
|
@ -799,6 +883,10 @@ distlib = [
|
||||||
{file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
|
{file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
|
||||||
{file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
|
{file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
|
||||||
]
|
]
|
||||||
|
docker = [
|
||||||
|
{file = "docker-5.0.3-py2.py3-none-any.whl", hash = "sha256:7a79bb439e3df59d0a72621775d600bc8bc8b422d285824cb37103eab91d1ce0"},
|
||||||
|
{file = "docker-5.0.3.tar.gz", hash = "sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb"},
|
||||||
|
]
|
||||||
dparse = [
|
dparse = [
|
||||||
{file = "dparse-0.5.1-py3-none-any.whl", hash = "sha256:e953a25e44ebb60a5c6efc2add4420c177f1d8404509da88da9729202f306994"},
|
{file = "dparse-0.5.1-py3-none-any.whl", hash = "sha256:e953a25e44ebb60a5c6efc2add4420c177f1d8404509da88da9729202f306994"},
|
||||||
{file = "dparse-0.5.1.tar.gz", hash = "sha256:a1b5f169102e1c894f9a7d5ccf6f9402a836a5d24be80a986c7ce9eaed78f367"},
|
{file = "dparse-0.5.1.tar.gz", hash = "sha256:a1b5f169102e1c894f9a7d5ccf6f9402a836a5d24be80a986c7ce9eaed78f367"},
|
||||||
|
|
@ -811,60 +899,68 @@ git-aggregator = [
|
||||||
{file = "git-aggregator-2.1.0.tar.gz", hash = "sha256:efdc4d3f360fd63ef5b14e7064ce5edb14ea404c6a4047715cfc5b9384ff49cc"},
|
{file = "git-aggregator-2.1.0.tar.gz", hash = "sha256:efdc4d3f360fd63ef5b14e7064ce5edb14ea404c6a4047715cfc5b9384ff49cc"},
|
||||||
{file = "git_aggregator-2.1.0-py3-none-any.whl", hash = "sha256:59986c0ff7a1641849504dc4d86491872d9f65b46a076aac4bf21cd550ff61df"},
|
{file = "git_aggregator-2.1.0-py3-none-any.whl", hash = "sha256:59986c0ff7a1641849504dc4d86491872d9f65b46a076aac4bf21cd550ff61df"},
|
||||||
]
|
]
|
||||||
|
gitdb = [
|
||||||
|
{file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
|
||||||
|
{file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
|
||||||
|
]
|
||||||
|
gitpython = [
|
||||||
|
{file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"},
|
||||||
|
{file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"},
|
||||||
|
]
|
||||||
idna = [
|
idna = [
|
||||||
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
||||||
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
|
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
|
||||||
]
|
]
|
||||||
immutables = [
|
immutables = [
|
||||||
{file = "immutables-0.17-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cab10d65a29b2019fffd7a3924f6965a8f785e7bd409641ce36ab2d3335f88c4"},
|
{file = "immutables-0.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d841dfa15b932bdad27f5149bce86b32d0dd8a29679ed61405677317b6893447"},
|
||||||
{file = "immutables-0.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f73088c9b8595ddfd45a5658f8cce0cb3ae6e5890458381fccba3ed3035081d4"},
|
{file = "immutables-0.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a5886845cd0ca8263b721337750a895e28feee2f16694a526977a791909db5"},
|
||||||
{file = "immutables-0.17-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef632832fa1acae6861d83572b866126f9e35706ab6e581ce6b175b3e0b7a3c4"},
|
{file = "immutables-0.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e979a9225507e3cd830ea73ac68b69fe82f495313a891485800daa5b6567e05"},
|
||||||
{file = "immutables-0.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0efdcec7b63859b41f794ffa0cd0d6dc87e77d1be4ff0ec23471a3a1e719235f"},
|
{file = "immutables-0.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9949f704b80d0e601587d0a3b1a0cc6ff5d49528f6dfc1c8a1476b2137bb925e"},
|
||||||
{file = "immutables-0.17-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eca96f12bc1535657d24eae2c69816d0b22c4a4bc7f4753115e028a137e8dad"},
|
{file = "immutables-0.18-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6c820c9bb5aac62b76de703384bb8bb706108be90c3def4a7f047f185a92bb"},
|
||||||
{file = "immutables-0.17-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:01a25b1056754aa486afea5471ca348410d77f458477ccb6fa3baf2d3e3ff3d5"},
|
{file = "immutables-0.18-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:03696193b276db3a9b619629685198886ddd7c4098c544bd8d0f87532c74120b"},
|
||||||
{file = "immutables-0.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c41a6648f7355f1241da677c418edae56fdc45af19ad3540ca8a1e7a81606a7a"},
|
{file = "immutables-0.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:798b4d6c388116effa7523591e4e39865292e4fa74e169b05a0759a16f604ce1"},
|
||||||
{file = "immutables-0.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0b578bba11bd8ae55dee9536edf8d82be18463d15d4b4c9827e27eeeb73826bf"},
|
{file = "immutables-0.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b3621256bc8058a7973f736b9e2c940e17133476265a0a83b8df8c0f446ca32f"},
|
||||||
{file = "immutables-0.17-cp310-cp310-win32.whl", hash = "sha256:a28682e115191e909673aedb9ccea3377da3a6a929f8bd86982a2a76bdfa89db"},
|
{file = "immutables-0.18-cp310-cp310-win32.whl", hash = "sha256:98f67bd36532582751dcc9021fdb60e7efc82e5717ae5927b84d0b86ea58fe12"},
|
||||||
{file = "immutables-0.17-cp310-cp310-win_amd64.whl", hash = "sha256:293ddb681502945f29b3065e688a962e191e752320040892316b9dd1e3b9c8c9"},
|
{file = "immutables-0.18-cp310-cp310-win_amd64.whl", hash = "sha256:69352b45a115808219feaf0bb7a551e9aa76c72684db93cd03f11474165f4569"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ec04fc7d9f76f26d82a5d9d1715df0409d0096309828fc46cd1a2067c7fbab95"},
|
{file = "immutables-0.18-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6ee2d6f8816fce53fa89b6a1ba2d4a96b344bf584d6ed0b10a871b17fff46e49"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f024f25e9fda42251a2b2167668ca70678c19fb3ab6ed509cef0b4b431d0ff73"},
|
{file = "immutables-0.18-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13159cedb698fdd243d9f2a7469c1628e075a180fc02f865dd98322b92a14aaf"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b02083b2052cc201ac5cbd38f34a5da21fcd51016cb4ddd1fb43d7dc113eac17"},
|
{file = "immutables-0.18-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d72527fde329e3b566b67c954237be52b07d6e84ff23dcc1e94499755cacff6"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea32db31afb82d8369e98f85c5b815ff81610a12fbc837830a34388f1b56f080"},
|
{file = "immutables-0.18-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53fccddd28cc3214aa48ca564702311c07eac069190dd890e097802c5d69b33a"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:898a9472d1dd3d17f291114395a1be65be035355fc65af0b2c88238f8fbeaa62"},
|
{file = "immutables-0.18-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a29e3aa0fe05fb2cc6b31039f448aa6206d7f0cdb660c98aa9be6d12070d6840"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:736dd3d88d44da0ee48804792bd095c01a344c5d1b0f10beeb9ccb3a00b9c19d"},
|
{file = "immutables-0.18-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:ffced8535cc673fcfb411d28ba5744689a6978fa596c803725a76f43c1bda911"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:15ff4139720f79b902f435a25e3c00f9c8adcc41d79bed64b7e51ae36cfe9620"},
|
{file = "immutables-0.18-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9f17407491164beb689d426f7985f79ae9dfa69868653cfbdb95645f6bf05cb0"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-win32.whl", hash = "sha256:4f018a6c4c3689b82f763ad4f84dec6aa91c83981db7f6bafef963f036e5e815"},
|
{file = "immutables-0.18-cp36-cp36m-win32.whl", hash = "sha256:74456c579cfd53f883cdcc0700e3871648a3316767efc1adf8c723ad3d8addec"},
|
||||||
{file = "immutables-0.17-cp36-cp36m-win_amd64.whl", hash = "sha256:d7400a6753b292ac80102ed026efa8da2c3fedd50c443924cbe9b6448d3b19e4"},
|
{file = "immutables-0.18-cp36-cp36m-win_amd64.whl", hash = "sha256:e4c2110173649acf67bd763bbd2a9c3a863a1d20fd7f3db3493ce4e0fb04fae5"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f7a6e0380bddb99c46bb3f12ae5eee9a23d6a66d99bbf0fb10fa552f935c2e8d"},
|
{file = "immutables-0.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa5292630b08c874972931bac06ee381cb6fb7382d7be1856234d7bd4a8e676"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7696c42d1f9a16ecda0ee46229848df8706973690b45e8a090d995d647a5ec57"},
|
{file = "immutables-0.18-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc830a689a55e404f0e23d7d69e01c218fa8a0be54a6ca5df45b6fbfeeac648a"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:892b6a1619cd8c398fa70302c4cfa9768a694377639330e7a58cc7be111ab23e"},
|
{file = "immutables-0.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5caf9c670e6851e7f310716c7dcdf8705236d13056eda1fab3deaad5d7198468"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89093d5a85357250b1d5ae218fdcfdbac4097cbb2d8b55004aa7a2ca2a00a09f"},
|
{file = "immutables-0.18-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:853d63f4a07b2ea2131ba0831aeec11f6a6ee5e290e8f175bf56842762d7412e"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99a8bc6d0623300eb46beea74f7a5061968fb3efc4e072f23f6c0b21c588238d"},
|
{file = "immutables-0.18-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9a86dcca4bb406f80e7a18c233aec0e76a7530c456e24aa1e19a708a34f2aac1"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:00380474f8e3b4a2eeb06ce694e0e3cb85a144919140a2b3116defb6c1587471"},
|
{file = "immutables-0.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6baf4dc11ba0e9f41a6cbde7ecaa7af9cb482559b92ba3254e3e37a518b1970e"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:078e3ed63be0ac36523b80bbabbfb1bb57e55009f4efb5650b0e3b3ed569c3f1"},
|
{file = "immutables-0.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:734ec4467dd15f9135ca5ecccc91e796a67d27c227e81554f9e06b1bb3b28d6d"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-win32.whl", hash = "sha256:14905aecc62b318d86045dcf8d35ef2063803d9d331aeccd88958f03caadc7b0"},
|
{file = "immutables-0.18-cp37-cp37m-win32.whl", hash = "sha256:f6edb73619aa0a5fe4a77d97dd9d39bfeef61a5afe71aa5bdceccf59b933999e"},
|
||||||
{file = "immutables-0.17-cp37-cp37m-win_amd64.whl", hash = "sha256:3774d403d1570105a1da2e00c38ce3f04065fd1deff04cf998f8d8e946d0ae13"},
|
{file = "immutables-0.18-cp37-cp37m-win_amd64.whl", hash = "sha256:fade8ccf7afbc1e7ea353159fa90cc04395f2f4f57658160d7a02f6aa60c4e77"},
|
||||||
{file = "immutables-0.17-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e5a9caee1b99eccf1447056ae6bda77edd15c357421293e81fa1a4f28e83448a"},
|
{file = "immutables-0.18-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8b650d779a46441dccd02e7ee8326dbd0dec633c6bd75e9fe13373a6b19570dd"},
|
||||||
{file = "immutables-0.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fed1e1baf1de1bc94a0310da29814892064928d7d40ff5a3b86bcd11d5e7cfff"},
|
{file = "immutables-0.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1acbbc333f1643fd1ed21bcc3e09aad2ef6648478a0cae76a2ca5823764a7d3b"},
|
||||||
{file = "immutables-0.17-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d7daa340d76747ba5a8f64816b48def74bd4be45a9508073b34fa954d099fba"},
|
{file = "immutables-0.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3bad4d43009fa61ea40d887e6fa89ae7c4e62dff5e4a878d60b76cf245720bb"},
|
||||||
{file = "immutables-0.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4644c29fe07fb92ba84b26659708e1799fecaaf781214adf13edd8a4d7495a9"},
|
{file = "immutables-0.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e04b61ddffd4ccb4d7ab823b2e55dbb4ad47c37697e311fae4b98b3c023ab194"},
|
||||||
{file = "immutables-0.17-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e9ea0e2a31db44fb01617ff875d4c26f962696e1c5ff11ed7767c2d8dedac4"},
|
{file = "immutables-0.18-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54577e46c5332d7390212040c084335b7d667504847ed2788428d44f20e595ce"},
|
||||||
{file = "immutables-0.17-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:64100dfdb29fae2bc84748fff5d66dd6b3997806c717eeb75f7099aeee9b1878"},
|
{file = "immutables-0.18-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1330f96eb6a3a11f5d02f30b2c6393ef30d01a79f7144d63d2a3e6ff05cb99db"},
|
||||||
{file = "immutables-0.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5f933e5bf6f2c1afb24bc2fc8bea8b132096a4a6ba54f36be59787981f3e50ff"},
|
{file = "immutables-0.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1d6821d7718cf9f4a7b1d9e765fc22a9d1ae0fad3fabd8724b4e614d2a6e0b54"},
|
||||||
{file = "immutables-0.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9508a087a47f9f9506adf2fa8383ab14c46a222b57eea8612bc4c2aa9a9550fe"},
|
{file = "immutables-0.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45bd862a5dfb952eaff4a9c2448712c5a550dd956575e23cbfc512010fb06c74"},
|
||||||
{file = "immutables-0.17-cp38-cp38-win32.whl", hash = "sha256:dfd2c63f15d1e5ea1ed2a05b7c602b5f61a64337415d299df20e103a57ae4906"},
|
{file = "immutables-0.18-cp38-cp38-win32.whl", hash = "sha256:989606e440492736112b471dcd80586e3d4a63bc6f8ff4f9d1d612e0f96cb683"},
|
||||||
{file = "immutables-0.17-cp38-cp38-win_amd64.whl", hash = "sha256:301c539660c988c5b24051ccad1e36c040a916f1e58fa3e245e3122fc50dd28d"},
|
{file = "immutables-0.18-cp38-cp38-win_amd64.whl", hash = "sha256:ac9e05f846392e983fb59f74ed2334031b366251d16d24122e4c85f70fb6e2da"},
|
||||||
{file = "immutables-0.17-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:563bc2ddbe75c10faa3b4b0206870653b44a231b97ed23cff8ab8aff503d922d"},
|
{file = "immutables-0.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:de1a091ab89b7ba50501a915a0fbdceb52b079c752f4f7c76d2060237774a714"},
|
||||||
{file = "immutables-0.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f621ea6130393cd14d0fbd35b306d4dc70bcd0fda550a8cd313db8015e34ca60"},
|
{file = "immutables-0.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d43b16b6adbe1327c6688e14b125cb3b940e748790b305de96c8d55668ac25f"},
|
||||||
{file = "immutables-0.17-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57c2d1b16b716bca70345db334dd6a861bf45c46cb11bb1801277f8a9012e864"},
|
{file = "immutables-0.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f32b5933393e4cc204d8f9e7d9f503ec052e30f612090be0de0dd31b1464b35"},
|
||||||
{file = "immutables-0.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a08e1a80bd8c5df72c2bf0af24a37ceec17e8ffdb850ed5a62d0bba1d4d86018"},
|
{file = "immutables-0.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525fe9001b5a96c325eec41677efaeb8c3610776e834ce7f31fbe3d33cc05252"},
|
||||||
{file = "immutables-0.17-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b99155ad112149d43208c611c6c42f19e16716526dacc0fcc16736d2f5d2e20"},
|
{file = "immutables-0.18-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11da4946e19f3b24a873b2ba2891cc226a89bb398561c62dfb966a9b6501a4a"},
|
||||||
{file = "immutables-0.17-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ed71e736f8fb82545d00c8969dbc167547c15e85729058edbed3c03b94fca86c"},
|
{file = "immutables-0.18-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:90da9dea0a1c0a907d511f124cd87fe090c0e30a951c3fe68bc9782ae4f2c77f"},
|
||||||
{file = "immutables-0.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:19e4b8e5810dd7cab63fa700373f787a369d992166eabc23f4b962e5704d33c5"},
|
{file = "immutables-0.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77bdc96dc24e32839557cde3785f8039a369c95529ff9179044b81d0ba4bd02c"},
|
||||||
{file = "immutables-0.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:305062012497d4c4a70fe35e20cef2c6f65744e721b04671092a63354799988d"},
|
{file = "immutables-0.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:210efea163a704597cfdb2d30713d3c0963c30f0d997539c9ab5da40e3d6a886"},
|
||||||
{file = "immutables-0.17-cp39-cp39-win32.whl", hash = "sha256:f5c6bd012384a8d6af7bb25675719214d76640fe6c336e2b5fba9eef1407ae6a"},
|
{file = "immutables-0.18-cp39-cp39-win32.whl", hash = "sha256:535616ad7ca1174a27ade637192c970bfedb0b0e0467e69ce415b40d7cf7ba0c"},
|
||||||
{file = "immutables-0.17-cp39-cp39-win_amd64.whl", hash = "sha256:615ab26873a794559ccaf4e0e9afdb5aefad0867c15262ba64a55a12a5a41573"},
|
{file = "immutables-0.18-cp39-cp39-win_amd64.whl", hash = "sha256:1338aad6fd69f11442adcbb3402a028c90f6e945682ddb8aba462a3827f2d427"},
|
||||||
{file = "immutables-0.17.tar.gz", hash = "sha256:ad894446355b6f5289a9c84fb46f7c47c6ef2b1bfbdd2be6cb177dbb7f1587ad"},
|
{file = "immutables-0.18.tar.gz", hash = "sha256:5336c7974084cce62f7e29aaff81a3c3f75e0fd0a23a2faeb986ae0ea08d8cf4"},
|
||||||
]
|
]
|
||||||
importlib-metadata = [
|
importlib-metadata = [
|
||||||
{file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"},
|
{file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"},
|
||||||
|
|
@ -1011,6 +1107,10 @@ mccabe = [
|
||||||
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
|
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
|
||||||
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
|
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
|
||||||
]
|
]
|
||||||
|
odoorpc = [
|
||||||
|
{file = "OdooRPC-0.8.0-py2.py3-none-any.whl", hash = "sha256:abd8fa8385da635ac4848e9a8944c01929c0606cb99743d7b8f2e887800e84e3"},
|
||||||
|
{file = "OdooRPC-0.8.0.tar.gz", hash = "sha256:e90b6315805070fadbe6ced5c3891558216a02a475f0d8882700d219c3f34188"},
|
||||||
|
]
|
||||||
packaging = [
|
packaging = [
|
||||||
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
|
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
|
||||||
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
|
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
|
||||||
|
|
@ -1044,26 +1144,26 @@ pyparsing = [
|
||||||
pytest = [
|
pytest = [
|
||||||
{file = "pytest-6.1.2-py3-none-any.whl", hash = "sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe"},
|
{file = "pytest-6.1.2-py3-none-any.whl", hash = "sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe"},
|
||||||
{file = "pytest-6.1.2.tar.gz", hash = "sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"},
|
{file = "pytest-6.1.2.tar.gz", hash = "sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"},
|
||||||
{file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"},
|
{file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
|
||||||
{file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"},
|
{file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
|
||||||
]
|
]
|
||||||
pytest-cov = [
|
pytest-cov = [
|
||||||
{file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
|
{file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
|
||||||
{file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"},
|
{file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"},
|
||||||
]
|
]
|
||||||
pywin32 = [
|
pywin32 = [
|
||||||
{file = "pywin32-303-cp310-cp310-win32.whl", hash = "sha256:6fed4af057039f309263fd3285d7b8042d41507343cd5fa781d98fcc5b90e8bb"},
|
{file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"},
|
||||||
{file = "pywin32-303-cp310-cp310-win_amd64.whl", hash = "sha256:51cb52c5ec6709f96c3f26e7795b0bf169ee0d8395b2c1d7eb2c029a5008ed51"},
|
{file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"},
|
||||||
{file = "pywin32-303-cp311-cp311-win32.whl", hash = "sha256:d9b5d87ca944eb3aa4cd45516203ead4b37ab06b8b777c54aedc35975dec0dee"},
|
{file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"},
|
||||||
{file = "pywin32-303-cp311-cp311-win_amd64.whl", hash = "sha256:fcf44032f5b14fcda86028cdf49b6ebdaea091230eb0a757282aa656e4732439"},
|
{file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"},
|
||||||
{file = "pywin32-303-cp36-cp36m-win32.whl", hash = "sha256:aad484d52ec58008ca36bd4ad14a71d7dd0a99db1a4ca71072213f63bf49c7d9"},
|
{file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"},
|
||||||
{file = "pywin32-303-cp36-cp36m-win_amd64.whl", hash = "sha256:2a09632916b6bb231ba49983fe989f2f625cea237219530e81a69239cd0c4559"},
|
{file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"},
|
||||||
{file = "pywin32-303-cp37-cp37m-win32.whl", hash = "sha256:b1675d82bcf6dbc96363fca747bac8bff6f6e4a447a4287ac652aa4b9adc796e"},
|
{file = "pywin32-227-cp37-cp37m-win32.whl", hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"},
|
||||||
{file = "pywin32-303-cp37-cp37m-win_amd64.whl", hash = "sha256:c268040769b48a13367221fced6d4232ed52f044ffafeda247bd9d2c6bdc29ca"},
|
{file = "pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"},
|
||||||
{file = "pywin32-303-cp38-cp38-win32.whl", hash = "sha256:5f9ec054f5a46a0f4dfd72af2ce1372f3d5a6e4052af20b858aa7df2df7d355b"},
|
{file = "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"},
|
||||||
{file = "pywin32-303-cp38-cp38-win_amd64.whl", hash = "sha256:793bf74fce164bcffd9d57bb13c2c15d56e43c9542a7b9687b4fccf8f8a41aba"},
|
{file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"},
|
||||||
{file = "pywin32-303-cp39-cp39-win32.whl", hash = "sha256:7d3271c98434617a11921c5ccf74615794d97b079e22ed7773790822735cc352"},
|
{file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"},
|
||||||
{file = "pywin32-303-cp39-cp39-win_amd64.whl", hash = "sha256:79cbb862c11b9af19bcb682891c1b91942ec2ff7de8151e2aea2e175899cda34"},
|
{file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"},
|
||||||
]
|
]
|
||||||
pyyaml = [
|
pyyaml = [
|
||||||
{file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"},
|
{file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"},
|
||||||
|
|
@ -1112,6 +1212,10 @@ six = [
|
||||||
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
||||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||||
]
|
]
|
||||||
|
smmap = [
|
||||||
|
{file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"},
|
||||||
|
{file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
|
||||||
|
]
|
||||||
toml = [
|
toml = [
|
||||||
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
||||||
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
||||||
|
|
@ -1125,34 +1229,34 @@ towncrier = [
|
||||||
{file = "towncrier-21.9.0.tar.gz", hash = "sha256:9cb6f45c16e1a1eec9d0e7651165e7be60cd0ab81d13a5c96ca97a498ae87f48"},
|
{file = "towncrier-21.9.0.tar.gz", hash = "sha256:9cb6f45c16e1a1eec9d0e7651165e7be60cd0ab81d13a5c96ca97a498ae87f48"},
|
||||||
]
|
]
|
||||||
tox = [
|
tox = [
|
||||||
{file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"},
|
{file = "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"},
|
||||||
{file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"},
|
{file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"},
|
||||||
]
|
]
|
||||||
typed-ast = [
|
typed-ast = [
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"},
|
{file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"},
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"},
|
{file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"},
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"},
|
{file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"},
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"},
|
{file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"},
|
||||||
{file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"},
|
{file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"},
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"},
|
{file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"},
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"},
|
{file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"},
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"},
|
{file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"},
|
||||||
{file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"},
|
{file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"},
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"},
|
{file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"},
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"},
|
{file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"},
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"},
|
{file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"},
|
||||||
{file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"},
|
{file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"},
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"},
|
{file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"},
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"},
|
{file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"},
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"},
|
{file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"},
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"},
|
{file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"},
|
||||||
{file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"},
|
{file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"},
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"},
|
{file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"},
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"},
|
{file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"},
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"},
|
{file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"},
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"},
|
{file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"},
|
||||||
{file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"},
|
{file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"},
|
||||||
{file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"},
|
{file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
|
||||||
]
|
]
|
||||||
typing-extensions = [
|
typing-extensions = [
|
||||||
{file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
|
{file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
|
||||||
|
|
@ -1163,8 +1267,12 @@ urllib3 = [
|
||||||
{file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
|
{file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
|
||||||
]
|
]
|
||||||
virtualenv = [
|
virtualenv = [
|
||||||
{file = "virtualenv-20.14.0-py2.py3-none-any.whl", hash = "sha256:1e8588f35e8b42c6ec6841a13c5e88239de1e6e4e4cedfd3916b306dc826ec66"},
|
{file = "virtualenv-20.14.1-py2.py3-none-any.whl", hash = "sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a"},
|
||||||
{file = "virtualenv-20.14.0.tar.gz", hash = "sha256:8e5b402037287126e81ccde9432b95a8be5b19d36584f64957060a3488c11ca8"},
|
{file = "virtualenv-20.14.1.tar.gz", hash = "sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5"},
|
||||||
|
]
|
||||||
|
websocket-client = [
|
||||||
|
{file = "websocket-client-1.3.1.tar.gz", hash = "sha256:6278a75065395418283f887de7c3beafb3aa68dada5cacbe4b214e8d26da499b"},
|
||||||
|
{file = "websocket_client-1.3.1-py3-none-any.whl", hash = "sha256:074e2ed575e7c822fc0940d31c3ac9bb2b1142c303eafcf3e304e6ce035522e8"},
|
||||||
]
|
]
|
||||||
win32-setctime = [
|
win32-setctime = [
|
||||||
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
|
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
|
||||||
|
|
|
||||||
|
|
@ -17,7 +17,11 @@ classifiers = [
|
||||||
"Intended Audience :: Developers",
|
"Intended Audience :: Developers",
|
||||||
"Development Status :: 2 - Pre-Alpha",
|
"Development Status :: 2 - Pre-Alpha",
|
||||||
"Operating System :: Unix",
|
"Operating System :: Unix",
|
||||||
"Programming Language :: Python :: 3",
|
"Programming Language :: Python :: 3.6",
|
||||||
|
"Programming Language :: Python :: 3.7",
|
||||||
|
"Programming Language :: Python :: 3.8",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
"Framework :: Odoo",
|
"Framework :: Odoo",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
@ -27,11 +31,15 @@ odoo-openupgrade-wizard = "odoo_openupgrade_wizard.cli:main"
|
||||||
[tool.poetry.dependencies]
|
[tool.poetry.dependencies]
|
||||||
python = "^3.6"
|
python = "^3.6"
|
||||||
click = "^7.0"
|
click = "^7.0"
|
||||||
|
click-loglevel = "^0.4"
|
||||||
|
odoorpc = "^0.8"
|
||||||
loguru = "^0.6"
|
loguru = "^0.6"
|
||||||
plumbum = "^1.7"
|
plumbum = "^1.7"
|
||||||
single-source = "^0.3"
|
single-source = "^0.3"
|
||||||
git-aggregator = "^2.1"
|
git-aggregator = "^2.1"
|
||||||
|
docker = "^5.0"
|
||||||
pyyaml = "5.4.1"
|
pyyaml = "5.4.1"
|
||||||
|
GitPython = "^3.1"
|
||||||
|
|
||||||
[tool.poetry.dev-dependencies]
|
[tool.poetry.dev-dependencies]
|
||||||
pytest = [
|
pytest = [
|
||||||
|
|
|
||||||
|
|
@ -1,2 +1,2 @@
|
||||||
[pytest]
|
[pytest]
|
||||||
norecursedirs = tests/output_*
|
norecursedirs=tests/data/*
|
||||||
|
|
|
||||||
64
tests/__init__.py
Normal file
64
tests/__init__.py
Normal file
|
|
@ -0,0 +1,64 @@
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from click.testing import CliRunner
|
||||||
|
from plumbum.cmd import mkdir
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.cli import main
|
||||||
|
|
||||||
|
_logger = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def assert_result_cli_invoke(result):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def move_to_test_folder():
|
||||||
|
"""function to call at the beginning at the tests
|
||||||
|
to change the current working directory.
|
||||||
|
Note : this function is idempotens, to avoid to generate errors
|
||||||
|
if many tests scripts are executed.
|
||||||
|
"""
|
||||||
|
if os.getcwd().endswith("tests/data/output"):
|
||||||
|
return
|
||||||
|
test_folder_path = Path("tests/data/output")
|
||||||
|
mkdir([test_folder_path, "--parents"])
|
||||||
|
os.chdir(test_folder_path)
|
||||||
|
|
||||||
|
|
||||||
|
def cli_runner_invoke(cmd):
|
||||||
|
result = CliRunner().invoke(
|
||||||
|
main,
|
||||||
|
cmd,
|
||||||
|
catch_exceptions=False,
|
||||||
|
)
|
||||||
|
if not result.exit_code == 0:
|
||||||
|
_logger.error("exit_code: %s" % result.exit_code)
|
||||||
|
_logger.error("output: %s" % result.output)
|
||||||
|
assert result.exit_code == 0
|
||||||
|
|
||||||
|
|
||||||
|
def build_ctx_from_config_file() -> dict:
|
||||||
|
env_folder_path = Path(".")
|
||||||
|
|
||||||
|
class context:
|
||||||
|
pass
|
||||||
|
|
||||||
|
ctx = context()
|
||||||
|
setattr(ctx, "obj", {})
|
||||||
|
config_file_path = env_folder_path / "config.yml"
|
||||||
|
if not config_file_path.exists():
|
||||||
|
raise Exception("Configuration file not found %s" % config_file_path)
|
||||||
|
with open(config_file_path) as file:
|
||||||
|
config = yaml.safe_load(file)
|
||||||
|
ctx.obj["config"] = config
|
||||||
|
file.close()
|
||||||
|
|
||||||
|
ctx.obj["env_folder_path"] = env_folder_path
|
||||||
|
ctx.obj["src_folder_path"] = env_folder_path / Path("src")
|
||||||
|
ctx.obj["postgres_folder_path"] = env_folder_path / Path(
|
||||||
|
"postgres_data/data"
|
||||||
|
)
|
||||||
|
return ctx
|
||||||
|
|
@ -1,30 +1,25 @@
|
||||||
import filecmp
|
import filecmp
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from click.testing import CliRunner
|
from . import cli_runner_invoke, move_to_test_folder
|
||||||
from plumbum.cmd import mkdir
|
|
||||||
|
|
||||||
from odoo_openupgrade_wizard.cli import main
|
|
||||||
|
|
||||||
|
|
||||||
def test_cli_init():
|
def test_cli_init():
|
||||||
output_folder_path = Path("./tests/output_01")
|
move_to_test_folder()
|
||||||
expected_folder_path = Path("./tests/output_01_expected")
|
expected_folder_path = Path("../output_expected").absolute()
|
||||||
mkdir([output_folder_path, "--parents"])
|
|
||||||
result = CliRunner().invoke(
|
cli_runner_invoke(
|
||||||
main,
|
|
||||||
[
|
[
|
||||||
"--env-folder=%s" % output_folder_path,
|
"--log-level=DEBUG",
|
||||||
"init",
|
"init",
|
||||||
"--initial-release=9.0",
|
"--project-name=test-cli",
|
||||||
"--final-release=12.0",
|
"--initial-release=13.0",
|
||||||
"--extra-repository="
|
"--final-release=14.0",
|
||||||
"OCA/web,OCA/server-tools,GRAP/grap-odoo-incubator",
|
"--extra-repository=OCA/web,OCA/server-tools",
|
||||||
],
|
]
|
||||||
)
|
)
|
||||||
assert result.exit_code == 0
|
|
||||||
|
|
||||||
assert filecmp.cmp(
|
assert filecmp.cmp(
|
||||||
output_folder_path / Path("config.yml"),
|
Path("config.yml"),
|
||||||
expected_folder_path / Path("config.yml"),
|
expected_folder_path / Path("config.yml"),
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,40 +1,25 @@
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from click.testing import CliRunner
|
from . import cli_runner_invoke, move_to_test_folder
|
||||||
from plumbum.cmd import mkdir
|
|
||||||
|
|
||||||
from odoo_openupgrade_wizard.cli import main
|
|
||||||
|
|
||||||
|
|
||||||
def test_cli_get_code():
|
def test_cli_get_code():
|
||||||
output_folder_path = Path("./tests/output_02")
|
move_to_test_folder()
|
||||||
mkdir([output_folder_path, "--parents"])
|
cli_runner_invoke(
|
||||||
|
|
||||||
# We initialize an env with only one version to avoid to git clone
|
|
||||||
# large data
|
|
||||||
CliRunner().invoke(
|
|
||||||
main,
|
|
||||||
[
|
[
|
||||||
"--env-folder=%s" % output_folder_path,
|
"--log-level=DEBUG",
|
||||||
"init",
|
|
||||||
"--initial-release=14.0",
|
|
||||||
"--final-release=14.0",
|
|
||||||
"--extra-repository=OCA/web",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
result = CliRunner().invoke(
|
|
||||||
main,
|
|
||||||
[
|
|
||||||
"--env-folder=%s" % output_folder_path,
|
|
||||||
"get-code",
|
"get-code",
|
||||||
],
|
]
|
||||||
)
|
)
|
||||||
assert result.exit_code == 0
|
|
||||||
|
|
||||||
openupgrade_path = output_folder_path / Path(
|
# Check V13
|
||||||
"./src/env_14.0/src/openupgrade"
|
openupgrade_path = Path("./src/env_13.0/src/openupgrade")
|
||||||
)
|
assert openupgrade_path.exists()
|
||||||
|
|
||||||
|
assert (openupgrade_path / Path("odoo")).exists()
|
||||||
|
|
||||||
|
# check V14
|
||||||
|
openupgrade_path = Path("./src/env_14.0/src/openupgrade")
|
||||||
|
|
||||||
assert openupgrade_path.exists()
|
assert openupgrade_path.exists()
|
||||||
|
|
||||||
|
|
|
||||||
24
tests/cli_03_docker_build_test.py
Normal file
24
tests/cli_03_docker_build_test.py
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
from odoo_openupgrade_wizard.tools_docker import get_docker_client
|
||||||
|
|
||||||
|
from . import cli_runner_invoke, move_to_test_folder
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_docker_build():
|
||||||
|
move_to_test_folder()
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"docker-build",
|
||||||
|
"--releases=13.0,14.0",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
docker_client = get_docker_client()
|
||||||
|
|
||||||
|
assert docker_client.images.get(
|
||||||
|
"odoo-openupgrade-wizard-image__test-cli__13.0"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert docker_client.images.get(
|
||||||
|
"odoo-openupgrade-wizard-image__test-cli__14.0"
|
||||||
|
)
|
||||||
54
tests/cli_04_run_test.py
Normal file
54
tests/cli_04_run_test.py
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.tools_docker import get_docker_client
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import execute_sql_request
|
||||||
|
|
||||||
|
from . import (
|
||||||
|
build_ctx_from_config_file,
|
||||||
|
cli_runner_invoke,
|
||||||
|
move_to_test_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_run():
|
||||||
|
move_to_test_folder()
|
||||||
|
db_name = "database_test_cli___run"
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"run",
|
||||||
|
"--step=1",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
"--init-modules=base",
|
||||||
|
"--stop-after-init",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ensure that a subfolder filestore/DB_NAME has been created
|
||||||
|
db_filestore_path = Path("./filestore/filestore/%s" % db_name)
|
||||||
|
assert db_filestore_path.exists()
|
||||||
|
|
||||||
|
# Ensure that 'base' module is installed
|
||||||
|
ctx = build_ctx_from_config_file()
|
||||||
|
request = (
|
||||||
|
"SELECT id"
|
||||||
|
" FROM ir_module_module"
|
||||||
|
" WHERE state ='installed'"
|
||||||
|
" AND name='base';"
|
||||||
|
)
|
||||||
|
assert execute_sql_request(ctx, request, database=db_name)
|
||||||
|
|
||||||
|
# Ensure that 'point_of_sale' module is not installed
|
||||||
|
request = (
|
||||||
|
"SELECT id"
|
||||||
|
" FROM ir_module_module"
|
||||||
|
" WHERE state ='installed'"
|
||||||
|
" AND name='point_of_sale';"
|
||||||
|
)
|
||||||
|
assert not execute_sql_request(ctx, request, database=db_name)
|
||||||
|
|
||||||
|
# Ensure that all the containers are removed
|
||||||
|
docker_client = get_docker_client()
|
||||||
|
assert not docker_client.containers.list(
|
||||||
|
all=True, filters={"name": "odoo-openupgrade-wizard"}
|
||||||
|
)
|
||||||
58
tests/cli_05_execute_script_python_test.py
Normal file
58
tests/cli_05_execute_script_python_test.py
Normal file
|
|
@ -0,0 +1,58 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from plumbum.cmd import cp
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import execute_sql_request
|
||||||
|
|
||||||
|
from . import (
|
||||||
|
build_ctx_from_config_file,
|
||||||
|
cli_runner_invoke,
|
||||||
|
move_to_test_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_execute_script_python():
|
||||||
|
move_to_test_folder()
|
||||||
|
extra_script_path = Path("../extra_script/click_odoo_test.py").absolute()
|
||||||
|
cp(
|
||||||
|
extra_script_path,
|
||||||
|
Path("click_odoo_test.py"),
|
||||||
|
)
|
||||||
|
|
||||||
|
db_name = "database_test_cli___execute_script_python"
|
||||||
|
|
||||||
|
# Install Odoo on V13 with base installed
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"run",
|
||||||
|
"--step=1",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
"--init-modules=base",
|
||||||
|
"--stop-after-init",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Compute partners quantity
|
||||||
|
ctx = build_ctx_from_config_file()
|
||||||
|
request = "SELECT count(*)" " FROM res_partner;"
|
||||||
|
partner_quantity_before = int(
|
||||||
|
execute_sql_request(ctx, request, database=db_name)[0][0]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Execute Custom Python Script
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"execute-script-python",
|
||||||
|
"--step=1",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
"--script-file-path=click_odoo_test.py",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
partner_quantity_after = int(
|
||||||
|
execute_sql_request(ctx, request, database=db_name)[0][0]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ensure that partners have been created by click_odoo_test.py
|
||||||
|
assert partner_quantity_after == (partner_quantity_before + 10)
|
||||||
48
tests/cli_06_execute_script_sql_test.py
Normal file
48
tests/cli_06_execute_script_sql_test.py
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from plumbum.cmd import cp
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import (
|
||||||
|
ensure_database,
|
||||||
|
execute_sql_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
from . import (
|
||||||
|
build_ctx_from_config_file,
|
||||||
|
cli_runner_invoke,
|
||||||
|
move_to_test_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_execute_script_sql():
|
||||||
|
move_to_test_folder()
|
||||||
|
extra_script_path = Path(
|
||||||
|
"../extra_script/pre-migration-custom_test.sql"
|
||||||
|
).absolute()
|
||||||
|
|
||||||
|
# Deploy SQL Script
|
||||||
|
destination_path = Path("scripts/step_01__update__13.0")
|
||||||
|
cp([extra_script_path, destination_path])
|
||||||
|
ctx = build_ctx_from_config_file()
|
||||||
|
|
||||||
|
# Reset database
|
||||||
|
db_name = "database_test_cli___execute_script_sql"
|
||||||
|
ensure_database(ctx, db_name, state="absent")
|
||||||
|
ensure_database(ctx, db_name, state="present")
|
||||||
|
|
||||||
|
# TODO call with script-file-path
|
||||||
|
# to avoid to copy file in scripts/step_xxx folder
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"execute-script-sql",
|
||||||
|
"--step=1",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ensure that the request has been done correctly
|
||||||
|
request = "SELECT name from city order by id;"
|
||||||
|
result = execute_sql_request(ctx, request, database=db_name)
|
||||||
|
|
||||||
|
assert result == [["Chicago"], ["Cavalaire Sur Mer"]]
|
||||||
62
tests/cli_07_upgrade_test.py
Normal file
62
tests/cli_07_upgrade_test.py
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import (
|
||||||
|
ensure_database,
|
||||||
|
execute_sql_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
from . import (
|
||||||
|
build_ctx_from_config_file,
|
||||||
|
cli_runner_invoke,
|
||||||
|
move_to_test_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_upgrade():
|
||||||
|
move_to_test_folder()
|
||||||
|
|
||||||
|
# Initialize database
|
||||||
|
db_name = "database_test_cli___upgrade"
|
||||||
|
ctx = build_ctx_from_config_file()
|
||||||
|
ensure_database(ctx, db_name, state="absent")
|
||||||
|
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"run",
|
||||||
|
"--step=1",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
"--init-modules=base",
|
||||||
|
"--stop-after-init",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ensure that 'base' module is installed at 13.0
|
||||||
|
request = (
|
||||||
|
"SELECT latest_version"
|
||||||
|
" FROM ir_module_module"
|
||||||
|
" WHERE state ='installed'"
|
||||||
|
" AND name='base';"
|
||||||
|
)
|
||||||
|
latest_version = execute_sql_request(ctx, request, database=db_name)
|
||||||
|
|
||||||
|
assert latest_version[0][0].startswith("13.")
|
||||||
|
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"upgrade",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
"--first-step=1",
|
||||||
|
"--last-step=3",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ensure that 'base' module is installed at 14.0
|
||||||
|
request = (
|
||||||
|
"SELECT latest_version"
|
||||||
|
" FROM ir_module_module"
|
||||||
|
" WHERE state ='installed'"
|
||||||
|
" AND name='base';"
|
||||||
|
)
|
||||||
|
latest_version = execute_sql_request(ctx, request, database=db_name)
|
||||||
|
|
||||||
|
assert latest_version[0][0].startswith("14.")
|
||||||
13
tests/cli_08_estimate_workload_test.py
Normal file
13
tests/cli_08_estimate_workload_test.py
Normal file
|
|
@ -0,0 +1,13 @@
|
||||||
|
from . import cli_runner_invoke, move_to_test_folder
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_estimate_workload():
|
||||||
|
move_to_test_folder()
|
||||||
|
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"estimate-workload",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
# TODO, write test
|
||||||
39
tests/cli_20_install_from_csv_test.py
Normal file
39
tests/cli_20_install_from_csv_test.py
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
from odoo_openupgrade_wizard.tools_postgres import (
|
||||||
|
ensure_database,
|
||||||
|
execute_sql_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
from . import (
|
||||||
|
build_ctx_from_config_file,
|
||||||
|
cli_runner_invoke,
|
||||||
|
move_to_test_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_install_from_csv():
|
||||||
|
move_to_test_folder()
|
||||||
|
|
||||||
|
# Initialize database
|
||||||
|
db_name = "database_test_cli___install_from_csv"
|
||||||
|
ctx = build_ctx_from_config_file()
|
||||||
|
ensure_database(ctx, db_name, state="absent")
|
||||||
|
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"install-from-csv",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ensure that 'account' is installed
|
||||||
|
# and also 'product', by dependencies
|
||||||
|
request = (
|
||||||
|
"SELECT count(*)"
|
||||||
|
" FROM ir_module_module"
|
||||||
|
" WHERE state ='installed'"
|
||||||
|
" AND name in ('product', 'account');"
|
||||||
|
)
|
||||||
|
module_qty = int(execute_sql_request(ctx, request, database=db_name)[0][0])
|
||||||
|
|
||||||
|
assert module_qty == 2
|
||||||
41
tests/cli_21_generate_module_analysis_test.py
Normal file
41
tests/cli_21_generate_module_analysis_test.py
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from odoo_openupgrade_wizard.tools_odoo import get_odoo_env_path
|
||||||
|
|
||||||
|
from . import (
|
||||||
|
build_ctx_from_config_file,
|
||||||
|
cli_runner_invoke,
|
||||||
|
move_to_test_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_generate_module_analysis():
|
||||||
|
move_to_test_folder()
|
||||||
|
db_name = "database_test_cli___generate_module_analysis"
|
||||||
|
|
||||||
|
ctx = build_ctx_from_config_file()
|
||||||
|
# identify main analysis file of openupgrade
|
||||||
|
analysis_file_path = get_odoo_env_path(ctx, {"release": 14.0}) / Path(
|
||||||
|
"src/openupgrade/openupgrade_scripts/scripts"
|
||||||
|
"/base/14.0.1.3/upgrade_general_log.txt"
|
||||||
|
)
|
||||||
|
|
||||||
|
# We remove this file and run the analysis
|
||||||
|
try:
|
||||||
|
analysis_file_path.unlink()
|
||||||
|
except FileNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
analysis_file_path
|
||||||
|
cli_runner_invoke(
|
||||||
|
[
|
||||||
|
"--log-level=DEBUG",
|
||||||
|
"generate-module-analysis",
|
||||||
|
"--step=2",
|
||||||
|
"--database=%s" % db_name,
|
||||||
|
"--modules=base",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# The file should has been recreated by the analysis command
|
||||||
|
assert analysis_file_path.exists()
|
||||||
11
tests/data/extra_script/click_odoo_test.py
Normal file
11
tests/data/extra_script/click_odoo_test.py
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
import logging
|
||||||
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
_logger.info("click_odoo_test.py : Begin of script ...")
|
||||||
|
|
||||||
|
env = env # noqa: F821
|
||||||
|
|
||||||
|
for i in range(0, 10):
|
||||||
|
env["res.partner"].create({"name": "Partner #%d" % (i)})
|
||||||
|
|
||||||
|
_logger.info("click_odoo_test.py : End of script.")
|
||||||
46
tests/data/extra_script/post-migration-custom_test.py
Normal file
46
tests/data/extra_script/post-migration-custom_test.py
Normal file
|
|
@ -0,0 +1,46 @@
|
||||||
|
# Unused for the time being
|
||||||
|
|
||||||
|
# def _check_orm_usage(self):
|
||||||
|
# # Classic ORM usage Checks
|
||||||
|
# partners = self.browse_by_search("res.partner")
|
||||||
|
|
||||||
|
# self.browse_by_create("res.partner", {"name": "New Partner"})
|
||||||
|
|
||||||
|
# new_partners = self.browse_by_search("res.partner")
|
||||||
|
|
||||||
|
# if len(partners) + 1 != len(new_partners):
|
||||||
|
# raise Exception("Creation of partner failed.")
|
||||||
|
|
||||||
|
|
||||||
|
# def _check_modules(self):
|
||||||
|
# if self.check_modules_installed("sale"):
|
||||||
|
# self.uninstall_modules("sale")
|
||||||
|
|
||||||
|
# self.install_modules("sale")
|
||||||
|
|
||||||
|
# if not self.check_modules_installed("sale"):
|
||||||
|
# raise Exception("'sale' module should be installed")
|
||||||
|
|
||||||
|
# self.uninstall_modules(["product"])
|
||||||
|
|
||||||
|
# if self.check_modules_installed("sale"):
|
||||||
|
# raise Exception(
|
||||||
|
# "'sale' module should not be installed"
|
||||||
|
# " after uninstallation of product"
|
||||||
|
# )
|
||||||
|
|
||||||
|
|
||||||
|
# def _check_models(self):
|
||||||
|
# if not self.check_models_present("res.partner"):
|
||||||
|
# raise Exception("'res.partner' model should be present.")
|
||||||
|
|
||||||
|
# if self.check_models_present("res.partner.unexisting.model"):
|
||||||
|
# raise Exception(
|
||||||
|
# "'res.partner.unexisting.model' model" " should not be present."
|
||||||
|
# )
|
||||||
|
|
||||||
|
|
||||||
|
# def main(self):
|
||||||
|
# _check_orm_usage(self)
|
||||||
|
# _check_modules(self)
|
||||||
|
# _check_models(self)
|
||||||
7
tests/data/extra_script/pre-migration-custom_test.sql
Normal file
7
tests/data/extra_script/pre-migration-custom_test.sql
Normal file
|
|
@ -0,0 +1,7 @@
|
||||||
|
CREATE TABLE city (
|
||||||
|
id int,
|
||||||
|
name varchar
|
||||||
|
);
|
||||||
|
|
||||||
|
insert INTO city (id, name) values (1, 'Chicago');
|
||||||
|
insert INTO city (id, name) values (2, 'Cavalaire Sur Mer');
|
||||||
32
tests/data/output_expected/config.yml
Normal file
32
tests/data/output_expected/config.yml
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
|
||||||
|
project_name: test-cli
|
||||||
|
|
||||||
|
postgres_image_name: postgres:13
|
||||||
|
postgres_container_name: test-cli-db
|
||||||
|
|
||||||
|
odoo_host_xmlrpc_port: 9069
|
||||||
|
odoo_default_country_code: FR
|
||||||
|
|
||||||
|
odoo_versions:
|
||||||
|
|
||||||
|
- release: 13.0
|
||||||
|
|
||||||
|
- release: 14.0
|
||||||
|
|
||||||
|
|
||||||
|
migration_steps:
|
||||||
|
|
||||||
|
- name: 1
|
||||||
|
release: 13.0
|
||||||
|
action: update
|
||||||
|
complete_name: step_01__update__13.0
|
||||||
|
|
||||||
|
- name: 2
|
||||||
|
release: 14.0
|
||||||
|
action: upgrade
|
||||||
|
complete_name: step_02__upgrade__14.0
|
||||||
|
|
||||||
|
- name: 3
|
||||||
|
release: 14.0
|
||||||
|
action: update
|
||||||
|
complete_name: step_03__update__14.0
|
||||||
|
|
@ -1,37 +0,0 @@
|
||||||
odoo_versions:
|
|
||||||
|
|
||||||
- release: 9.0
|
|
||||||
|
|
||||||
- release: 10.0
|
|
||||||
|
|
||||||
- release: 11.0
|
|
||||||
|
|
||||||
- release: 12.0
|
|
||||||
|
|
||||||
|
|
||||||
migration_steps:
|
|
||||||
|
|
||||||
- name: 1
|
|
||||||
release: 9.0
|
|
||||||
action: update
|
|
||||||
complete_name: step_01__update__9.0
|
|
||||||
|
|
||||||
- name: 2
|
|
||||||
release: 10.0
|
|
||||||
action: upgrade
|
|
||||||
complete_name: step_02__upgrade__10.0
|
|
||||||
|
|
||||||
- name: 3
|
|
||||||
release: 11.0
|
|
||||||
action: upgrade
|
|
||||||
complete_name: step_03__upgrade__11.0
|
|
||||||
|
|
||||||
- name: 4
|
|
||||||
release: 12.0
|
|
||||||
action: upgrade
|
|
||||||
complete_name: step_04__upgrade__12.0
|
|
||||||
|
|
||||||
- name: 6
|
|
||||||
release: 12.0
|
|
||||||
action: update
|
|
||||||
complete_name: step_06__update__12.0
|
|
||||||
Loading…
Reference in New Issue
Block a user