Merge branch 'dev-3' into 'main'

[ADD] analyse-workload (2/2) WIP

See merge request odoo-openupgrade-wizard/odoo-openupgrade-wizard!3
This commit is contained in:
LE GAL SYLVAIN 2022-06-17 07:32:10 +00:00
commit b4b00a69e6
20 changed files with 1231 additions and 273 deletions

.gitlab-ci.old.yml Normal file
View File

@ -0,0 +1,63 @@
image: docker:19.03
services:
- docker:dind
stages:
- linting
- tests
# TODO, fix me : call all the pre-commit stuff instead.
black:
stage: linting
image: python
script:
# Install pipx
- pip install --user pipx
- python -m pipx ensurepath
- source ~/.profile
# Install black
- pipx install black
- black --version
# Log
# Call black Check
- black --check .
pytest:
stage: tests
script:
# Install Python 3.8. (3.9 is the latest available;
# however, docker 19.03 comes with python3.8 and docker 20.10 comes with python3.10)
- apk add python3==~3.8 python3-dev==~3.8
- apk add gcc g++ libffi-dev
- apk add git
- python3 -m venv /.gitlab-ci-venv
- source /.gitlab-ci-venv/bin/activate
- pip install --upgrade pip
- pip install poetry
- poetry --version
- poetry install -v
- echo $PATH
- echo $PYTHONPATH
- poetry run pytest --version
- poetry run pytest --verbosity=2 --exitfirst --cov odoo_openupgrade_wizard
tests/cli_01_init_test.py
tests/cli_02_get_code_test.py
tests/cli_03_docker_build_test.py
tests/cli_04_run_test.py
tests/cli_05_execute_script_python_test.py
tests/cli_06_execute_script_sql_test.py
tests/cli_07_upgrade_test.py
tests/cli_08_estimate_workload_test.py
# Disabled tests on gitlab-ci:
# The following tests work locally but not on gitlab-ci,
# because calling OdooRPC on a container mounted
# inside another container (gitlab) doesn't work.
# tests/cli_20_install_from_csv_test.py
# tests/cli_21_generate_module_analysis_test.py

View File

@ -1,42 +1,26 @@
image: docker:19.03
services:
- docker:dind
stages:
- linting
- tests
- lint
- test
# TODO, fix me : call all the pre-commit stuff instead.
black:
stage: linting
image: python
pre-commit:
stage: lint
tags:
- shell
only:
- merge_requests
script:
# Install pipx
- pip install --user pipx
- python -m pipx ensurepath
- source ~/.profile
# Install black
- pipx install black
- black --version
# Log
# Call black Check
- black --check .
- pre-commit run --all --show-diff-on-failure --verbose --color always
pytest:
stage: tests
stage: test
tags:
- shell
only:
- merge_requests
script:
# Install Python 3.8. (3.9 is the latest available;
# however, docker 19.03 comes with python3.8 and docker 20.10 comes with python3.10)
- apk add python3==~3.8 python3-dev==~3.8
- apk add gcc g++ libffi-dev
- apk add git
- python3 -m venv /.gitlab-ci-venv
- source /.gitlab-ci-venv/bin/activate
- pip install --upgrade pip
- python3 -m venv ./.gitlab-ci-venv
- source ./.gitlab-ci-venv/bin/activate
# - pip install --upgrade pip
- pip install poetry
- poetry --version
- poetry install -v
@ -53,11 +37,5 @@ pytest:
tests/cli_06_execute_script_sql_test.py
tests/cli_07_upgrade_test.py
tests/cli_08_estimate_workload_test.py
# Disabled tests on gitlab-ci:
# The following tests work locally but not on gitlab-ci,
# because calling OdooRPC on a container mounted
# inside another container (gitlab) doesn't work.
# tests/cli_20_install_from_csv_test.py
# tests/cli_21_generate_module_analysis_test.py
tests/cli_20_install_from_csv_test.py
tests/cli_21_generate_module_analysis_test.py

View File

@ -23,7 +23,7 @@ repos:
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 20.8b1
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8

View File

@ -30,6 +30,7 @@ docker exec db psql --username=odoo --dbname=test_v12 -c "update res_partner set
in ``cli_B_03_run_test.py``...
- add
# Try gitlab runner
@ -38,3 +39,8 @@ curl -LJO "https://gitlab-runner-downloads.s3.amazonaws.com/latest/deb/gitlab-ru
sudo dpkg -i gitlab-runner_amd64.deb
(https://docs.gitlab.com/runner/install/linux-manually.html)
# TODO:
- check dynamic user id with
https://github.com/camptocamp/docker-odoo-project/blob/master/bin/docker-entrypoint.sh

View File

@ -13,8 +13,10 @@ upgrade of Odoo Community Edition. (formerly OpenERP).
It works with Openupgrade OCA tools. (https://github.com/oca/openupgrade)
This tool is useful for complex migrations:
- skip several versions
- complex custom code
- migrate several versions
- take advantage of the migration to install / uninstall modules
- execute sql requests or click-odoo scripts between each migration
- analyse workload
It creates a migration environment (with all the code available)
and provides helpers to run (and replay) migrations until it works.
@ -25,8 +27,17 @@ and provides helpers to run (and replay) migrations until it works.
# Installation
``pipx install odoo-openupgrade-wizard``.
**Prerequisites**
* You should have docker installed on your system
* The tool runs on Debian systems
**Installation**
``pipx install odoo-openupgrade-wizard``
(Coming soon. Not yet deployed on PyPI! See the alternative installation
in the ``DEVELOP.md`` file.)
# Usage
@ -46,24 +57,21 @@ Initialize a folder to make a migration from a 10.0 and a 12.0 database.
This will generate the following structure :
```
config.yml
modules.csv
filestore/
log/
2022_03_25__23_12_41__init.log
...
postgres_data/
scripts/
step_1__update__10.0/
pre-migration.sql
post-migration.py
step_2__upgrade__11.0/
pre-migration.sql
post-migration.py
step_2__upgrade__12.0/
pre-migration.sql
post-migration.py
...
step_3__upgrade__12.0/
...
step_4__update__12.0/
pre-migration.sql
post-migration.py
...
src/
env_10.0/
debian_requirements.txt
@ -72,13 +80,12 @@ src/
python_requirements.txt
repos.yml
src/
odoo/
openupgrade/
env_11.0/
...
env_12.0/
...
config.yml
modules.csv
```
* ``config.yml`` is the main configuration file of your project.
@ -90,11 +97,16 @@ src/
* ``log`` folder will contain all the logs of ``odoo-openupgrade-wizard``
and the logs of the odoo instances that will be executed.
* ``filestore`` folder will contain the filestore of the odoo database(s)
* ``postgres_data`` folder will be used by the postgres docker image to store
the database.
* ``scripts`` folder contains a folder per migration step. In each step folder:
- ``pre-migration.sql`` can contain extra SQL queries you want to execute
before beginning the step.
- ``post-migration.py`` can contain extra Python commands to execute
after the execution of the step. (the orm will be available)
after the execution of the step.
The script will be executed with the ``click-odoo`` command. The whole ORM is available
via the ``env`` variable.
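For example, a minimal ``post-migration.py`` sketch (illustrative model and values; ``env`` is the environment provided by ``click-odoo``):
```
# post-migration.py (illustrative sketch)
# `env` is the Odoo environment injected by click-odoo.
partners = env["res.partner"].search([("active", "=", True)])
for partner in partners:
    # example of a data fix applied after the migration step
    partner.lang = "fr_FR"
```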
@ -115,16 +127,22 @@ src/
required to run the odoo instance.
- ``odoo.cfg`` file. Add here extra configuration required for your custom modules.
the classical keys (``db_host``, ``db_port``, etc...) are automatically autogenerated.
The classical keys (``db_host``, ``db_port``, etc.) are automatically
generated.
At this step, you should change the autogenerated files.
You can use the default files if you have a very simple odoo instance without custom code,
extra repositories, or dependencies...
**Note:**
- In your repos.yml, keep the ``openupgrade`` and ``server-tools`` repositories
to have all the features of the libraries available.
## ``odoo-openupgrade-wizard get-code``
Prerequisites: init
```
odoo-openupgrade-wizard get-code
```
@ -150,6 +168,8 @@ odoo-openupgrade-wizard get-code --releases 10.0,11.0
## ``odoo-openupgrade-wizard docker-build``
Prerequisites: init + get-code
This will build local docker images that will be used in the following steps.
This script will pull official odoo docker images, defined in the ``Dockerfile`` of
@ -179,7 +199,9 @@ odoo-openupgrade-wizard-image---my-customer-10-12---10.0 latest 9d94dce2bd4
## ``odoo-openupgrade-wizard run``
## ``odoo-openupgrade-wizard run`` (BETA)
Prerequisites: init + get-code + build
```
odoo-openupgrade-wizard run\
@ -206,6 +228,8 @@ at your host, at the following url : http://localhost:9069
## ``odoo-openupgrade-wizard install-from-csv``
Prerequisites: init + get-code + build
```
odoo-openupgrade-wizard install-from-csv\
--database DB_NAME
The database will be created if it doesn't exist.
## ``odoo-openupgrade-wizard upgrade``
## ``odoo-openupgrade-wizard upgrade`` (BETA)
Prerequisites: init + get-code + build
```
odoo-openupgrade-wizard upgrade\
@ -243,7 +269,9 @@ For each step, it will :
## ``odoo-openupgrade-wizard generate-module-analysis``
## ``odoo-openupgrade-wizard generate-module-analysis`` (BETA)
Prerequisites: init + get-code + build
```
odoo-openupgrade-wizard generate-module-analysis\
and the previous version. It will generate the analysis_file.txt files present
in the OpenUpgrade project.
You can also use this function to analyze differences for custom / OCA modules
between several versions, in case of refactoring.
## ``odoo-openupgrade-wizard estimate-workload``
Prerequisites: init + get-code
```
odoo-openupgrade-wizard estimate-workload
```
Generates an HTML file named ``analysis.html`` with all the information regarding
the work to do for the migration. It:
- checks that the modules are present in each version (taking module
renames and merges into account)
- checks that the analysis and migration have been done for the official
modules present in odoo/odoo
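By default the module list is read from ``modules.csv``; with the ``--extra-modules`` option introduced in this change, an explicit list can be analysed instead. For example (illustrative module names):
```
odoo-openupgrade-wizard estimate-workload --extra-modules account,product,base
```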

View File

@ -5,7 +5,7 @@ from odoo_openupgrade_wizard.cli_options import (
get_odoo_versions_from_options,
releases_options,
)
from odoo_openupgrade_wizard.tools_docker import build_image
from odoo_openupgrade_wizard.tools_docker import build_image, pull_image
from odoo_openupgrade_wizard.tools_odoo import (
get_docker_image_tag,
get_odoo_env_path,
@ -18,6 +18,10 @@ from odoo_openupgrade_wizard.tools_odoo import (
def docker_build(ctx, releases):
"""Build Odoo Docker Images. (One image per release)"""
# Pull DB image
pull_image(ctx.obj["config"]["postgres_image_name"])
# Build images for each odoo version
for odoo_version in get_odoo_versions_from_options(ctx, releases):
logger.info(
"Building Odoo docker image for release '%s'. "

View File

@ -4,6 +4,7 @@ from pathlib import Path
import click
from odoo_openupgrade_wizard import templates
from odoo_openupgrade_wizard.tools_odoo import get_odoo_modules_from_csv
from odoo_openupgrade_wizard.tools_odoo_module import Analysis
from odoo_openupgrade_wizard.tools_system import (
ensure_file_exists_from_template,
@ -18,11 +19,29 @@ from odoo_openupgrade_wizard.tools_system import (
),
default="./analysis.html",
)
@click.option(
"--extra-modules",
"extra_modules_list",
# TODO, add a callback to check the quality of the argument
help="Coma separated modules to analyse. If not set, the modules.csv"
" file will be used to define the list of module to analyse."
"Ex: 'account,product,base'",
)
@click.pass_context
def estimate_workload(ctx, analysis_file_path):
def estimate_workload(ctx, analysis_file_path, extra_modules_list):
# Analyse
analysis = Analysis(ctx)
if extra_modules_list:
module_list = extra_modules_list.split(",")
else:
module_list = get_odoo_modules_from_csv(ctx.obj["module_file_path"])
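# The analysis pipeline below: (1) build the module/version matrix across
# releases, following renames and merges; (2) detect modules missing in the
# final release and measure their code size; (3) read the OpenUpgrade
# coverage and analysis files for core modules; (4) estimate the porting
# workload.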
analysis.analyse_module_version(ctx, module_list)
analysis.analyse_missing_module()
analysis.analyse_openupgrade_state(ctx)
analysis.estimate_workload(ctx)
# Make some clean to display properly
analysis.modules = sorted(analysis.modules)
@ -30,7 +49,7 @@ def estimate_workload(ctx, analysis_file_path):
# TODO, make
ensure_file_exists_from_template(
Path(analysis_file_path),
templates.ANALYSIS_TEMPLATE,
templates.ANALYSIS_HTML_TEMPLATE,
ctx=ctx,
analysis=analysis,
current_date=datetime.now().strftime("%d/%m/%Y %H:%M:%S"),

View File

@ -7,7 +7,9 @@ def releases_options(function):
"--releases",
type=str,
help="Coma-separated values of odoo releases for which"
" you want to perform the operation.",
" you want to perform the operation."
" Let empty to perform the operation on all the releases"
" of the project",
)(function)
return function

View File

@ -22,6 +22,7 @@ from odoo_openupgrade_wizard.tools_postgres import ensure_database
" function to stop.",
)
@click.option(
"-i",
"--init-modules",
type=str,
help="List of modules to install. Equivalent to -i odoo options.",

View File

@ -228,3 +228,60 @@ def generate_analysis_files(
logger.info("> Launch analysis. This can take a while ...")
analysis.analyze()
def get_apriori_file_relative_path(migration_step: dict) -> (str, Path):
"""Return the module name and the relative file path of
the apriori.py file that contains all the rename and
the merge information for a given upgrade."""
if migration_step["release"] < 14.0:
return ("openupgrade_records", Path("lib/apriori.py"))
else:
return ("openupgrade_scripts", Path("apriori.py"))
def get_coverage_relative_path(migration_step: dict) -> Path:
"""Return the path of the coverage file."""
if migration_step["release"] < 10.0:
base_path = Path("src/openupgrade/openerp/openupgrade/doc/source")
elif migration_step["release"] < 14.0:
base_path = Path("src/openupgrade/odoo/openupgrade/doc/source")
else:
base_path = Path("src/openupgrade/docsource")
previous_release = migration_step["release"] - 1
return base_path / Path(
"modules%s-%s.rst"
% (
("%.1f" % previous_release).replace(".", ""),
("%.1f" % migration_step["release"]).replace(".", ""),
)
)
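# Example: for a step to release 14.0 the previous release is 13.0, so this
# returns src/openupgrade/docsource/modules130-140.rst
# ("13.0" -> "130" and "14.0" -> "140" once the dot is stripped).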
def get_openupgrade_analysis_files(
odoo_env_path: Path, release: float
) -> dict:
"""return a dictionnary of module_name : path,
where module_name is the name of each module of a release
and and path is the path of the migration_analysis.txt file
of the module"""
result = {}
if release < 14.0:
base_name = "openupgrade_analysis.txt"
else:
base_name = "upgrade_analysis.txt"
files = [
x
for x in sorted(odoo_env_path.rglob("**/*.txt"))
if x.name == base_name
]
for file in files:
if file.parent.parent.name == "migrations":
module_name = file.parent.parent.parent.name
else:
module_name = file.parent.parent.name
result[module_name] = file
return result

View File

@ -7,6 +7,7 @@ postgres_container_name: {{project_name}}-db
odoo_host_xmlrpc_port: 9069
odoo_default_country_code: FR
odoo_versions:
{% for odoo_version in odoo_versions %}
- release: {{ odoo_version['release'] }}
@ -19,6 +20,24 @@ migration_steps:
action: {{ step['action'] }}
complete_name: {{ step['complete_name'] }}
{% endfor %}
workload_settings:
# Porting a module requires at least 45 minutes
port_minimal_time: 45
# A migration costs more for each version to cross
port_per_version: 15
# Porting 120 lines of Python code costs 1 hour
port_per_python_line_time: 0.5
# Porting 120 lines of JavaScript code costs 1 hour
port_per_javascript_line_time: 0.5
# Porting 10 lines of XML costs 1 minute
port_per_xml_line_time: 0.10
"""
REPO_YML_TEMPLATE = """
@ -126,13 +145,16 @@ GIT_IGNORE_CONTENT = """
!.gitignore
"""
# TODO: these values are useful to test the analysis between 13 and 14.
# Move these values to data/extra_script/modules.csv
# and leave this template with only the 'base' module.
MODULES_CSV_TEMPLATE = """
base,Base
account,Account Module
web_responsive,Web Responsive Module
"""
ANALYSIS_TEMPLATE = """
ANALYSIS_HTML_TEMPLATE = """
<html>
<body>
<h1>Migration Analysis</h1>
@ -155,18 +177,118 @@ ANALYSIS_TEMPLATE = """
</tbody>
</table>
<h2>Summary</h2>
<table border="1" width="100%">
<thead>
<tr>
<th> - </th>
<th>Module Type</th>
<th>Module Quantity</th>
<th>Remaining Hours</th>
</tr>
</thead>
<tbody>
{%- for odoo_module in analysis.modules -%}
<tr>
<td>{{odoo_module.name}} ({{odoo_module.module_type}})
</td>
<td>Odoo</td>
<td>{{ analysis.get_module_qty("odoo") }}</td>
<td>{{ analysis.workload_hour_text("odoo") }}</td>
</tr>
<tr>
<td>OCA</td>
<td>{{ analysis.get_module_qty("OCA") }}</td>
<td>{{ analysis.workload_hour_text("OCA") }}</td>
</tr>
<tr>
<td>Custom</td>
<td>{{ analysis.get_module_qty("custom") }}</td>
<td>{{ analysis.workload_hour_text("custom") }}</td>
</tr>
</tbody>
<tfoot>
<tr>
<th>Total</th>
<td>{{ analysis.get_module_qty() }}</td>
<td>{{ analysis.workload_hour_text() }}</td>
</tr>
</tfoot>
</table>
<h2>Details</h2>
<table border="1" width="100%">
<thead>
<tr>
<th>&nbsp;</th>
{%- for odoo_version in ctx.obj["config"]["odoo_versions"] -%}
<th>{{ odoo_version["release"] }}</th>
{% endfor %}
</tr>
</thead>
<tbody>
{% set ns = namespace(
current_repository='',
current_module_type='',
) %}
{% for odoo_module in analysis.modules %}
<!-- ---------------------- -->
<!-- Handle New Module Type -->
<!-- ---------------------- -->
{% if (
ns.current_module_type != odoo_module.module_type
and odoo_module.module_type != 'odoo') %}
{% set ns.current_module_type = odoo_module.module_type %}
<tr>
<th colspan="{{1 + ctx.obj["config"]["odoo_versions"]|length}}">
{{ ns.current_module_type}}
</th>
</tr>
{% endif %}
<!-- -------------------- -->
<!-- Handle New Repository-->
<!-- -------------------- -->
{% if ns.current_repository != odoo_module.repository %}
{% set ns.current_repository = odoo_module.repository %}
<tr>
<th colspan="{{1 + ctx.obj["config"]["odoo_versions"]|length}}">
{{ ns.current_repository}}
</th>
</tr>
{% endif %}
<!-- -------------------- -->
<!-- Display Module Line -->
<!-- -------------------- -->
<tr>
<td>{{odoo_module.name}}
</td>
{% for release in odoo_module.analyse.all_releases %}
{% set module_version = odoo_module.get_module_version(release) %}
{% if module_version %}
{% set size_text = module_version.get_size_text() %}
{% set workload = module_version.workload %}
<td style="background-color:{{module_version.get_bg_color()}};">
{{module_version.get_text()}}
{% if size_text %}
<span style="color:gray">({{ size_text}})</span>
{% endif %}
{% if workload %}
<span style="background-color:lightblue;">
({{ module_version.workload_hour_text()}})
</span>
{% endif %}
</td>
{% else %}
<td style="background-color:gray;">&nbsp;</td>
{% endif %}
{% endfor %}
</tr>
{% endfor %}
</tbody>

View File

@ -6,6 +6,11 @@ def get_docker_client():
return docker.from_env()
def pull_image(image_name):
client = get_docker_client()
client.images.pull(image_name)
def build_image(path, tag):
logger.debug(
"Building image named based on %s/Dockerfile."
@ -34,6 +39,12 @@ def run_container(
auto_remove=False,
):
client = get_docker_client()
if not client.images.list(filters={"reference": image_name}):
raise Exception(
"The image %s is not available on your system."
" Did you run 'odoo-openupgrade-wizard docker-build' ?"
% image_name
)
logger.debug("Launching Docker container named %s ..." % (image_name))
debug_docker_command = "docker run --name %s\\\n" % (container_name)

View File

@ -308,7 +308,7 @@ def execute_click_odoo_python_files(
def get_odoo_modules_from_csv(module_file_path: Path) -> list:
logger.info("Reading '%s' file ..." % module_file_path)
logger.debug("Reading '%s' file ..." % module_file_path)
module_names = []
csvfile = open(module_file_path, "r")
spamreader = csv.reader(csvfile, delimiter=",", quotechar='"')

View File

@ -85,30 +85,6 @@ class OdooInstance:
model = self.env[model_name]
return model.browse(model.create(vals))
def check_modules_installed(self, module_names) -> bool:
if type(module_names) == str:
module_names = [module_names]
installed_module_ids = self.env["ir.module.module"].search(
[
("name", "in", module_names),
("state", "=", "installed"),
]
)
return len(module_names) == len(installed_module_ids)
def check_models_present(
self, model_name, warning_if_not_found=True
) -> bool:
if self.env["ir.model"].search([("model", "=", model_name)]):
return True
else:
if warning_if_not_found:
logger.warning(
"Model '%s' not found."
" Part of the script will be skipped." % (model_name)
)
return False
def install_modules(self, module_names):
if type(module_names) == str:
module_names = [module_names]
@ -171,40 +147,3 @@ class OdooInstance:
% (prefix, module_name, module.state)
)
return installed_modules
def uninstall_modules(self, module_names):
if type(module_names) == str:
module_names = [module_names]
i = 0
for module_name in module_names:
i += 1
prefix = str(i) + "/" + str(len(module_names))
modules = self.browse_by_search(
"ir.module.module", [("name", "=", module_name)]
)
if not len(modules):
logger.error(
"%s - Module '%s': Not found." % (prefix, module_name)
)
continue
module = modules[0]
if module.state in (
"installed",
"to upgrade",
"to update",
"to remove",
):
logger.info(
"%s - Module '%s': Uninstalling .." % (prefix, module_name)
)
module.button_upgrade_cancel()
module.button_uninstall()
wizard = self.browse_by_create("base.module.upgrade", {})
wizard.upgrade_module()
else:
logger.error(
"%s - Module '%s': In the %s state."
" (Unable to uninstall)"
% (prefix, module_name, module.state)
)

View File

@ -1,66 +1,374 @@
import importlib
import os
from functools import total_ordering
from pathlib import Path
from git import Repo
from loguru import logger
from pygount import SourceAnalysis
from odoo_openupgrade_wizard.configuration_version_dependant import (
get_apriori_file_relative_path,
get_coverage_relative_path,
get_openupgrade_analysis_files,
)
from odoo_openupgrade_wizard.tools_odoo import (
get_odoo_addons_path,
get_odoo_env_path,
get_odoo_modules_from_csv,
)
class Analysis(object):
modules = []
def __init__(self, ctx):
module_names = get_odoo_modules_from_csv(ctx.obj["module_file_path"])
self.modules = []
self.initial_release = ctx.obj["config"]["odoo_versions"][0]["release"]
self.final_release = ctx.obj["config"]["odoo_versions"][-1]["release"]
self.all_releases = [
x["release"] for x in ctx.obj["config"]["odoo_versions"]
]
initial_release = ctx.obj["config"]["odoo_versions"][0]["release"]
def analyse_module_version(self, ctx, module_list):
self._generate_module_version_first_release(ctx, module_list)
for count in range(len(self.all_releases) - 1):
previous_release = self.all_releases[count]
current_release = self.all_releases[count + 1]
self._generate_module_version_next_release(
ctx, previous_release, current_release
)
def analyse_openupgrade_state(self, ctx):
logger.info("Parsing openupgrade module coverage for each migration.")
coverage_analysis = {}
for release in self.all_releases[1:]:
coverage_analysis[release] = {}
relative_path = get_coverage_relative_path({"release": release})
env_folder_path = get_odoo_env_path(ctx, {"release": release})
coverage_path = env_folder_path / relative_path
with open(coverage_path) as f:
lines = f.readlines()
for line in [x for x in lines if "|" in x]:
clean_line = (
line.replace("\n", "")
.replace("|del|", "")
.replace("|new|", "")
)
splited_line = [x.strip() for x in clean_line.split("|") if x]
if len(splited_line) == 2:
coverage_analysis[release][splited_line[0]] = splited_line[
1
]
if len(splited_line) == 3:
coverage_analysis[release][splited_line[0]] = (
splited_line[1] + " " + splited_line[2]
).strip()
elif len(splited_line) > 3:
raise ValueError(
"Incorrect value in openupgrade analysis file %s"
" for line %s" % (coverage_path, line)
)
for odoo_module in filter(
lambda x: x.module_type == "odoo", self.modules
):
for module_version in list(odoo_module.module_versions.values()):
module_version.analyse_openupgrade_state(coverage_analysis)
for release in self.all_releases[1:]:
for odoo_module in filter(
lambda x: x.module_type == "odoo", self.modules
):
odoo_env_path = get_odoo_env_path(ctx, {"release": release})
openupgrade_analysis_files = get_openupgrade_analysis_files(
odoo_env_path, release
)
module_version = odoo_module.get_module_version(release)
module_version.analyse_openupgrade_work(
openupgrade_analysis_files
)
def analyse_missing_module(self):
for odoo_module in filter(
lambda x: x.module_type != "odoo", self.modules
):
last_module_version = odoo_module.module_versions.get(
self.final_release, False
)
if (
not last_module_version.addon_path
and last_module_version.state
not in ["renamed", "merged", "normal_loss"]
):
last_module_version.analyse_missing_module()
def estimate_workload(self, ctx):
logger.info("Estimate workload ...")
for odoo_module in self.modules:
for module_version in odoo_module.module_versions.values():
module_version.estimate_workload(ctx)
def _generate_module_version_first_release(self, ctx, module_list):
not_found_modules = []
logger.info(
"Analyse version %s. (First Release)" % self.initial_release
)
# Instantiate a new odoo_module
for module_name in module_names:
repository_name = OdooModule.find_repository(
ctx, module_name, initial_release
for module_name in module_list:
addon_path = OdooModule.get_addon_path(
ctx, module_name, self.initial_release
)
if addon_path:
repository_name = OdooModule.get_repository_name(addon_path)
if (
repository_name
and "%s.%s" % (repository_name, module_name)
"%s.%s" % (repository_name, module_name)
not in self.modules
):
logger.debug(
"Discovering module '%s' in %s for release %s"
% (module_name, repository_name, initial_release)
% (module_name, repository_name, self.initial_release)
)
self.modules.append(
OdooModule(ctx, module_name, repository_name)
new_odoo_module = OdooModule(
ctx, self, module_name, repository_name
)
new_module_version = OdooModuleVersion(
self.initial_release, new_odoo_module, addon_path
)
new_odoo_module.module_versions.update(
{self.initial_release: new_module_version}
)
self.modules.append(new_odoo_module)
else:
logger.error(
"Module %s not found for release %s."
% (module_name, self.initial_release)
)
not_found_modules.append(module_name)
if not_found_modules:
raise ValueError(
"The modules %s have not been found in the release %s."
" Analyse can not be done. Please update your repos.yml"
" of your initial release to add repositories that"
" include the modules, then run again the command."
% (",".join(not_found_modules), self.initial_release)
)
def _generate_module_version_next_release(
self, ctx, previous_release, current_release
):
logger.info(
"Analyse change between %s and %s"
% (previous_release, current_release)
)
# Get changes between the two releases
(
apriori_module_name,
apriori_relative_path,
) = get_apriori_file_relative_path({"release": current_release})
apriori_module_path = OdooModule.get_addon_path(
ctx, apriori_module_name, current_release
)
apriori_absolute_path = (
apriori_module_path
/ Path(apriori_module_name)
/ apriori_relative_path
)
module_spec = importlib.util.spec_from_file_location(
"package", str(apriori_absolute_path)
)
module = importlib.util.module_from_spec(module_spec)
module_spec.loader.exec_module(module)
renamed_modules = module.renamed_modules
merged_modules = module.merged_modules
for odoo_module in self.modules:
state = False
new_module_name = False
if odoo_module.name in renamed_modules:
state = "renamed"
new_module_name = renamed_modules[odoo_module.name]
logger.debug(
"%s -> %s : %s renamed into %s"
% (
previous_release,
current_release,
odoo_module.name,
new_module_name,
)
)
elif odoo_module.name in merged_modules:
state = "merged"
new_module_name = merged_modules[odoo_module.name]
logger.debug(
"%s -> %s : %s merged into %s"
% (
previous_release,
current_release,
odoo_module.name,
new_module_name,
)
)
# Handle new module
if state and new_module_name != odoo_module.name:
# Ensure that the module exists in self.modules
new_addon_path = OdooModule.get_addon_path(
ctx, new_module_name, current_release
)
if not new_addon_path:
raise ValueError(
"The module %s has not been found in the release %s."
" Analyse can not be done."
% (new_module_name, current_release)
)
else:
new_repository_name = OdooModule.get_repository_name(
new_addon_path
)
if (
"%s.%s" % (new_repository_name, new_module_name)
not in self.modules
):
logger.debug(
"Discovering module '%s' in %s for release %s"
% (
new_module_name,
new_repository_name,
current_release,
)
)
new_odoo_module = OdooModule(
ctx, self, new_module_name, new_repository_name
)
self.modules.append(new_odoo_module)
new_odoo_module.module_versions.update(
{
current_release: OdooModuleVersion(
current_release,
new_odoo_module,
new_addon_path,
)
}
)
# Get the previous release of the module
previous_module_version = odoo_module.get_module_version(
previous_release
)
# if the previous release has been renamed or merged
# the loss is normal
if previous_module_version and previous_module_version.state in [
"merged",
"renamed",
"normal_loss",
]:
state = "normal_loss"
new_addon_path = OdooModule.get_addon_path(
ctx, odoo_module.name, current_release
)
odoo_module.module_versions.update(
{
current_release: OdooModuleVersion(
current_release,
odoo_module,
new_addon_path,
state=state,
target_module=new_module_name,
)
}
)
def get_module_qty(self, module_type=False):
if module_type:
odoo_modules = [
x
for x in filter(
lambda x: x.module_type == module_type, self.modules
)
]
else:
odoo_modules = self.modules
return len(odoo_modules)
def workload_hour_text(self, module_type=False):
if module_type:
odoo_modules = [
x
for x in filter(
lambda x: x.module_type == module_type, self.modules
)
]
else:
odoo_modules = self.modules
total = 0
for odoo_module in odoo_modules:
for module_version in list(odoo_module.module_versions.values()):
total += module_version.workload
return "%d h" % (int(round(total / 60)))
@total_ordering
class OdooModule(object):
def __init__(self, ctx, analyse, module_name, repository_name):
self.analyse = analyse
self.name = module_name
self.repository = repository_name
self.unique_name = "%s.%s" % (repository_name, module_name)
self.module_versions = {}
if repository_name == "odoo/odoo":
self.module_type = "odoo"
elif repository_name.startswith("OCA"):
self.module_type = "OCA"
else:
self.module_type = "custom"
active = True
name = False
repository = False
module_type = False
unique_name = False
def get_module_version(self, current_release):
res = self.module_versions.get(current_release, False)
return res
@classmethod
def find_repository(cls, ctx, module_name, current_release):
def get_addon_path(cls, ctx, module_name, current_release):
"""Search the module in all the addons path of the current release
and return the addon path of the module, or False if not found.
For exemple find_repository(ctx, 'web_responsive', 12.0)
'/PATH_TO_LOCAL_ENV/src/OCA/web'
"""
# Try to find the repository that contains the module
main_path = get_odoo_env_path(ctx, {"release": current_release})
addons_path = get_odoo_addons_path(
ctx, main_path, {"release": current_release, "action": "update"}
ctx, main_path, {"release": current_release, "action": "upgrade"}
)
for addon_path in addons_path:
if (addon_path / module_name).exists():
return addon_path
return False
if str(addon_path).endswith("odoo/odoo/addons"):
@classmethod
def get_repository_name(cls, addon_path):
"""Given an addons path that contains odoo modules in a folder
that has been checked out via git, return a repository name with the
following format: org_name/repo_name.
For example 'OCA/web' or 'odoo/odoo'
"""
# TODO: make the code cleaner and more resilient.
# For the time being, the code will fail for:
# - github urls set with git+http...
# - gitlab urls
# - odoo code that is not in an odoo folder in the repos.yml file...
if str(addon_path).endswith("odoo/odoo/addons") or str(
addon_path
).endswith("openupgrade/odoo/addons"):
path = addon_path.parent.parent
elif str(addon_path).endswith("odoo/addons"):
elif str(addon_path).endswith("odoo/addons") or str(
addon_path
).endswith("openupgrade/addons"):
path = addon_path.parent
else:
path = addon_path
@ -68,21 +376,10 @@ class OdooModule(object):
repository_name = repo.remotes[0].url.replace(
"https://github.com/", ""
)
return repository_name
return False
def __init__(self, ctx, module_name, repository_name):
self.name = module_name
self.repository = repository_name
if repository_name == "odoo/odoo":
self.module_type = "odoo"
elif repository_name.startswith("OCA"):
self.module_type = "OCA"
if repository_name.lower() == "oca/openupgrade":
return "odoo/odoo"
else:
self.module_type = "custom"
self.unique_name = "%s.%s" % (repository_name, module_name)
return repository_name
def __eq__(self, other):
if isinstance(other, str):
@ -94,9 +391,246 @@ class OdooModule(object):
if self.module_type != other.module_type:
if self.module_type == "odoo":
return True
elif self.module_type == "OCA" and self.module_type == "custom":
elif self.module_type == "OCA" and other.module_type == "custom":
return True
else:
return False
elif self.repository != other.repository:
return self.repository < other.repository
else:
return self.name < other.name
class OdooModuleVersion(object):
_exclude_directories = [
"lib",
"demo",
"test",
"tests",
"doc",
"description",
]
_exclude_files = ["__openerp__.py", "__manifest__.py"]
_file_extensions = [".py", ".xml", ".js"]
def __init__(
self,
release,
odoo_module,
addon_path,
state=False,
target_module=False,
):
self.release = release
self.odoo_module = odoo_module
self.addon_path = addon_path
self.state = state
self.target_module = target_module
self.openupgrade_state = ""
self.python_code = 0
self.xml_code = 0
self.javascript_code = 0
self.workload = 0
self.openupgrade_model_lines = 0
self.openupgrade_field_lines = 0
self.openupgrade_xml_lines = 0
def get_last_existing_version(self):
versions = list(self.odoo_module.module_versions.values())
return [x for x in filter(lambda x: x.addon_path, versions)][-1]
def estimate_workload(self, ctx):
settings = ctx.obj["config"]["workload_settings"]
port_minimal_time = settings["port_minimal_time"]
port_per_version = settings["port_per_version"]
port_per_python_line_time = settings["port_per_python_line_time"]
port_per_javascript_line_time = settings[
"port_per_javascript_line_time"
]
port_per_xml_line_time = settings["port_per_xml_line_time"]
if self.state in ["merged", "renamed", "normal_loss"]:
# The module has been moved, nothing to do
return
if self.odoo_module.module_type == "odoo":
if self.release == self.odoo_module.analyse.initial_release:
# No work to do for the initial release
return
if self.openupgrade_state and (
self.openupgrade_state.lower().startswith("done")
or self.openupgrade_state.lower().startswith("nothing to do")
):
return
else:
# TODO
self.workload = 99
# OCA / Custom Module
if self.release != self.odoo_module.analyse.final_release:
# No need to work for intermediate release (in theory ;-))
return
if self.addon_path:
# The module has been ported, nothing to do
return
previous_module_version = self.get_last_existing_version()
self.workload = (
# Minimal port time
port_minimal_time
# Add time per release
+ (self.release - previous_module_version.release)
* port_per_version
# Add python time
+ (port_per_python_line_time * previous_module_version.python_code)
# Add XML Time
+ (port_per_xml_line_time * previous_module_version.xml_code)
# Add Javascript Time
+ (
port_per_javascript_line_time
* previous_module_version.javascript_code
)
)
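# Worked example with the default workload_settings from the config template
# (port_minimal_time=45, port_per_version=15, python=0.5, xml=0.10 min/line):
# a module last available in 12.0, ported to 14.0, with 600 Python lines and
# 200 XML lines gives 45 + (14.0 - 12.0) * 15 + 0.5 * 600 + 0.10 * 200
# = 395 minutes, displayed as "7 h" by workload_hour_text().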
def analyse_size(self):
self.python_code = 0
self.xml_code = 0
self.javascript_code = 0
# compute file list to analyse
file_list = []
for root, dirs, files in os.walk(
self.addon_path / Path(self.odoo_module.name), followlinks=True
):
relative_path = os.path.relpath(Path(root), self.addon_path)
if set(Path(relative_path).parts) & set(self._exclude_directories):
continue
for name in files:
if name in self._exclude_files:
continue
filename, file_extension = os.path.splitext(name)
if file_extension in self._file_extensions:
file_list.append(
(os.path.join(root, name), file_extension)
)
# Analyse files
for file_path, file_ext in file_list:
file_res = SourceAnalysis.from_file(
file_path, "", encoding="utf-8"
)
if file_ext == ".py":
self.python_code += file_res.code
elif file_ext == ".xml":
self.xml_code += file_res.code
elif file_ext == ".js":
self.javascript_code += file_res.code
def analyse_openupgrade_state(self, coverage_analysis):
if self.release == self.odoo_module.analyse.initial_release:
return
self.openupgrade_state = coverage_analysis[self.release].get(
self.odoo_module.name, False
)
def analyse_openupgrade_work(self, analysis_files):
if self.release == self.odoo_module.analyse.initial_release:
return
analysis_file = analysis_files.get(self.odoo_module.name, False)
if analysis_file:
# TODO
pass
else:
# TODO
pass
def workload_hour_text(self):
if not self.workload:
return ""
return "%d h" % (int(round(self.workload / 60)))
def get_size_text(self):
data = {
"Python": self.python_code,
"XML": self.xml_code,
"JavaScript": self.javascript_code,
}
# Remove empty values
data = {k: v for k, v in data.items() if v}
if not data:
return ""
else:
return ", ".join(["%s: %s" % (a, b) for a, b in data.items()])
def analyse_missing_module(self):
last_existing_version = self.get_last_existing_version()
last_existing_version.analyse_size()
def get_bg_color(self):
if self.addon_path:
if (
self.odoo_module.module_type == "odoo"
and self.release != self.odoo_module.analyse.initial_release
):
if self.openupgrade_state and (
self.openupgrade_state.lower().startswith("done")
or self.openupgrade_state.lower().startswith(
"nothing to do"
)
):
return "lightgreen"
else:
return "orange"
return "lightgreen"
else:
# The module doesn't exist in the current release
if self.state in ["merged", "renamed", "normal_loss"]:
# Normal case, the previous version has been renamed
# or merged
return "lightgray"
if self.odoo_module.module_type == "odoo":
# A core module disappeared and has not been merged
# or renamed
return "red"
elif self.release != self.odoo_module.analyse.final_release:
return "lightgray"
else:
return "orange"
def get_text(self):
if self.addon_path:
if (
self.odoo_module.module_type == "odoo"
and self.release != self.odoo_module.analyse.initial_release
):
if self.openupgrade_state.lower().startswith(
"done"
) or self.openupgrade_state.lower().startswith(
"nothing to do"
):
return self.openupgrade_state
else:
return "To analyse"
return ""
else:
if self.state == "merged":
return "Merged into %s" % self.target_module
elif self.state == "renamed":
return "Renamed into %s" % self.target_module
elif self.state == "normal_loss":
return ""
if self.odoo_module.module_type == "odoo":
# A core module disappeared and has not been merged
# or renamed
return "Module lost"
elif self.release != self.odoo_module.analyse.final_release:
return "Unported"
else:
return (
"To port from %s"
% self.get_last_existing_version().release
)

poetry.lock generated
View File

@ -25,17 +25,17 @@ test = ["coverage", "flake8", "pexpect", "wheel"]
[[package]]
name = "astroid"
version = "2.9.0"
version = "2.11.5"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
python-versions = "~=3.6"
python-versions = ">=3.6.2"
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
wrapt = ">=1.11,<1.14"
wrapt = ">=1.11,<2"
[[package]]
name = "atomicwrites"
@ -67,6 +67,14 @@ category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "chardet"
version = "4.0.0"
description = "Universal encoding detector for Python 2 and 3"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "charset-normalizer"
version = "2.0.12"
@ -116,6 +124,17 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "commonmark"
version = "0.9.1"
description = "Python parser for the CommonMark Markdown spec"
category = "main"
optional = false
python-versions = "*"
[package.extras]
test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
[[package]]
name = "contextvars"
version = "2.4"
@ -141,6 +160,25 @@ tomli = {version = "*", optional = true, markers = "extra == \"toml\""}
[package.extras]
toml = ["tomli"]
[[package]]
name = "dataclasses"
version = "0.8"
description = "A backport of the dataclasses module for Python 3.6"
category = "main"
optional = false
python-versions = ">=3.6, <3.7"
[[package]]
name = "dill"
version = "0.3.4"
description = "serialize all of python"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*"
[package.extras]
graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "distlib"
version = "0.3.4"
@ -306,16 +344,17 @@ python-versions = "*"
[[package]]
name = "isort"
version = "5.8.0"
version = "5.10.1"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6,<4.0"
python-versions = ">=3.6.1,<4.0"
[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]
[[package]]
name = "jinja2"
@ -376,11 +415,11 @@ python-versions = ">=3.6"
[[package]]
name = "mccabe"
version = "0.6.1"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = "*"
python-versions = ">=3.6"
[[package]]
name = "odoorpc"
@ -467,22 +506,47 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pylint"
name = "pygments"
version = "2.12.0"
description = "Pygments is a syntax highlighting package written in Python."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "pygount"
version = "1.4.0"
description = "count source lines of code (SLOC) using pygments"
category = "main"
optional = false
python-versions = ">=3.6.3,<4.0.0"
[package.dependencies]
chardet = ">=4,<5"
pygments = ">=2,<3"
rich = ">=9,<13"
[[package]]
name = "pylint"
version = "2.13.9"
description = "python code static checker"
category = "dev"
optional = false
python-versions = "~=3.6"
python-versions = ">=3.6.2"
[package.dependencies]
astroid = ">=2.9.0,<2.10"
astroid = ">=2.11.5,<=2.12.0-dev0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
dill = ">=0.2"
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.7"
mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0"
toml = ">=0.9.2"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
[package.extras]
testutil = ["gitpython (>3)"]
[[package]]
name = "pyparsing"
version = "3.0.7"
@ -587,6 +651,23 @@ urllib3 = ">=1.21.1,<1.27"
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
[[package]]
name = "rich"
version = "12.4.4"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
category = "main"
optional = false
python-versions = ">=3.6.3,<4.0.0"
[package.dependencies]
commonmark = ">=0.9.0,<0.10.0"
dataclasses = {version = ">=0.7,<0.9", markers = "python_version < \"3.7\""}
pygments = ">=2.6.0,<3.0.0"
typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
[[package]]
name = "safety"
version = "1.10.3"
@ -760,7 +841,7 @@ dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
[[package]]
name = "wrapt"
version = "1.13.3"
version = "1.14.1"
description = "Module for decorators, wrappers and monkey patching."
category = "dev"
optional = false
@ -780,8 +861,8 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes
[metadata]
lock-version = "1.1"
python-versions = "^3.6"
content-hash = "c86a563043f2c105d46b393c93b6d10d67e35917d5dfbd0dd83daf42e62e3dcd"
python-versions = ">=3.6.3,<4.0.0"
content-hash = "cf6e4e8a8d4c26f956c108ccf66ac8e1b074c92bdf3c962537f55fb93bdddf10"
[metadata.files]
aiocontextvars = [
@ -793,8 +874,8 @@ argcomplete = [
{file = "argcomplete-2.0.0.tar.gz", hash = "sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20"},
]
astroid = [
{file = "astroid-2.9.0-py3-none-any.whl", hash = "sha256:776ca0b748b4ad69c00bfe0fff38fa2d21c338e12c84aa9715ee0d473c422778"},
{file = "astroid-2.9.0.tar.gz", hash = "sha256:5939cf55de24b92bda00345d4d0659d01b3c7dafb5055165c330bc7c568ba273"},
{file = "astroid-2.11.5-py3-none-any.whl", hash = "sha256:14ffbb4f6aa2cf474a0834014005487f7ecd8924996083ab411e7fa0b508ce0b"},
{file = "astroid-2.11.5.tar.gz", hash = "sha256:f4e4ec5294c4b07ac38bab9ca5ddd3914d4bf46f9006eb5c0ae755755061044e"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
@ -808,6 +889,10 @@ certifi = [
{file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"},
{file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"},
]
chardet = [
{file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"},
{file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"},
]
charset-normalizer = [
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
{file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
@ -827,6 +912,10 @@ colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
commonmark = [
{file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"},
{file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
]
contextvars = [
{file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"},
]
@ -879,6 +968,14 @@ coverage = [
{file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"},
{file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"},
]
dataclasses = [
{file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"},
{file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"},
]
dill = [
{file = "dill-0.3.4-py2.py3-none-any.whl", hash = "sha256:7e40e4a70304fd9ceab3535d36e58791d9c4a776b38ec7f7ec9afc8d3dca4d4f"},
{file = "dill-0.3.4.zip", hash = "sha256:9f9734205146b2b353ab3fec9af0070237b6ddae78452af83d2fca84d739e675"},
]
distlib = [
{file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
{file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
@ -979,8 +1076,8 @@ iniconfig = [
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
isort = [
{file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"},
{file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"},
{file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
{file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
]
jinja2 = [
{file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"},
@ -1104,8 +1201,8 @@ markupsafe = [
{file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
odoorpc = [
{file = "OdooRPC-0.8.0-py2.py3-none-any.whl", hash = "sha256:abd8fa8385da635ac4848e9a8944c01929c0606cb99743d7b8f2e887800e84e3"},
@ -1133,9 +1230,16 @@ py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pygments = [
{file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"},
{file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"},
]
pygount = [
{file = "pygount-1.4.0-py3-none-any.whl", hash = "sha256:be98bacc3a4f72ce31361338de3caeb76b6edfbaffa0732b38d3cb3fbb64d246"},
]
pylint = [
{file = "pylint-2.12.0-py3-none-any.whl", hash = "sha256:ba00afcb1550bc217bbcb0eb76c10cb8335f7417a3323bdd980c29fb5b59f8d2"},
{file = "pylint-2.12.0.tar.gz", hash = "sha256:245c87e5da54c35b623c21b35debf87d93b18bf9e0229515cc172d0b83d627cd"},
{file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"},
{file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"},
]
pyparsing = [
{file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
@ -1200,6 +1304,10 @@ requests = [
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
{file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
]
rich = [
{file = "rich-12.4.4-py3-none-any.whl", hash = "sha256:d2bbd99c320a2532ac71ff6a3164867884357da3e3301f0240090c5d2fdac7ec"},
{file = "rich-12.4.4.tar.gz", hash = "sha256:4c586de507202505346f3e32d1363eb9ed6932f0c2f63184dea88983ff4971e2"},
]
safety = [
{file = "safety-1.10.3-py2.py3-none-any.whl", hash = "sha256:5f802ad5df5614f9622d8d71fedec2757099705c2356f862847c58c6dfe13e84"},
{file = "safety-1.10.3.tar.gz", hash = "sha256:30e394d02a20ac49b7f65292d19d38fa927a8f9582cdfd3ad1adbbc66c641ad5"},
@ -1279,57 +1387,70 @@ win32-setctime = [
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
]
wrapt = [
{file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"},
{file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"},
{file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"},
{file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"},
{file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"},
{file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"},
{file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"},
{file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"},
{file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"},
{file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"},
{file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"},
{file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"},
{file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"},
{file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"},
{file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"},
{file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"},
{file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"},
{file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"},
{file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"},
{file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"},
{file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"},
{file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"},
{file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"},
{file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"},
{file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"},
{file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"},
{file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"},
{file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"},
{file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"},
{file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"},
{file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"},
{file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"},
{file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"},
{file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"},
{file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"},
{file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"},
{file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"},
{file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"},
{file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"},
{file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"},
{file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"},
{file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"},
{file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"},
{file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"},
{file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"},
{file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"},
{file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"},
{file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"},
{file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"},
{file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"},
{file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"},
{file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"},
{file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"},
{file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"},
{file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"},
{file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"},
{file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"},
{file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"},
{file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"},
{file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"},
{file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"},
{file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"},
{file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"},
{file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"},
{file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"},
{file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"},
{file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"},
{file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"},
{file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"},
{file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"},
{file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"},
{file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"},
{file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"},
{file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"},
{file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"},
{file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"},
{file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"},
{file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"},
{file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"},
{file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"},
{file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"},
{file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"},
{file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"},
{file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"},
{file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"},
{file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"},
{file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"},
{file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"},
{file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"},
{file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"},
{file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"},
{file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"},
{file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"},
{file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"},
{file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"},
{file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"},
{file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"},
{file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"},
{file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"},
{file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"},
{file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"},
{file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
]
zipp = [
{file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"},

View File

@ -21,7 +21,6 @@ classifiers = [
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Framework :: Odoo",
]
@ -29,7 +28,7 @@ classifiers = [
odoo-openupgrade-wizard = "odoo_openupgrade_wizard.cli:main"
[tool.poetry.dependencies]
python = "^3.6"
python = ">=3.6.3,<4.0.0"
click = "^7.0"
click-loglevel = "^0.4"
odoorpc = "^0.8"
@ -40,6 +39,7 @@ git-aggregator = "^2.1"
docker = "^5.0"
pyyaml = "5.4.1"
GitPython = "^3.1"
pygount = "^1.4"
[tool.poetry.dev-dependencies]
pytest = [
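The new pygount dependency suggests the workload estimation counts source lines per language before applying the per-line costs configured further below. As a hedged sketch (an assumption, not the wizard's actual implementation), counting code lines for a module folder with pygount's SourceAnalysis.from_file could look like this:

# Sketch only: count code lines per language in a module folder with pygount.
from pathlib import Path

import pygount


def count_code_lines(module_path):
    """Return a {language: code line count} mapping for a module folder."""
    totals = {}
    for path in Path(module_path).rglob("*"):
        if not path.is_file():
            continue
        analysis = pygount.SourceAnalysis.from_file(str(path), "workload")
        # Recent pygount releases expose code_count; older ones used code.
        code = getattr(analysis, "code_count", getattr(analysis, "code", 0))
        totals[analysis.language] = totals.get(analysis.language, 0) + code
    return totals


# Hypothetical usage:
# count_code_lines("path/to/web_responsive")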

View File

@ -15,7 +15,8 @@ def test_cli_init():
"--project-name=test-cli",
"--initial-release=13.0",
"--final-release=14.0",
"--extra-repository=OCA/web,OCA/server-tools",
"--extra-repository="
"OCA/web,OCA/server-tools,OCA/bank-statement-import",
]
)

View File

@ -1,13 +1,50 @@
import unittest
from pathlib import Path
from . import cli_runner_invoke, move_to_test_folder
def test_cli_estimate_workload():
class TestCliEstimateWorkload(unittest.TestCase):
def test_cli_estimate_workload(self):
move_to_test_folder()
cli_runner_invoke(
[
"--log-level=DEBUG",
"estimate-workload",
"--extra-modules="
# Done Module
"account"
# Deleted module (because merged)
",account_analytic_default"
# Deleted module (because renamed)
",account_facturx"
# Deleted module (because lost merge)
",base_gengo"
# Some modules that are not ported (for the time being)
",l10n_be_invoice_bba,l10n_ch_qriban,l10n_latam_base"
# OCA Ported modules
",web_responsive"
# OCA Unported modules
",web_boolean_button"
",web_editor_background_color"
",web_pivot_computed_measure"
",web_view_calendar_list"
",web_widget_child_selector"
",web_widget_one2many_tree_line_duplicate"
",web_widget_dropdown_dynamic_example",
]
)
# We check that the file has been created.
# Parsing this file would be a mess, so we don't do it ;-)
assert Path("./analysis.html").exists()
with self.assertRaises(ValueError):
cli_runner_invoke(
[
"--log-level=DEBUG",
"estimate-workload",
"--extra-modules=my_module_that_doesnt_exist",
]
)
# TODO, write test

View File

@ -7,6 +7,7 @@ postgres_container_name: test-cli-db
odoo_host_xmlrpc_port: 9069
odoo_default_country_code: FR
odoo_versions:
- release: 13.0
@ -30,3 +31,21 @@ migration_steps:
release: 14.0
action: update
complete_name: step_03__update__14.0
workload_settings:
# porting a module requires at least 45 minutes
port_minimal_time: 45
# a migration costs more for each version
port_per_version: 15
# Porting 120 lines of Python code costs 1 hour
port_per_python_line_time: 0.5
# Porting 120 lines of JavaScript code costs 1 hour
port_per_javascript_line_time: 0.5
# Porting 10 lines of XML costs 1 minute
port_per_xml_line_time: 0.10
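Read together, these settings describe an additive cost model: a fixed minimal time per module, a surcharge per version to cross, and a per-line cost by language, all expressed in minutes. As an illustration only, assuming the settings simply add up (not necessarily the wizard's exact formula), a rough per-module estimate could be computed as follows:

# Sketch of an additive workload estimate, in minutes
# (assumption, not the wizard's actual formula).
WORKLOAD_SETTINGS = {
    "port_minimal_time": 45,
    "port_per_version": 15,
    "port_per_python_line_time": 0.5,
    "port_per_javascript_line_time": 0.5,
    "port_per_xml_line_time": 0.10,
}


def estimate_module_workload(
    python_lines, javascript_lines, xml_lines, version_count,
    settings=WORKLOAD_SETTINGS,
):
    """Estimate the time (minutes) to port one module across version_count releases."""
    per_line = (
        python_lines * settings["port_per_python_line_time"]
        + javascript_lines * settings["port_per_javascript_line_time"]
        + xml_lines * settings["port_per_xml_line_time"]
    )
    return (
        settings["port_minimal_time"]
        + version_count * settings["port_per_version"]
        + per_line
    )


# 120 Python lines, 0 JavaScript lines, 100 XML lines, 1 version to cross:
# 45 + 15 + 60 + 0 + 10 = 130 minutes.
print(estimate_module_workload(120, 0, 100, 1))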