Merge branch '26-dump-db-command' into 'main'
[ADD] dumpdb command

Closes #26

See merge request odoo-openupgrade-wizard/odoo-openupgrade-wizard!32

Commit 60f7b78413
@@ -31,18 +31,7 @@ pytest:
    - echo $PATH
    - echo $PYTHONPATH
    - poetry run pytest --version

    - poetry run pytest --verbosity=2 --exitfirst --cov odoo_openupgrade_wizard
      tests/cli_01_init_test.py
      tests/cli_02_get_code_test.py
      tests/cli_03_docker_build_test.py
      tests/cli_04_run_test.py
      tests/cli_05_execute_script_python_test.py
      tests/cli_06_execute_script_sql_test.py
      tests/cli_07_upgrade_test.py
      tests/cli_08_estimate_workload_test.py
      tests/cli_20_install_from_csv_test.py
      tests/cli_21_generate_module_analysis_test.py

build:
  stage: build
README.md (46 lines changed)
@@ -40,6 +40,7 @@ and provides helpers to run (and replay) migrations until it works.
* [Command ``generate-module-analysis`` (BETA)](#command-generate-module-analysis)
* [Command ``estimate-workload`` (BETA)](#command-estimate-workload)
* [Command ``psql``](#command-psql)
* [Command ``dumpdb``](#command-dumpdb)

<a name="installation"/>
@@ -435,3 +436,48 @@ Result:
```

See all the options here: https://www.postgresql.org/docs/current/app-psql.html

<a name="command-dumpdb"/>

## Command: ``dumpdb``

**Prerequisites:** init

```
odoo-openupgrade-wizard dumpdb
    --database DB_NAME
    --database-path DATABASE_PATH
    --filestore-path FILESTORE_PATH
```

Dump the database DB_NAME to DATABASE_PATH and export the filestore
related to DB_NAME into FILESTORE_PATH. To choose the format of the
backup files, see the `--database-format` and `--filestore-format` options.

*WARNING*: DATABASE_PATH should be a subdirectory of the project path
so that the postgresql container is able to write the dump file.
For example, if the project path is `/path/to/myproject` (where you ran the
`init` command), then DATABASE_PATH can be any subdirectory of
`/path/to/myproject`.

**Optional arguments**

* To choose the database format, use `--database-format`. The format can be
  one of the following:
  - `p` for plain SQL text
  - `c` for the custom compressed backup format of `pg_dump`
  - `d` for a directory structure
  - `t` for a tar version of the directory structure
  See also https://www.postgresql.org/docs/current/app-pgdump.html
  The default database format is `c`.

* To choose the filestore format, use `--filestore-format`. The format can be
  one of the following:
  - `d` copy of the directory structure
  - `t` tar version of the directory structure (not compressed)
  - `tgz` tar version of the directory structure compressed with gzip.
  The default filestore format is `tgz`.

* By default, if the database file or the filestore file already exists, the
  command fails, preserving the existing dump. If you need to overwrite the
  existing files, use the `--force` option.
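For illustration, a full invocation could look like the following; the database name and the two paths are examples only, with `./dumps` assumed to be a subdirectory of the project path, as required above:

```
odoo-openupgrade-wizard dumpdb \
    --database mydb \
    --database-path ./dumps/mydb.dump \
    --database-format c \
    --filestore-path ./dumps/mydb.filestore.tgz \
    --filestore-format tgz
```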
@@ -11,6 +11,7 @@ from loguru import logger
import odoo_openupgrade_wizard
from odoo_openupgrade_wizard.cli.cli_copydb import copydb
from odoo_openupgrade_wizard.cli.cli_docker_build import docker_build
from odoo_openupgrade_wizard.cli.cli_dumpdb import dumpdb
from odoo_openupgrade_wizard.cli.cli_estimate_workload import estimate_workload
from odoo_openupgrade_wizard.cli.cli_execute_script_python import (
    execute_script_python,

@@ -107,6 +108,7 @@ def main(ctx, env_folder, filestore_folder, log_level):

main.add_command(copydb)
main.add_command(docker_build)
main.add_command(dumpdb)
main.add_command(estimate_workload)
main.add_command(execute_script_python)
main.add_command(execute_script_sql)
odoo_openupgrade_wizard/cli/cli_dumpdb.py (new file, 109 lines)
@@ -0,0 +1,109 @@
import pathlib
import shutil

import click

from odoo_openupgrade_wizard.cli.cli_options import database_option_required
from odoo_openupgrade_wizard.tools.tools_postgres import execute_pg_dump
from odoo_openupgrade_wizard.tools.tools_system import dump_filestore


@click.command()
@database_option_required
@click.option(
    "--database-path",
    type=click.Path(writable=True, resolve_path=True),
    required=True,
    help="Path to the database dump, relative to the project folder.",
)
@click.option(
    "--database-format",
    type=click.Choice(("p", "c", "d", "t")),
    default="c",
    help="Database format (see pg_dump options): plain sql text (p), "
    "custom format compressed (c), directory (d), tar file (t).",
)
@click.option(
    "--filestore-path",
    type=click.Path(writable=True, resolve_path=True),
    required=True,
    help="Path to the filestore backup.",
)
@click.option(
    "--filestore-format",
    type=click.Choice(("d", "t", "tgz")),
    default="tgz",
    help="Filestore format: directory (d), tar file (t), "
    "tar file compressed with gzip (tgz).",
)
@click.option(
    "--force",
    is_flag=True,
    default=False,
    help="Overwrite files if they already exist.",
)
@click.pass_context
def dumpdb(
    ctx,
    database,
    database_path,
    database_format,
    filestore_path,
    filestore_format,
    force,
):
    """Create a dump of an Odoo database and its filestore."""
    database_path = pathlib.Path(database_path)
    filestore_path = pathlib.Path(filestore_path)

    # Check that database_path is inside the env_folder_path
    absolute_database_path = database_path.absolute()
    absolute_env_folder_path = ctx.obj["env_folder_path"].resolve().absolute()
    if not str(absolute_database_path).startswith(
        str(absolute_env_folder_path)
    ):
        ctx.fail(
            "database-path should be inside the project path to allow "
            "postgresql to write to it."
        )

    # Fail if a dump already exists and the force argument is not given.
    # Remove the file if it already exists and force is given.
    if not force and database_path.exists():
        ctx.fail(f"{database_path} exists, use --force to overwrite it.")
    elif force and database_path.exists():
        if database_path.is_dir():
            shutil.rmtree(database_path)
        else:
            database_path.unlink()

    if not force and filestore_path.exists():
        ctx.fail(f"{filestore_path} exists, use --force to overwrite it.")
    elif force and filestore_path.exists():
        if filestore_path.is_dir():
            shutil.rmtree(filestore_path)
        else:
            filestore_path.unlink()

    # Normalise database_path
    database_path = absolute_database_path.relative_to(
        absolute_env_folder_path
    )

    # dump the database
    output = execute_pg_dump(
        ctx,
        database=database,
        dumpformat=database_format,
        filename=str(database_path),
    )
    if output:
        click.echo(output)

    # dump the filestore
    dump_filestore(
        ctx,
        database=database,
        destpath=filestore_path,
        copyformat=filestore_format,
    )
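To make the containment check and the later normalisation easier to follow, here is a minimal standalone sketch of the same two steps with hypothetical paths (it is not code from the wizard itself):

```python
# Minimal sketch of the prefix-based containment check and the
# normalisation to a project-relative path, using hypothetical paths.
from pathlib import Path

env_folder = Path("/path/to/myproject").resolve()
dump_path = Path("/path/to/myproject/dumps/mydb.dump").resolve()

# Containment: the absolute dump path must start with the project path.
assert str(dump_path).startswith(str(env_folder))

# Normalisation: keep only the part relative to the project folder, so it
# can later be re-rooted under the container's mount point.
relative_path = dump_path.relative_to(env_folder)
print(relative_path)  # dumps/mydb.dump
```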
@@ -180,3 +180,58 @@ def execute_sql_files_pre_migration(

    for sql_file in sql_files:
        execute_sql_file(ctx, database, sql_file)


def chown_to_local_user(ctx, filepath: os.PathLike):
    """Chown a filepath in the postgres container to the local user"""
    container = get_postgres_container(ctx)
    user_uid = os.getuid()
    command = "chown -R {uid}:{uid} {filepath}".format(
        uid=user_uid, filepath=filepath
    )
    logger.debug(
        "Executing the following command in postgres container: %s"
        % (command,)
    )
    chown_result = exec_container(container, command)
    return chown_result.output.decode("utf8")


def execute_pg_dump(
    ctx,
    database: str,
    dumpformat: str,
    filename: str,
    pg_dump_args="--no-owner",
):
    """Execute the pg_dump command on the postgres container and write the
    result to `filename`.
    """
    if pg_dump_args and not isinstance(pg_dump_args, str):
        pg_dump_args = " ".join(pg_dump_args)
    container = get_postgres_container(ctx)
    # Generate path for the output file
    filepath = Path("/env") / Path(filename)
    # Generate pg_dump command
    command = (
        "pg_dump"
        " --username=odoo"
        " --format {dumpformat}"
        " --file {filepath}"
        " {pg_dump_args}"
        " {database}"
    ).format(
        dumpformat=dumpformat,
        filepath=filepath,
        database=database,
        pg_dump_args=pg_dump_args,
    )
    logger.debug(
        "Executing the following command in postgres container: %s"
        % (command,)
    )
    pg_dump_result = exec_container(container, command)

    chown_to_local_user(ctx, filepath)

    return pg_dump_result.output.decode("utf8")
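For a concrete picture of what this helper runs, the standalone sketch below rebuilds the command string for a hypothetical call with `filename="dumps/mydb.dump"`, `dumpformat="c"` and `database="mydb"`; the `/env` prefix mirrors how the project folder is exposed inside the postgres container in the code above:

```python
# Standalone sketch (hypothetical inputs): the pg_dump command assembled by
# execute_pg_dump for a custom-format dump of database "mydb".
from pathlib import Path

filename = "dumps/mydb.dump"  # relative to the project folder
filepath = Path("/env") / Path(filename)  # path as seen inside the container

command = (
    "pg_dump"
    " --username=odoo"
    " --format c"
    f" --file {filepath}"
    " --no-owner"
    " mydb"
)
print(command)
# pg_dump --username=odoo --format c --file /env/dumps/mydb.dump --no-owner mydb
```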
@@ -1,6 +1,8 @@
import argparse
import os
import shutil
import subprocess
import tarfile
from pathlib import Path

import importlib_resources
@@ -109,3 +111,36 @@ def get_local_user_id():
def execute_check_output(args_list, working_directory=False):
    logger.debug("Execute %s" % " ".join(args_list))
    subprocess.check_output(args_list, cwd=working_directory)


def dump_filestore(
    ctx,
    database: str,
    destpath: os.PathLike,
    copyformat: str = "d",
):
    """Copy the filestore of database to destpath using copyformat.
    copyformat can be 'd' for a plain directory copy, 't' for a copy
    into a tar archive, or 'tgz' to copy to a compressed tar file.
    """
    valid_format = ("d", "t", "tgz", "txz")
    if copyformat not in valid_format:
        raise ValueError(
            f"copyformat should be one of the following {valid_format}"
        )

    filestore_folder_path = ctx.obj["env_folder_path"] / "filestore/filestore"
    filestore_path = filestore_folder_path / database

    if copyformat == "d":
        shutil.copytree(filestore_path, destpath)

    elif copyformat.startswith("t"):
        wmode = "w"
        if copyformat.endswith("gz"):
            wmode += ":gz"
        elif copyformat.endswith("xz"):
            wmode += ":xz"

        with tarfile.open(destpath, wmode) as tar:
            tar.add(filestore_path, arcname="filestore")
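As a quick check of what the tar-based formats contain, the sketch below opens a hypothetical `tgz` dump and lists a few entries; the archive name is made up, but the `filestore` top-level name matches the `arcname="filestore"` used above:

```python
# Standalone sketch with a hypothetical archive name: inspect a "tgz"
# filestore dump produced by dump_filestore.
import tarfile

with tarfile.open("mydb.filestore.tgz", "r:gz") as tar:
    for member in tar.getmembers()[:5]:
        # Entries all live under "filestore", e.g. filestore/a1/a1b2c3...
        print(member.name)
```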
@@ -28,17 +28,20 @@ def move_to_test_folder():
     os.chdir(test_folder_path)


-def cli_runner_invoke(cmd):
+def cli_runner_invoke(cmd, expect_success=True):
     try:
         result = CliRunner().invoke(
             main,
             cmd,
             catch_exceptions=False,
         )
-        if not result.exit_code == 0:
-            _logger.error("exit_code: %s" % result.exit_code)
-            _logger.error("output: %s" % result.output)
-        assert result.exit_code == 0
+        if expect_success:
+            if not result.exit_code == 0:
+                _logger.error("exit_code: %s" % result.exit_code)
+                _logger.error("output: %s" % result.output)
+            assert result.exit_code == 0
+        else:
+            assert result.exit_code != 0
     except Exception as exception:
         if Path("log").exists():
             log_files = [
tests/cli_22_dumpdb_test.py (new file, 155 lines)
@@ -0,0 +1,155 @@
import pathlib
import shutil

from odoo_openupgrade_wizard.tools.tools_postgres import ensure_database

from . import (
    build_ctx_from_config_file,
    cli_runner_invoke,
    move_to_test_folder,
)


def test_cli_dumpdb():
    move_to_test_folder()

    # Initialize database
    db_name = "database_test_cli___dumpdb"
    ctx = build_ctx_from_config_file()
    ensure_database(ctx, db_name, state="absent")

    cli_runner_invoke(
        [
            "--log-level=DEBUG",
            "install-from-csv",
            f"--database={db_name}",
        ],
    )

    # Dump database and filestore
    formatlist = [("p", "d"), ("c", "tgz"), ("t", "t"), ("d", "d")]
    for formats in formatlist:
        database_path = pathlib.Path("database_test_cli___dumpdb")
        filestore_path = pathlib.Path("database_test_clie___dumpdb.filestore")

        assert not database_path.exists()
        assert not filestore_path.exists()

        cli_runner_invoke(
            [
                "--log-level=DEBUG",
                "dumpdb",
                f"--database={db_name}",
                f"--database-path={database_path}",
                f"--database-format={formats[0]}",
                f"--filestore-path={filestore_path}",
                f"--filestore-format={formats[1]}",
            ],
        )

        assert database_path.exists()
        assert filestore_path.exists()

        # Cleanup files
        if database_path.is_dir():
            shutil.rmtree(database_path)
        else:
            database_path.unlink()

        if filestore_path.is_dir():
            shutil.rmtree(filestore_path)
        else:
            filestore_path.unlink()


def test_cli_dumpdb_failure():
    move_to_test_folder()

    # Initialize database
    db_name = "database_test_cli___dumpdb"
    ctx = build_ctx_from_config_file()
    ensure_database(ctx, db_name, state="absent")

    cli_runner_invoke(
        [
            "--log-level=DEBUG",
            "install-from-csv",
            f"--database={db_name}",
        ],
    )

    # First dump
    formats = ("d", "d")
    database_path = pathlib.Path("database_test_cli___dumpdb")
    filestore_path = pathlib.Path("database_test_clie___dumpdb.filestore")

    assert not database_path.exists()
    assert not filestore_path.exists()

    cli_runner_invoke(
        [
            "--log-level=DEBUG",
            "dumpdb",
            f"--database={db_name}",
            f"--database-path={database_path}",
            f"--database-format={formats[0]}",
            f"--filestore-path={filestore_path}",
            f"--filestore-format={formats[1]}",
        ],
    )

    assert database_path.exists()
    assert filestore_path.exists()

    # With same name
    cli_runner_invoke(
        [
            "--log-level=DEBUG",
            "dumpdb",
            f"--database={db_name}",
            f"--database-path={database_path}",
            f"--database-format={formats[0]}",
            f"--filestore-path={filestore_path}",
            f"--filestore-format={formats[1]}",
        ],
        expect_success=False,
    )

    # With --force
    cli_runner_invoke(
        [
            "--log-level=DEBUG",
            "dumpdb",
            f"--database={db_name}",
            f"--database-path={database_path}",
            f"--database-format={formats[0]}",
            f"--filestore-path={filestore_path}",
            f"--filestore-format={formats[1]}",
            "--force",
        ],
    )

    # With name outside of project path
    cli_runner_invoke(
        [
            "--log-level=DEBUG",
            "dumpdb",
            f"--database={db_name}",
            f"--database-path=/{database_path}",
            f"--database-format={formats[0]}",
            f"--filestore-path=/{filestore_path}",
            f"--filestore-format={formats[1]}",
        ],
        expect_success=False,
    )

    # Cleanup files
    if database_path.is_dir():
        shutil.rmtree(database_path)
    else:
        database_path.unlink()

    if filestore_path.is_dir():
        shutil.rmtree(filestore_path)
    else:
        filestore_path.unlink()