Merge branch 'cosmetic-changes' into 'main'

[REF] Cosmetic changes (remove useless comment + f-string everywhere)

See merge request odoo-openupgrade-wizard/odoo-openupgrade-wizard!61
Rémy Taymans 2024-03-23 22:04:48 +00:00
commit bd1190ab06
20 changed files with 137 additions and 210 deletions
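The change applied across all 20 files is the same mechanical rewrite: printf-style '%' interpolation becomes an f-string. A minimal standalone sketch of the pattern, reusing the message from the first docker-build hunk with an illustrative version number (not taken from a real config):

    odoo_version = "14.0"  # illustrative value
    # before: printf-style interpolation with a trailing tuple
    old = "Building Odoo docker image for version '%s'. This can take a while..." % (odoo_version)
    # after: the expression is interpolated in place
    new = f"Building Odoo docker image for version '{odoo_version}'. This can take a while..."
    assert old == new

Both forms render identically; the f-string simply keeps the value next to the text it belongs to.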

View File

@ -67,7 +67,7 @@ def main(ctx, env_folder, filestore_folder, log_level):
date_begin = datetime.datetime.now()
logger.remove()
logger.add(sys.stderr, level=log_level)
logger.debug("Beginning script '%s' ..." % (ctx.invoked_subcommand))
logger.debug(f"Beginning script '{ctx.invoked_subcommand}' ...")
if not isinstance(ctx.obj, dict):
ctx.obj = {}

View File

@ -41,12 +41,12 @@ def docker_build(ctx, versions):
continue
logger.info(
"Building Odoo docker image for version '%s'. "
"This can take a while..." % (odoo_version)
f"Building Odoo docker image for version '{odoo_version}'."
" This can take a while..."
)
image = build_image(
get_odoo_env_path(ctx, odoo_version),
get_docker_image_tag(ctx, odoo_version),
{"LOCAL_USER_ID": str(get_local_user_id())},
)
logger.info("Docker Image build. '%s'" % image[0].tags[0])
logger.info(f"Docker Image build. '{image[0].tags[0]}'")

View File

@ -41,17 +41,16 @@ def generate_module_analysis(ctx, step, database, modules):
alternative_xml_rpc_port = ctx.obj["config"]["odoo_host_xmlrpc_port"] + 10
if not database:
database = "%s__analysis__" % (
ctx.obj["config"]["project_name"].replace("-", "_"),
database = (
f"{ctx.obj['config']['project_name'].replace('-', '_')}"
"__analysis__"
)
initial_database = "%s_%s" % (
database,
str(initial_step["version"]).replace(".", ""),
initial_database = (
f"{database}_{str(initial_step['version']).replace('.', '')}"
)
final_database = "%s_%s" % (
database,
str(final_step["version"]).replace(".", ""),
final_database = (
f"{database}_{str(final_step['version']).replace('.', '')}"
)
modules = modules and modules.split(",") or []

View File

@ -89,7 +89,7 @@ def init(
"name": 1,
"execution_context": "regular",
"version": odoo_versions[0],
"complete_name": "step_01__regular__%s" % (odoo_versions[0]),
"complete_name": f"step_01__regular__{odoo_versions[0]}",
}
]
# Add all Openupgrade steps
@ -100,8 +100,9 @@ def init(
"name": step_nbr,
"execution_context": "openupgrade",
"version": odoo_version,
"complete_name": "step_%s__openupgrade__%s"
% (str(step_nbr).rjust(2, "0"), odoo_version),
"complete_name": (
f"step_{step_nbr:>02}__openupgrade__{odoo_version}"
),
}
)
step_nbr += 1
@ -113,8 +114,9 @@ def init(
"name": step_nbr,
"execution_context": "regular",
"version": odoo_versions[-1],
"complete_name": "step_%s__regular__%s"
% (str(step_nbr).rjust(2, "0"), odoo_versions[-1]),
"complete_name": (
f"step_{step_nbr:>02}__regular__{odoo_versions[-1]}"
),
}
)
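The step-name hunks above also fold the zero padding into the format spec: str(step_nbr).rjust(2, "0") becomes {step_nbr:>02}. A small sketch checking that the two spellings agree for the step numbers used here (the "14.0" version string is illustrative):

    for step_nbr in (1, 2, 10):
        old = "step_%s__openupgrade__14.0" % (str(step_nbr).rjust(2, "0"),)
        new = f"step_{step_nbr:>02}__openupgrade__14.0"
        assert old == new  # e.g. both give "step_01__openupgrade__14.0"

For non-negative integers the two forms match; f"{step_nbr:02d}" would be the more conventional spelling of the same padding.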

View File

@ -26,11 +26,11 @@ def install_from_csv(ctx, database, with_demo):
# Get modules list from the CSV file
module_names = get_odoo_modules_from_csv(ctx.obj["module_file_path"])
module_names.sort()
logger.info("Found %d modules." % (len(module_names)))
logger.info(f"Found {len(module_names)} modules.")
logger.debug(module_names)
try:
logger.info("Install 'base' module on %s database ..." % (database))
logger.info(f"Install 'base' module on {database} database ...")
run_odoo(
ctx,
migration_step,
@ -55,12 +55,10 @@ def install_from_csv(ctx, database, with_demo):
)
if len(countries) != 1:
raise Exception(
"Unable to find a country, based on the code %s."
" countries found : %s "
% (
odoo_default_company["country_code"],
", ".join([x.name for x in countries]),
)
f"Unable to find a country, based on the"
f" code {odoo_default_company['country_code']}."
f" Countries found :"
f" {', '.join([x.name for x in countries])}"
)
vals = {
"country_id": countries[0].id,

View File

@ -93,7 +93,7 @@ def get_migration_step_from_options(ctx, step_arg):
if migration_step["name"] == step:
return migration_step
raise ValueError(
"No migration step found in configuration for step %s" % step_arg
f"No migration step found in configuration for step {step_arg}"
)
@ -114,7 +114,6 @@ def get_migration_steps_from_options(ctx, first_step_arg, last_step_arg):
return result
raise ValueError(
"Unable to define steps in configuration"
" from options. (first step %s ; last step %s)"
% (first_step_arg, last_step_arg)
"Unable to define steps in configuration from options."
f" (first step {first_step_arg} ; last step {last_step_arg})"
)

View File

@ -70,9 +70,8 @@ def run(
)
if not stop_after_init:
logger.info(
"Odoo is available on your host at"
" http://localhost:%s"
% ctx.obj["config"]["odoo_host_xmlrpc_port"]
"Odoo is available on your host at http://localhost:"
f"{ctx.obj['config']['odoo_host_xmlrpc_port']}"
)
input("Press 'Enter' to kill the odoo container and exit ...")
except (KeyboardInterrupt, SystemExit):

View File

@ -13,14 +13,14 @@ def pull_image(image_name):
def build_image(path, tag, buildargs={}):
logger.debug(
"Building image named based on %s/Dockerfile."
" This can take a big while ..." % (path)
f"Building image named based on {path}/Dockerfile."
" This can take a big while ..."
)
debug_docker_command = "docker build %s --tag %s" % (path, tag)
debug_docker_command = f"docker build {path} --tag {tag}"
for arg_name, arg_value in buildargs.items():
debug_docker_command += f"\\\n --build-arg {arg_name}={arg_value}"
logger.debug("DOCKER COMMAND:\n\n%s\n" % debug_docker_command)
logger.debug(f"DOCKER COMMAND:\n\n{debug_docker_command}\n")
docker_client = get_docker_client()
image = docker_client.images.build(
path=str(path),
@ -45,32 +45,29 @@ def run_container(
client = get_docker_client()
if not client.images.list(filters={"reference": image_name}):
raise Exception(
"The image %s is not available on your system."
f"The image {image_name} is not available on your system."
" Did you run 'odoo-openupgrade-wizard docker-build' ?"
% image_name
)
logger.debug("Launching Docker container named %s ..." % (image_name))
debug_docker_command = "docker run --name %s\\\n" % (container_name)
logger.debug(f"Launching Docker container named {image_name} ...")
debug_docker_command = f"docker run --name {container_name}\\\n"
for k, v in ports.items():
debug_docker_command += " --publish {k}:{v}\\\n".format(k=k, v=v)
debug_docker_command += f" --publish {k}:{v}\\\n"
for k, v in volumes.items():
debug_docker_command += " --volume {k}:{v}\\\n".format(
k=str(k), v=str(v)
)
debug_docker_command += f" --volume {k}:{v}\\\n"
for k, v in environments.items():
debug_docker_command += " --env {k}={v}\\\n".format(k=k, v=v)
debug_docker_command += f" --env {k}={v}\\\n"
for k, v in links.items():
debug_docker_command += " --link {k}:{v}\\\n".format(k=k, v=v)
debug_docker_command += f" --link {k}:{v}\\\n"
if auto_remove:
debug_docker_command += " --rm"
if detach:
debug_docker_command += " --detach"
debug_docker_command += " %s" % (image_name)
debug_docker_command += f" {image_name}"
if command:
debug_docker_command += " \\\n%s" % (command)
logger.debug("DOCKER COMMAND:\n%s" % debug_docker_command)
debug_docker_command += f" \\\n{command}"
logger.debug(f"DOCKER COMMAND:\n{debug_docker_command}")
container = client.containers.run(
image_name,
@ -84,7 +81,7 @@ def run_container(
auto_remove=auto_remove,
)
if detach:
logger.debug("Container %s launched." % image_name)
logger.debug(f"Container {image_name} launched.")
elif auto_remove:
logger.debug("Container closed.")
@ -92,22 +89,16 @@ def run_container(
def exec_container(container, command):
debug_docker_command = "docker exec %s" % (container.name)
debug_docker_command += " \\\n%s" % (command)
logger.debug("DOCKER COMMAND:\n%s" % debug_docker_command)
debug_docker_command = f"docker exec {container.name}"
debug_docker_command += f" \\\n{command}"
logger.debug(f"DOCKER COMMAND:\n{debug_docker_command}")
docker_result = container.exec_run(command)
if docker_result.exit_code != 0:
raise Exception(
"The command failed in the container %s.\n"
"- Command : %s\n"
"- Exit Code : %d\n"
"- Output: %s"
% (
container.name,
command,
docker_result.exit_code,
docker_result.output,
)
f"The command failed in the container {container.name}.\n"
f"- Command : {command}\n"
f"- Exit Code : {docker_result.exit_code}\n"
f"- Output: {docker_result.output}"
)
return docker_result
@ -122,7 +113,7 @@ def kill_container(container_name):
ignore_removed=True,
)
except docker.errors.NotFound as err:
logger.debug(f"Cannot kill container {container_name}: " + str(err))
logger.debug(f"Cannot kill container {container_name}: {err}")
containers = []
for container in containers:
@ -134,9 +125,7 @@ def kill_container(container_name):
try:
container.stop()
except docker.errors.NotFound as err:
logger.debug(
f"Cannot kill container {container.name}: " + str(err)
)
logger.debug(f"Cannot kill container {container.name}: {err}")
# TODO, we should here filter by name
# but filters={"name": container_name}
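In run_container the same conversion also replaces the str.format calls used to assemble the human-readable docker command for the debug log, keeping the literal backslash-newline continuations. A rough self-contained sketch with made-up values (container_name, image_name, ports and volumes below are illustrative, not taken from the project):

    container_name = "example_odoo"
    image_name = "example/odoo:14.0"
    ports = {8069: 8069}
    volumes = {"/tmp/env": "/env"}

    debug_docker_command = f"docker run --name {container_name}\\\n"
    for k, v in ports.items():
        debug_docker_command += f" --publish {k}:{v}\\\n"
    for k, v in volumes.items():
        debug_docker_command += f" --volume {k}:{v}\\\n"
    debug_docker_command += f" {image_name}"
    print(debug_docker_command)  # a copy-pastable multi-line docker command

The string is only logged for debugging; the container itself is still started through the docker SDK.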

View File

@ -5,7 +5,6 @@ import sys
import traceback
from pathlib import Path
# import docker
import yaml
from loguru import logger
@ -279,7 +278,6 @@ def run_container_odoo(
links.update({ctx.obj["config"]["postgres_container_name"]: "db"})
# try:
return run_container(
get_docker_image_tag(ctx, migration_step["version"]),
get_docker_container_name(ctx, migration_step),
@ -295,18 +293,6 @@ def run_container_odoo(
detach=detached_container,
auto_remove=True,
)
# except docker.errors.ContainerError as exception:
# host_log_file_path = ctx.obj["log_folder_path"] / log_file_name
# if host_log_file_path.exists():
# with open(host_log_file_path) as _log_file:
# logger.debug("*" * 50)
# logger.debug("*" * 50)
# logger.debug("*" * 50)
# logger.debug(_log_file.read())
# logger.debug("*" * 50)
# logger.debug("*" * 50)
# logger.debug("*" * 50)
# raise exception
def kill_odoo(ctx, migration_step: dict):
@ -346,8 +332,8 @@ def execute_click_odoo_python_files(
)
try:
logger.info(
"Executing script %s / %s"
% (migration_step["complete_name"], python_file)
f"Step {migration_step['complete_name']}."
f" Executing script {python_file} ..."
)
return run_container_odoo(
ctx,
@ -369,7 +355,7 @@ def execute_click_odoo_python_files(
def get_odoo_modules_from_csv(module_file_path: Path) -> list:
logger.debug("Reading '%s' file ..." % module_file_path)
logger.debug(f"Reading '{module_file_path}' file ...")
module_names = []
csvfile = open(module_file_path, "r")
spamreader = csv.reader(csvfile, delimiter=",", quotechar='"')

View File

@ -20,8 +20,7 @@ class OdooInstance:
or ctx.obj["config"]["odoo_host_xmlrpc_port"]
)
logger.info(
"Connect to Odoo database %s via odoorpc (Port %s)... "
% (database, port)
f"Connect to database {database} via odoorpc (Port {port})..."
)
for x in range(1, _ODOO_RPC_MAX_TRY + 1):
@ -38,33 +37,24 @@ class OdooInstance:
except (socket.gaierror, socket.error) as e:
if x < _ODOO_RPC_MAX_TRY:
logger.debug(
"%d/%d Unable to connect to the server."
" Retrying in 1 second ..." % (x, _ODOO_RPC_MAX_TRY)
f"{x}/{_ODOO_RPC_MAX_TRY}"
" Unable to connect to the server."
" Retrying in 1 second ..."
)
time.sleep(1)
else:
logger.critical(
"%d/%d Unable to connect to the server."
% (x, _ODOO_RPC_MAX_TRY)
f"{x}/{_ODOO_RPC_MAX_TRY}"
" Unable to connect to the server."
)
raise e
# Login
try:
rpc_connexion.login(
database,
"admin",
"admin",
)
rpc_connexion.login(database, "admin", "admin")
except Exception as e:
logger.error(
"Unable to connect to http://localhost:%s"
" with login %s and password %s"
% (
port,
"admin",
"admin",
)
f"Unable to connect to http://localhost:{port}"
" with login 'admin' and password 'admin."
)
raise e
@ -89,35 +79,24 @@ class OdooInstance:
i = 0
for module_name in module_names:
i += 1
prefix = str(i) + "/" + str(len(module_names))
log_prefix = f"{i}/{len(module_names)} - Module '{module_name}': "
modules = self.browse_by_search(
"ir.module.module", [("name", "=", module_name)]
)
if not len(modules):
logger.error(
"%s - Module '%s': Not found." % (prefix, module_name)
)
logger.error(f"{log_prefix}': Not found.")
continue
module = modules[0]
if module.state == "installed":
logger.info(
"%s - Module %s still installed."
" skipped." % (prefix, module_name)
)
logger.info(f"{log_prefix}': still installed. Skipped.")
elif module.state == "uninstalled":
try_qty = 0
installed = False
while installed is False:
try_qty += 1
logger.info(
"%s - Module '%s': Installing ... %s"
% (
prefix,
module_name,
"(try #%d)" % try_qty if try_qty != 1 else "",
)
)
try_qty_text = f" (try #{try_qty})" if try_qty != 1 else ""
logger.info(f"{log_prefix}': Installing ...{try_qty_text}")
try:
module.button_immediate_install()
installed = True
@ -127,20 +106,18 @@ class OdooInstance:
if try_qty <= 5:
sleeping_time = 2 * try_qty * 60
logger.warning(
"Error. Retrying in %d seconds.\n %s"
% (sleeping_time, e)
f"Error. Retrying in {sleeping_time} seconds."
f"\n{e}"
)
time.sleep(sleeping_time)
else:
logger.critical(
"Error after %d try. Exiting.\n %s"
% (try_qty, e)
f"Error after {try_qty} try. Exiting." f"\n{e}"
)
raise e
else:
logger.error(
"%s - Module '%s': In the %s state."
f"{log_prefix}': In the {module.state} state."
" (Unable to install)"
% (prefix, module_name, module.state)
)
return installed_modules
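The install_modules hunk replaces the hand-built prefix with a single log_prefix f-string computed once per loop iteration and reused by every message about that module. A compact sketch of the idiom (the module list is illustrative and print stands in for the loguru calls; the original code increments a counter by hand rather than using enumerate):

    module_names = ["base", "web", "sale"]  # illustrative list
    for i, module_name in enumerate(module_names, start=1):
        # one prefix per iteration, shared by all log lines for this module
        log_prefix = f"{i}/{len(module_names)} - Module '{module_name}': "
        print(f"{log_prefix}Installing ...")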

View File

@ -64,8 +64,8 @@ class Analysis(object):
).strip()
elif len(splited_line) > 3:
raise ValueError(
"Incorrect value in openupgrade analysis file %s"
" for line %s" % (coverage_path, line)
"Incorrect value in openupgrade analysis"
f" file {coverage_path} for line {line}"
)
for odoo_module in filter(
@ -111,9 +111,7 @@ class Analysis(object):
module_version.estimate_workload(ctx)
def _generate_module_version_first_version(self, ctx, module_list):
logger.info(
"Analyse version %s. (First version)" % self.initial_version
)
logger.info(f"Analyse version {self.initial_version}. (First version)")
# Instantiate a new odoo_module
for module_name in module_list:
@ -122,19 +120,17 @@ class Analysis(object):
)
if addon_path:
repository_name = OdooModule.get_repository_name(addon_path)
if (
"%s.%s" % (repository_name, module_name)
not in self.modules
):
if f"{repository_name}.{module_name}" not in self.modules:
logger.debug(
"Discovering module '%s' in %s for version %s"
% (module_name, repository_name, self.initial_version)
f"Discovering module '{module_name}'"
f" in {repository_name}"
f" for version {self.initial_version}"
)
else:
repository_name = False
logger.error(
"Module %s not found for version %s."
% (module_name, self.initial_version)
f"Module {module_name} not found"
f" for version {self.initial_version}."
)
new_odoo_module = OdooModule(
ctx, self, module_name, repository_name
@ -151,8 +147,7 @@ class Analysis(object):
self, ctx, previous_version, current_version
):
logger.info(
"Analyse change between %s and %s"
% (previous_version, current_version)
f"Analyse change between {previous_version} and {current_version}"
)
# Get changes between the two versions
(
@ -164,8 +159,8 @@ class Analysis(object):
)
if not apriori_module_path:
raise ValueError(
"Unable to find the path of the module %s for the version %s"
% (apriori_module_name, current_version)
f"Unable to find the path of the module {apriori_module_name}"
f" for the version {current_version}."
)
apriori_absolute_path = (
apriori_module_path
@ -189,25 +184,15 @@ class Analysis(object):
state = "renamed"
new_module_name = renamed_modules[odoo_module.name]
logger.debug(
"%s -> %s : %s renamed into %s"
% (
previous_version,
current_version,
odoo_module.name,
new_module_name,
)
f"{previous_version} -> {current_version} :"
f" {odoo_module.name} renamed into {new_module_name}"
)
elif odoo_module.name in merged_modules:
state = "merged"
new_module_name = merged_modules[odoo_module.name]
logger.debug(
"%s -> %s : %s merged into %s"
% (
previous_version,
current_version,
odoo_module.name,
new_module_name,
)
f"{previous_version} -> {current_version} :"
f" {odoo_module.name} merged into {new_module_name}"
)
# Handle new module
@ -218,9 +203,9 @@ class Analysis(object):
)
if not new_addon_path:
raise ValueError(
"The module %s has not been found in the version %s."
f"The module {new_module_name} has not been found"
f" in the version {current_version}."
" Analyse can not be done."
% (new_module_name, current_version)
)
else:
new_repository_name = OdooModule.get_repository_name(
@ -231,12 +216,9 @@ class Analysis(object):
not in self.modules
):
logger.debug(
"Discovering module '%s' in %s for version %s"
% (
new_module_name,
new_repository_name,
current_version,
)
f"Discovering module '{new_module_name}'"
f" in {new_repository_name}"
f" for version {current_version}"
)
new_odoo_module = OdooModule(
ctx, self, new_module_name, new_repository_name
@ -316,7 +298,7 @@ class OdooModule(object):
self.analyse = analyse
self.name = module_name
self.repository = repository_name
self.unique_name = "%s.%s" % (repository_name, module_name)
self.unique_name = f"{repository_name}.{module_name}"
self.ignored = self.is_ignored(ctx, module_name)
self.module_versions = {}
if not repository_name:
@ -345,7 +327,7 @@ class OdooModule(object):
return res
def get_odoo_apps_url(self):
logger.info("Searching %s in the Odoo appstore ..." % self.name)
logger.info(f"Searching {self.name} in the Odoo appstore ...")
url = (
f"https://apps.odoo.com/apps/modules/"
f"{self.analyse.initial_version}/{self.name}/"
@ -360,7 +342,7 @@ class OdooModule(object):
return False
def get_odoo_code_search_url(self):
logger.info("Searching %s in Odoo-Code-Search ..." % self.name)
logger.info(f"Searching {self.name} in Odoo-Code-Search ...")
url = (
f"https://odoo-code-search.com/ocs/search?"
f"q=name%3A%3D{self.name}+version%3A{self.analyse.initial_version}"
@ -674,7 +656,7 @@ class OdooModuleVersion(object):
elif line.startswith("---nothing has changed in this module"):
continue
elif line.startswith("---"):
raise Exception("comment %s not undestood" % line)
raise Exception(f"comment {line} not understood")
if line_type == "model":
self.openupgrade_model_lines += 1
@ -774,9 +756,9 @@ class OdooModuleVersion(object):
return "To analyse"
else:
if self.state == "merged":
return "Merged into %s" % self.target_module
return f"Merged into {self.target_module}"
elif self.state == "renamed":
return "Renamed into %s" % self.target_module
return f"Renamed into {self.target_module}"
elif self.state == "ignored":
return "Ignored"
elif self.state == "normal_loss":
@ -793,4 +775,4 @@ class OdooModuleVersion(object):
elif self.version != self.odoo_module.analyse.final_version:
return "Unported"
else:
return "To port from %s" % last_existing_version.version
return f"To port from {last_existing_version.version}"

View File

@ -29,8 +29,8 @@ def get_postgres_container(ctx):
container = containers[0]
if container.status == "exited":
logger.warning(
"Found container %s in a exited status. Removing it..."
% container_name
f"Found container {container_name} in a exited status."
" Removing it..."
)
container.remove()
else:
@ -39,9 +39,9 @@ def get_postgres_container(ctx):
# Check if volume exists
try:
client.volumes.get(volume_name)
logger.debug("Recovering existing postgres volume: %s" % volume_name)
logger.debug(f"Recovering existing postgres volume: {volume_name}")
except docker.errors.NotFound:
logger.info("Creating Postgres volume: %s" % volume_name)
logger.info(f"Creating Postgres volume: {volume_name}")
client.volumes.create(volume_name)
command = None
@ -51,7 +51,7 @@ def get_postgres_container(ctx):
for key, value in postgres_extra_settings.items():
command += f" -c {key}={value}"
logger.info("Launching Postgres Container. (Image %s)" % image_name)
logger.info(f"Launching Postgres Container. (Image {image_name})")
container = run_container(
image_name,
container_name,
@ -85,10 +85,9 @@ def execute_sql_file(ctx, database, sql_file):
# call psql in the container
if str(ctx.obj["env_folder_path"]) not in str(sql_file):
raise Exception(
"The SQL file %s is not in the"
" main folder %s available"
f"The SQL file {sql_file} is not in the"
f" main folder {ctx.obj['env_folder_path']} available"
" in the postgres container."
% (sql_file, ctx.obj["env_folder_path"])
)
relative_path = Path(
str(sql_file).replace(str(ctx.obj["env_folder_path"]), ".")
@ -99,8 +98,8 @@ def execute_sql_file(ctx, database, sql_file):
"psql --username=odoo --dbname={database} --file {file_path}"
).format(database=database, file_path=container_path)
logger.info(
"Executing the script '%s' in postgres container"
" on database %s" % (relative_path, database)
f"Executing the script '{relative_path}' in postgres container"
f" on database {database}"
)
exec_container(container, command)
@ -130,8 +129,7 @@ def execute_psql_command(
f" {' '.join(psql_args)}"
)
logger.debug(
"Executing the following command in postgres container\n"
"%s" % (command)
f"Executing the following command in postgres container\n{command}"
)
docker_result = exec_container(container, command)
return docker_result.output.decode("utf-8")
@ -164,8 +162,8 @@ def ensure_database(ctx, database: str, state="present", template: str = ""):
if [database] not in result:
return
logger.info("Drop database '%s' ..." % database)
request = "DROP DATABASE {database};".format(database=database)
logger.info(f"Drop database '{database}' ...")
request = f"DROP DATABASE {database};"
execute_psql_command(ctx, request)
@ -195,8 +193,7 @@ def chown_to_local_user(ctx, filepath: os.PathLike):
uid=user_uid, filepath=filepath
)
logger.debug(
"Executing the following command in postgres container: %s"
% (command,)
f"Executing the following command in postgres container:\n{command}"
)
chown_result = exec_container(container, command)
return chown_result.output.decode("utf8")
@ -232,8 +229,7 @@ def execute_pg_dump(
pg_dump_args=pg_dump_args,
)
logger.debug(
"Executing the following command in postgres container: %s"
% (command,)
f"Executing the following command in postgres container:\n{command}"
)
pg_dump_result = exec_container(container, command)

View File

@ -19,7 +19,7 @@ def get_script_folder(ctx, migration_step: dict) -> Path:
def ensure_folder_writable(folder_path: Path):
logger.info("Make writable the folder '%s'" % folder_path)
logger.info(f"Make writable the folder '{folder_path}'")
try:
chmod(["--silent", "--recursive", "o+w", str(folder_path)])
except ProcessExecutionError:
@ -37,7 +37,7 @@ def ensure_folder_exists(
if not folder_path.exists():
cmd = ["--parents", folder_path]
cmd = ["--mode", mode] + cmd
logger.info("Creating folder '%s' ..." % (folder_path))
logger.info(f"Creating folder '{folder_path}' ...")
mkdir(cmd)
if git_ignore_content:
@ -74,9 +74,9 @@ def ensure_file_exists_from_template(
if data == output:
return
log_text = "Updating file '%s' from template ..." % (file_path)
log_text = f"Updating file '{file_path}' from template ..."
else:
log_text = "Creating file '%s' from template ..." % (file_path)
log_text = f"Creating file '{file_path}' from template ..."
with open(file_path, "w") as f:
logger.info(log_text)
@ -97,8 +97,8 @@ def git_aggregate(folder_path: Path, config_path: Path, jobs: int):
with working_directory_keeper:
os.chdir(folder_path)
logger.info(
"Gitaggregate source code for %s. This can take a while ..."
% config_path
f"Gitaggregate source code for {config_path}."
" This can take a while ..."
)
gitaggregate_cmd.run(args)
@ -108,7 +108,7 @@ def get_local_user_id():
def execute_check_output(args_list, working_directory=False):
logger.debug("Execute %s" % " ".join(args_list))
logger.debug(f"Execute {' '.join(args_list)}")
subprocess.check_output(args_list, cwd=working_directory)

View File

@ -24,14 +24,14 @@ def test_cli_run():
[
"run",
"--step=1",
"--database=%s" % db_name,
f"--database={db_name}",
"--init-modules=base",
"--stop-after-init",
],
)
# Ensure that a subfolder filestore/DB_NAME has been created
db_filestore_path = Path("./filestore/filestore/%s" % db_name)
db_filestore_path = Path("./filestore/filestore") / db_name
assert db_filestore_path.exists()
# Ensure that 'base' module is installed

View File

@ -34,7 +34,7 @@ def test_cli_execute_script_python():
[
"run",
"--step=1",
"--database=%s" % db_name,
f"--database={db_name}",
"--init-modules=base",
"--stop-after-init",
],
@ -51,7 +51,7 @@ def test_cli_execute_script_python():
[
"execute-script-python",
"--step=1",
"--database=%s" % db_name,
f"--database={db_name}",
"--script-file-path=post-migration-custom_test.py",
],
)

View File

@ -34,7 +34,7 @@ def test_cli_execute_script_sql():
# TODO call with script-file-path
# to avoid copying the file into the scripts/step_xxx folder
cli_runner_invoke(
["execute-script-sql", "--step=1", "--database=%s" % db_name]
["execute-script-sql", "--step=1", f"--database={db_name}"]
)
# Ensure that the request has been done correctly

View File

@ -22,7 +22,7 @@ def test_cli_upgrade():
[
"run",
"--step=1",
"--database=%s" % db_name,
f"--database={db_name}",
"--init-modules=base",
"--stop-after-init",
],
@ -42,7 +42,7 @@ def test_cli_upgrade():
cli_runner_invoke(
[
"upgrade",
"--database=%s" % db_name,
f"--database={db_name}",
"--first-step=1",
"--last-step=3",
],

View File

@ -21,7 +21,7 @@ def test_cli_install_from_csv(mocker):
ensure_database(ctx, db_name, state="absent")
cli_runner_invoke(["install-from-csv", "--database=%s" % db_name])
cli_runner_invoke(["install-from-csv", f"--database={db_name}"])
# Ensure that 'base' is installed
request = (

View File

@ -34,7 +34,7 @@ def test_cli_generate_module_analysis(mocker):
[
"generate-module-analysis",
"--step=2",
"--database=%s" % db_name,
f"--database={db_name}",
"--modules=base",
],
)

View File

@ -24,7 +24,7 @@ def test_cli_psql():
[
"run",
"--step=1",
"--database=%s" % db_name,
f"--database={db_name}",
"--init-modules=base",
"--stop-after-init",
],
@ -49,8 +49,8 @@ def test_cli_psql():
cli_runner_invoke(
[
"psql",
"--database=%s" % db_name,
'--command "%s"' % request,
f"--database={db_name}",
f'--command "{request}"',
"--no-pager",
"--tuples-only",
],
@ -61,8 +61,8 @@ def test_cli_psql():
cli_runner_invoke(
[
"psql",
"--database=%s" % db_name,
'--command "%s"' % request,
f"--database={db_name}",
f'--command "{request}"',
"--no-pager",
"--tuples-only",
"---unkwon-argument",