#425: Fixed type errors found by MyPy
tomuben committed Nov 19, 2024
1 parent 9ed4dad commit 505c9f2
Showing 61 changed files with 344 additions and 288 deletions.
@@ -48,7 +48,7 @@ def _handle_failure(self, task_error: TaskRuntimeError):
     def _print_task_failures(task_error: TaskRuntimeError):
         print_err()
         print_err("Task failure message: %s" % task_error.msg)
-        print_err(task_error.__cause__.args[0])
+        print_err(task_error.__cause__.args[0]) # type: ignore
         print_err()
 
     def _handle_success(self):
2 changes: 1 addition & 1 deletion exasol_integration_test_docker_environment/doctor.py
@@ -84,5 +84,5 @@ def health_checkup() -> Iterable[error.ExaError]:
     ]
     for is_fine, diagnosis in examinations:
         if not is_fine():
-            for error_code in diagnosis():
+            for error_code in diagnosis(): # type: ignore
                 yield error_code
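
As an aside, the `# type: ignore` above is one of two ways out. The hunk does not show how `examinations` is declared, but this is the kind of construct where MyPy's inference for a list of mixed tuples becomes too loose to prove that `diagnosis()` is callable and iterable. A minimal sketch with hypothetical stand-in checks (not the repository's real examinations) shows how an explicit annotation on the pair type would satisfy MyPy without a suppression:

from typing import Callable, Iterable, List, Tuple

def docker_running() -> bool:  # hypothetical failing check
    return False

def diagnose_docker() -> Iterable[str]:  # hypothetical diagnosis
    yield "E-ITDE-0001"

# With an explicit element type, MyPy can verify both calls below,
# so no `# type: ignore` is required.
examinations: List[Tuple[Callable[[], bool], Callable[[], Iterable[str]]]] = [
    (docker_running, diagnose_docker),
]

def health_checkup() -> Iterable[str]:
    for is_fine, diagnosis in examinations:
        if not is_fine():
            yield from diagnosis()

print(list(health_checkup()))  # -> ['E-ITDE-0001']
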
11 changes: 7 additions & 4 deletions exasol_integration_test_docker_environment/lib/api/api_errors.py
@@ -12,13 +12,16 @@ class HealthProblem(RuntimeError):
 class TaskFailures(Exception):
     """Represents a potential cause of a TaskRuntimeError"""
 
-    def __init__(self, inner: List[str] = None):
+    def __init__(self, inner: Optional[List[str]] = None):
         super().__init__(self._construct_exception_message(inner))
         self.inner = inner
 
-    def _construct_exception_message(self, failures: Iterable[str]) -> str:
-        formatted_task_failures = "\n".join(failures)
-        return f"Following task failures were caught during the execution:\n{formatted_task_failures}"
+    def _construct_exception_message(self, failures: Optional[Iterable[str]]) -> str:
+        if failures is not None:
+            formatted_task_failures = "\n".join(failures)
+            return f"Following task failures were caught during the execution:\n{formatted_task_failures}"
+        else:
+            return f"No task failures were caught during the execution:"
 
 
 class TaskRuntimeError(RuntimeError):
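
The signature change above follows MyPy's rule against implicit Optional: a parameter whose default is None must say so in its annotation (implicit Optional is disabled by default in recent MyPy releases). A self-contained illustration of the rule, using a hypothetical helper rather than the class above:

from typing import List, Optional

# Rejected by MyPy ("Incompatible default for argument"):
#     def format_failures(failures: List[str] = None) -> str: ...

# Accepted: the annotation admits None, and the body must then handle
# the None case before calling "\n".join().
def format_failures(failures: Optional[List[str]] = None) -> str:
    if failures is None:
        return "No task failures were caught during the execution"
    return "Following task failures were caught:\n" + "\n".join(failures)

print(format_failures(["task A failed", "task B failed"]))
print(format_failures())
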
55 changes: 27 additions & 28 deletions exasol_integration_test_docker_environment/lib/api/common.py
@@ -28,7 +28,7 @@
 
 class JobCounterSingleton(object):
     """
-    We use here a Singleton to avoid a unprotected global variable.
+    We use here a Singleton to avoid an unprotected global variable.
     However, this counter needs to be global counter to guarantee unique job ids.
     This is needed in case of task that finish in less than a second, to avoid duplicated job ids
     """
@@ -42,18 +42,18 @@ def __new__(cls):
         return cls._instance
 
     def get_next_value(self) -> int:
-        self._counter += 1
-        return self._counter
+        self._counter += 1 # type: ignore
+        return self._counter # type: ignore
 
 
 def set_build_config(force_rebuild: bool,
                      force_rebuild_from: Tuple[str, ...],
                      force_pull: bool,
                      log_build_context_content: bool,
                      output_directory: str,
-                     temporary_base_directory: str,
-                     cache_directory: str,
-                     build_name: str, ):
+                     temporary_base_directory: Optional[str],
+                     cache_directory: Optional[str],
+                     build_name: Optional[str], ):
     luigi.configuration.get_config().set('build_config', 'force_rebuild', str(force_rebuild))
     luigi.configuration.get_config().set('build_config', 'force_rebuild_from', json.dumps(force_rebuild_from))
     luigi.configuration.get_config().set('build_config', 'force_pull', str(force_pull))
@@ -72,9 +72,8 @@ def set_output_directory(output_directory):
     luigi.configuration.get_config().set('build_config', 'output_directory', output_directory)
 
 
-def set_docker_repository_config(docker_password: str, docker_repository_name: str, docker_username: str,
-                                 tag_prefix: str,
-                                 kind: str):
+def set_docker_repository_config(docker_password: Optional[str], docker_repository_name: Optional[str],
+                                 docker_username: Optional[str], tag_prefix: str, kind: str):
     config_class = f'{kind}_docker_repository_config'
     luigi.configuration.get_config().set(config_class, 'tag_prefix', tag_prefix)
     if docker_repository_name is not None:
@@ -105,8 +104,8 @@ def import_build_steps(flavor_path: Tuple[str, ...]):
         path_to_build_steps = Path(path).joinpath("flavor_base/build_steps.py")
         module_name_for_build_steps = extract_modulename_for_build_steps(path)
         spec = importlib.util.spec_from_file_location(module_name_for_build_steps, path_to_build_steps)
-        module = importlib.util.module_from_spec(spec)
-        spec.loader.exec_module(module)
+        module = importlib.util.module_from_spec(spec) # type: ignore
+        spec.loader.exec_module(module) # type: ignore
 
 
 def generate_root_task(task_class, *args, **kwargs) -> DependencyLoggerBaseTask:
@@ -117,9 +116,21 @@ def generate_root_task(task_class, *args, **kwargs) -> DependencyLoggerBaseTask:
     return task_class(**params)
 
 
+def _run_task_with_logging_config(
+        task: DependencyLoggerBaseTask,
+        log_file_path: Path,
+        log_level: Optional[str],
+        use_job_specific_log_file: bool,
+        workers: int) -> bool:
+    with _configure_logging(log_file_path, log_level, use_job_specific_log_file) as run_kwargs:
+        no_scheduling_errors = luigi.build([task], workers=workers,
+                                           local_scheduler=True, **run_kwargs)
+        return no_scheduling_errors
+
+
 def run_task(task_creator: Callable[[], DependencyLoggerBaseTask], workers: int = 2,
              task_dependencies_dot_file: Optional[str] = None,
-             log_level: str = None, use_job_specific_log_file: bool = False) \
+             log_level: Optional[str] = None, use_job_specific_log_file: bool = False) \
         -> Any:
     setup_worker()
     task = task_creator()
@@ -142,18 +153,6 @@ def run_task(task_creator: Callable[[], DependencyLoggerBaseTask], workers: int
         task.cleanup(success)
 
 
-def _run_task_with_logging_config(
-        task: DependencyLoggerBaseTask,
-        log_file_path: Path,
-        log_level: Optional[str],
-        use_job_specific_log_file: bool,
-        workers: int) -> bool:
-    with _configure_logging(log_file_path, log_level, use_job_specific_log_file) as run_kwargs:
-        no_scheduling_errors = luigi.build([task], workers=workers,
-                                           local_scheduler=True, **run_kwargs)
-        return no_scheduling_errors
-
-
 def _handle_task_result(
         no_scheduling_errors: bool,
         success: bool,
@@ -181,7 +180,7 @@ def _configure_logging(
         use_job_specific_log_file: bool) -> Iterator[Dict[str, str]]:
     with get_luigi_log_config(log_file_target=log_file_path,
                               log_level=log_level,
-                              use_job_specific_log_file=use_job_specific_log_file) as luigi_config:
+                              use_job_specific_log_file=use_job_specific_log_file) as luigi_config: # type: Path
         no_configure_logging, run_kwargs = _configure_logging_parameter(
             log_level=log_level,
             luigi_config=luigi_config,
@@ -199,7 +198,7 @@ def _configure_logging(
         yield run_kwargs
 
 
-def _configure_logging_parameter(log_level: str, luigi_config: Path, use_job_specific_log_file: bool) \
+def _configure_logging_parameter(log_level: Optional[str], luigi_config: Path, use_job_specific_log_file: bool) \
         -> Tuple[bool, Dict[str, str]]:
     if use_job_specific_log_file:
         no_configure_logging = False
@@ -226,13 +225,13 @@ def generate_graph_from_task_dependencies(task: DependencyLoggerBaseTask, task_d
 
 
 def collect_dependencies(task: DependencyLoggerBaseTask) -> Set[TaskDependency]:
-    dependencies = set()
+    dependencies : Set[TaskDependency] = set()
     for root, directories, files in os.walk(task._get_dependencies_path_for_job()):
         for file in files:
             file_path = Path(root).joinpath(file)
             with open(file_path) as f:
                 for line in f.readlines():
-                    task_dependency = TaskDependency.from_json(line) # type: TaskDependency
+                    task_dependency : TaskDependency = TaskDependency.from_json(line) # type: ignore
                     if task_dependency.state == DependencyState.requested.name:
                         dependencies.add(task_dependency)
     return dependencies
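
The two `# type: ignore` comments in JobCounterSingleton.get_next_value are needed because _instance and _counter are created dynamically inside __new__, where MyPy cannot see their types. As a sketch of an alternative pattern (not what this commit adopts), declaring the attributes at class level gives MyPy the information directly and makes the suppressions unnecessary:

from typing import Optional

class JobCounterSingleton:
    # Class-level annotations make the attribute types visible to MyPy.
    _instance: Optional["JobCounterSingleton"] = None
    _counter: int = 0

    def __new__(cls) -> "JobCounterSingleton":
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def get_next_value(self) -> int:
        self._counter += 1
        return self._counter

# Both names refer to the same instance, so job ids stay unique.
a, b = JobCounterSingleton(), JobCounterSingleton()
assert a.get_next_value() == 1 and b.get_next_value() == 2
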
@@ -1,5 +1,5 @@
 import functools
-from typing import Tuple, Optional, Callable
+from typing import Tuple, Optional, Callable, Any
 import humanfriendly
 
 from exasol_integration_test_docker_environment.lib.api.common import (
@@ -26,8 +26,10 @@
 
 
 def _cleanup(environment_info: EnvironmentInfo) -> None:
-    remove_docker_container([environment_info.database_info.container_info.container_name])
-    remove_docker_volumes([environment_info.database_info.container_info.volume_name])
+    if environment_info.database_info.container_info is not None:
+        remove_docker_container([environment_info.database_info.container_info.container_name])
+        if environment_info.database_info.container_info.volume_name is not None:
+            remove_docker_volumes([environment_info.database_info.container_info.volume_name])
     remove_docker_networks([environment_info.network_info.network_name])
 
 
@@ -69,7 +71,7 @@ def spawn_test_environment(
     raises: TaskRuntimeError if spawning the test environment fails
     """
-    def str_or_none(x: any) -> str:
+    def str_or_none(x: Any) -> Optional[str]:
         return str(x) if x is not None else None
 
     parsed_db_mem_size = humanfriendly.parse_size(db_mem_size)
@@ -78,6 +80,8 @@ def str_or_none(x: any) -> str:
     parsed_db_disk_size = humanfriendly.parse_size(db_disk_size)
     if parsed_db_disk_size < humanfriendly.parse_size("100 MiB"):
         raise ArgumentConstraintError("db_disk_size", "needs to be at least 100 MiB")
+    db_os_access_value = DbOsAccess[db_os_access] if db_os_access else DbOsAccess.DOCKER_EXEC
+
     set_build_config(False,
                      tuple(),
                      False,
@@ -101,7 +105,7 @@ def str_or_none(x: any) -> str:
         docker_runtime=docker_runtime,
         docker_db_image_version=docker_db_image_version,
         docker_db_image_name=docker_db_image_name,
-        db_os_access=DbOsAccess[db_os_access],
+        db_os_access=db_os_access_value,
        db_user="sys",
         db_password="exasol",
         bucketfs_write_password="write",
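
The reworked _cleanup above shows the standard way to satisfy MyPy when a field is Optional: check `is not None` before touching attributes, which narrows the type inside the branch. The same pattern as a self-contained sketch, with simplified stand-ins for the container info and docker helpers:

from typing import List, Optional

class ContainerInfo:  # simplified stand-in
    def __init__(self, container_name: str, volume_name: Optional[str]):
        self.container_name = container_name
        self.volume_name = volume_name

def remove_docker_container(names: List[str]) -> None:  # stand-in helper
    print("removing containers:", names)

def remove_docker_volumes(names: List[str]) -> None:  # stand-in helper
    print("removing volumes:", names)

def cleanup(container_info: Optional[ContainerInfo]) -> None:
    # Each `is not None` check narrows the Optional type, so the
    # attribute accesses below it pass MyPy without suppressions.
    if container_info is not None:
        remove_docker_container([container_info.container_name])
        if container_info.volume_name is not None:
            remove_docker_volumes([container_info.volume_name])

cleanup(ContainerInfo("db_container_1", None))
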
@@ -1,5 +1,5 @@
 import functools
-from typing import Tuple, Optional, Callable
+from typing import Tuple, Optional, Callable, Any
 import humanfriendly
 
 from exasol_integration_test_docker_environment.lib.api.common import set_build_config, set_docker_repository_config, \
@@ -22,12 +22,12 @@
     .docker_db_test_environment_parameter import DbOsAccess
 
 def _cleanup(environment_info: EnvironmentInfo) -> None:
-    if environment_info.test_container_info is None:
-        remove_docker_container([environment_info.database_info.container_info.container_name])
-    else:
-        remove_docker_container([environment_info.test_container_info.container_name,
-                                 environment_info.database_info.container_info.container_name])
-    remove_docker_volumes([environment_info.database_info.container_info.volume_name])
+    if environment_info.test_container_info is not None:
+        remove_docker_container([environment_info.test_container_info.container_name])
+    if environment_info.database_info.container_info is not None:
+        remove_docker_container([environment_info.database_info.container_info.container_name])
+        if environment_info.database_info.container_info.volume_name is not None:
+            remove_docker_volumes([environment_info.database_info.container_info.volume_name])
     remove_docker_networks([environment_info.network_info.network_name])
 
 
@@ -71,7 +71,7 @@ def spawn_test_environment_with_test_container(
     raises: TaskRuntimeError if spawning the test environment fails
     """
-    def str_or_none(x: any) -> str:
+    def str_or_none(x: Any) -> Optional[str]:
         return str(x) if x is not None else None
     parsed_db_mem_size = humanfriendly.parse_size(db_mem_size)
     if parsed_db_mem_size < humanfriendly.parse_size("1 GiB"):
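
Both spawn functions fix the same two mistakes in str_or_none: lowercase `any` is the builtin function, not a type, so MyPy rejects it as an annotation, and the declared return type `str` contradicted the None passthrough. The corrected helper in isolation:

from typing import Any, Optional

def str_or_none(x: Any) -> Optional[str]:
    # typing.Any (not the builtin `any`) is required for "anything goes";
    # Optional[str] records that None is passed through unchanged.
    return str(x) if x is not None else None

assert str_or_none(1500) == "1500"
assert str_or_none(None) is None
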
@@ -85,8 +85,8 @@ def get_output(self) -> Any:
 
 
 class BaseTask(Task):
-    caller_output_path = luigi.ListParameter([], significant=False, visibility=ParameterVisibility.HIDDEN)
-    job_id = luigi.Parameter()
+    caller_output_path : List[str] = luigi.ListParameter([], significant=False, visibility=ParameterVisibility.HIDDEN) # type: ignore
+    job_id : str = luigi.Parameter() # type: ignore
 
     def __init__(self, *args, **kwargs):
         self._registered_tasks = []
@@ -153,7 +153,7 @@ def get_output_path(self) -> Path:
 
     def _get_output_path_for_job(self) -> Path:
         return Path(build_config().output_directory,
-                    "jobs", self.job_id)
+                    "jobs", self.job_id) # type: ignore
 
     def _extend_output_path(self):
         extension = self.extend_output_path()
@@ -184,7 +184,7 @@ def get_log_path(self) -> Path:
         return path
 
     def get_cache_path(self) -> Path:
-        path = Path(build_config().output_directory, "cache")
+        path = Path(str(build_config().output_directory), "cache")
         path.mkdir(parents=True, exist_ok=True)
         return path
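
The BaseTask change is a common luigi/MyPy idiom: class attributes are luigi.Parameter descriptors at definition time, but hold parsed values (str, list, ...) on task instances. Annotating the instance-level type and silencing the definition-time mismatch with `# type: ignore` gives method bodies precise types. A minimal sketch with a hypothetical task, not from this repository:

import luigi

class ExampleTask(luigi.Task):
    # Declared as a luigi.Parameter, but on an instance luigi substitutes
    # the parsed value; the annotation tells MyPy to treat it as str.
    job_id: str = luigi.Parameter()  # type: ignore

    def run(self) -> None:
        # MyPy now accepts str methods on self.job_id without complaint.
        print(self.job_id.upper())

task = ExampleTask(job_id="job_1")
task.run()  # prints "JOB_1"
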
@@ -3,7 +3,7 @@
 import docker
 import time
 from docker import DockerClient
-from typing import Protocol, runtime_checkable
+from typing import Protocol, runtime_checkable, Optional
 from docker.models.containers import Container, ExecResult
 from exasol_integration_test_docker_environment \
     .lib.base.ssh_access import SshKey
@@ -49,7 +49,7 @@ class DockerExecutor(DbOsExecutor):
     def __init__(self, docker_client: DockerClient, container_name: str):
         self._client = docker_client
         self._container_name = container_name
-        self._container = None
+        self._container : Optional[Container] = None
 
     def __enter__(self):
         self._container = self._client.containers.get(self._container_name)
@@ -62,6 +62,7 @@ def __del__(self):
         self.close()
 
     def exec(self, cmd: str) -> ExecResult:
+        assert self._container
         return self._container.exec_run(cmd)
 
     def close(self):
@@ -75,7 +76,7 @@ class SshExecutor(DbOsExecutor):
     def __init__(self, connect_string: str, key_file: str):
         self._connect_string = connect_string
         self._key_file = key_file
-        self._connection = None
+        self._connection : Optional[fabric.Connection] = None
 
     def __enter__(self):
         key = SshKey.read_from(self._key_file)
@@ -92,6 +93,7 @@ def __del__(self):
         self.close()
 
     def exec(self, cmd: str) -> ExecResult:
+        assert self._connection
         result = self._connection.run(cmd, warn=True, hide=True)
         output = result.stdout.encode("utf-8")
         return ExecResult(result.exited, output)
@@ -146,6 +148,7 @@ def executor(self) -> DbOsExecutor:
 class SshExecFactory(DbOsExecFactory):
     @classmethod
     def from_database_info(cls, info: DatabaseInfo):
+        assert info.ssh_info
         return SshExecFactory(
             f"{info.ssh_info.user}@{info.host}:{info.ports.ssh}",
             info.ssh_info.key_file,
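
The added assert statements serve double duty: they narrow Optional[...] members to their concrete types for MyPy, and they fail fast if exec() is called before __enter__ has established the connection. The pattern in a self-contained sketch, with simplified stand-ins rather than the fabric/docker classes above:

from typing import Optional

class Connection:  # stand-in for fabric.Connection
    def run(self, cmd: str) -> str:
        return f"ran: {cmd}"

class SshLikeExecutor:
    def __init__(self) -> None:
        self._connection: Optional[Connection] = None

    def __enter__(self) -> "SshLikeExecutor":
        self._connection = Connection()
        return self

    def __exit__(self, *args: object) -> None:
        self._connection = None

    def exec(self, cmd: str) -> str:
        # assert narrows Optional[Connection] to Connection for MyPy and
        # raises AssertionError if used outside the `with` block.
        assert self._connection
        return self._connection.run(cmd)

with SshLikeExecutor() as executor:
    print(executor.exec("ls /"))
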
@@ -8,7 +8,7 @@
 
 
 class FlavorsBaseTask(DependencyLoggerBaseTask):
-    flavor_paths = luigi.ListParameter()
+    flavor_paths : List[str] = luigi.ListParameter() # type: ignore
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Any
+from typing import Any, Optional
 
 import jsonpickle
 from luigi import LocalTarget
@@ -10,7 +10,7 @@ class JsonPickleTarget(LocalTarget):
     def __init__(self, path: Path, is_tmp: bool = False):
         super().__init__(path=str(path), is_tmp=is_tmp)
 
-    def write(self, obj:Any, indent:int=None):
+    def write(self, obj:Any, indent: Optional[int] = None):
         jsonpickle.set_preferred_backend('simplejson')
         jsonpickle.set_encoder_options('simplejson', indent=indent)
         json_str=jsonpickle.encode(obj)
@@ -2,7 +2,7 @@
 import os
 import tempfile
 from pathlib import Path
-from typing import Optional, Callable
+from typing import Optional, Callable, Generator
 
 import jinja2
 import logging
@@ -52,16 +52,16 @@ def restore_logger(logger_creator: Callable[[], logging.Logger]):
     }
     yield
     after_logger = logger_creator()
-    after_logger.level = logger_info[LOG_LEVEL]
-    after_logger.handlers = logger_info[HANDLERS]
-    after_logger.filters = logger_info[FILTERS]
-    after_logger.propagate = logger_info[PROPAGATE]
+    after_logger.level = logger_info[LOG_LEVEL] # type: ignore
+    after_logger.handlers = logger_info[HANDLERS] # type: ignore
+    after_logger.filters = logger_info[FILTERS] # type: ignore
+    after_logger.propagate = logger_info[PROPAGATE] # type: ignore
 
 
 @contextlib.contextmanager
 def get_luigi_log_config(log_file_target: Path,
                          use_job_specific_log_file: bool,
-                         log_level: Optional[str] = None) -> Path:
+                         log_level: Optional[str] = None) -> Generator[Path, None, None]:
     """
     Yields a context manager containing the path of the log-config file.
     log_file_target contains the location of the log-file.
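
This last change corrects a frequent annotation mistake with @contextlib.contextmanager: the decorated function yields its value, so its return type is Generator[Path, None, None] (or Iterator[Path]), not Path. A runnable sketch with a hypothetical config file:

import contextlib
from pathlib import Path
from typing import Generator

@contextlib.contextmanager
def temp_log_config(path: Path) -> Generator[Path, None, None]:
    # The function body yields the Path; annotating `-> Path` would make
    # MyPy flag the yield statement.
    path.write_text("log_level=INFO\n")
    try:
        yield path
    finally:
        path.unlink()

with temp_log_config(Path("luigi_log.cfg")) as cfg:
    print(cfg.read_text())
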