diff --git a/connections/github.py b/connections/github.py
index ef0b33d3..37cf205c 100644
--- a/connections/github.py
+++ b/connections/github.py
@@ -46,10 +46,10 @@ def get_token():
     global _token
     cfg = config.read_config()
-    github_cfg = cfg['github']
-    app_id = github_cfg.get('app_id')
-    installation_id = github_cfg.get('installation_id')
-    private_key_path = github_cfg.get('private_key')
+    github_cfg = cfg[config.SECTION_GITHUB]
+    app_id = github_cfg.get(config.GITHUB_SETTING_APP_ID)
+    installation_id = github_cfg.get(config.GITHUB_SETTING_INSTALLATION_ID)
+    private_key_path = github_cfg.get(config.GITHUB_SETTING_PRIVATE_KEY)
     private_key = ''
     with open(private_key_path, 'r') as private_key_file:
diff --git a/eessi_bot_event_handler.py b/eessi_bot_event_handler.py
index 49f10238..fa8bb062 100644
--- a/eessi_bot_event_handler.py
+++ b/eessi_bot_event_handler.py
@@ -26,10 +26,8 @@
 # Local application imports (anything from EESSI/eessi-bot-software-layer)
 from connections import github
-import tasks.build as build
 from tasks.build import check_build_permission, get_architecture_targets, get_repo_cfg, \
     request_bot_build_issue_comments, submit_build_jobs
-import tasks.deploy as deploy
 from tasks.deploy import deploy_built_artefacts
 from tools import config
 from tools.args import event_handler_parse
@@ -39,11 +37,59 @@
 from tools.pr_comments import create_comment
 
 
-APP_NAME = "app_name"
-BOT_CONTROL = "bot_control"
-COMMAND_RESPONSE_FMT = "command_response_fmt"
-GITHUB = "github"
-REPO_TARGET_MAP = "repo_target_map"
+REQUIRED_CONFIG = {
+    config.SECTION_ARCHITECTURETARGETS: [
+        config.ARCHITECTURETARGETS_SETTING_ARCH_TARGET_MAP],  # required
+    config.SECTION_BOT_CONTROL: [
+        config.BOT_CONTROL_SETTING_COMMAND_PERMISSION,  # required
+        config.BOT_CONTROL_SETTING_COMMAND_RESPONSE_FMT],  # required
+    config.SECTION_BUILDENV: [
+        config.BUILDENV_SETTING_BUILD_JOB_SCRIPT,  # required
+        config.BUILDENV_SETTING_BUILD_LOGS_DIR,  # optional+recommended
+        config.BUILDENV_SETTING_BUILD_PERMISSION,  # optional+recommended
+        config.BUILDENV_SETTING_CONTAINER_CACHEDIR,  # optional+recommended
+        # config.BUILDENV_SETTING_CVMFS_CUSTOMIZATIONS,  # optional
+        # config.BUILDENV_SETTING_HTTPS_PROXY,  # optional
+        # config.BUILDENV_SETTING_HTTP_PROXY,  # optional
+        config.BUILDENV_SETTING_JOBS_BASE_DIR,  # required
+        # config.BUILDENV_SETTING_LOAD_MODULES,  # optional
+        config.BUILDENV_SETTING_LOCAL_TMP,  # required
+        config.BUILDENV_SETTING_NO_BUILD_PERMISSION_COMMENT,  # required
+        config.BUILDENV_SETTING_SHARED_FS_PATH,  # optional+recommended
+        # config.BUILDENV_SETTING_SLURM_PARAMS,  # optional
+        config.BUILDENV_SETTING_SUBMIT_COMMAND],  # required
+    config.SECTION_DEPLOYCFG: [
+        config.DEPLOYCFG_SETTING_ARTEFACT_PREFIX,  # (required)
+        config.DEPLOYCFG_SETTING_ARTEFACT_UPLOAD_SCRIPT,  # required
+        config.DEPLOYCFG_SETTING_BUCKET_NAME,  # required
+        config.DEPLOYCFG_SETTING_DEPLOY_PERMISSION,  # optional+recommended
+        # config.DEPLOYCFG_SETTING_ENDPOINT_URL,  # optional
+        config.DEPLOYCFG_SETTING_METADATA_PREFIX,  # (required)
+        config.DEPLOYCFG_SETTING_NO_DEPLOY_PERMISSION_COMMENT,  # required
+        config.DEPLOYCFG_SETTING_UPLOAD_POLICY],  # required
+    config.SECTION_DOWNLOAD_PR_COMMENTS: [
+        config.DOWNLOAD_PR_COMMENTS_SETTING_CURL_FAILURE,  # required
+        config.DOWNLOAD_PR_COMMENTS_SETTING_CURL_TIP,  # required
+        config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_APPLY_FAILURE,  # required
+        config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_APPLY_TIP,  # required
+        config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CHECKOUT_FAILURE,  # required
+        config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CHECKOUT_TIP,  # required
+        config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CLONE_FAILURE,  # required
+        config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CLONE_TIP],  # required
+    config.SECTION_EVENT_HANDLER: [
+        config.EVENT_HANDLER_SETTING_LOG_PATH],  # required
+    config.SECTION_GITHUB: [
+        config.GITHUB_SETTING_APP_ID,  # required
+        config.GITHUB_SETTING_APP_NAME,  # required
+        config.GITHUB_SETTING_INSTALLATION_ID,  # required
+        config.GITHUB_SETTING_PRIVATE_KEY],  # required
+    config.SECTION_REPO_TARGETS: [
+        config.REPO_TARGETS_SETTING_REPO_TARGET_MAP,  # required
+        config.REPO_TARGETS_SETTING_REPOS_CFG_DIR],  # required
+    config.SECTION_SUBMITTED_JOB_COMMENTS: [
+        config.SUBMITTED_JOB_COMMENTS_SETTING_INITIAL_COMMENT,  # required
+        config.SUBMITTED_JOB_COMMENTS_SETTING_AWAITS_RELEASE]  # required
+    }
 
 
 class EESSIBotSoftwareLayer(PyGHee):
@@ -62,8 +108,8 @@ def __init__(self, *args, **kwargs):
         super(EESSIBotSoftwareLayer, self).__init__(*args, **kwargs)
         self.cfg = config.read_config()
-        event_handler_cfg = self.cfg['event_handler']
-        self.logfile = event_handler_cfg.get('log_path')
+        event_handler_cfg = self.cfg[config.SECTION_EVENT_HANDLER]
+        self.logfile = event_handler_cfg.get(config.EVENT_HANDLER_SETTING_LOG_PATH)
 
     def log(self, msg, *args):
         """
@@ -110,8 +156,8 @@ def handle_issue_comment_event(self, event_info, log_file=None):
         # log level is set to debug
         self.log(f"Comment in {issue_url} (owned by @{owner}) {action} by @{sender}")
 
-        app_name = self.cfg[GITHUB][APP_NAME]
-        command_response_fmt = self.cfg[BOT_CONTROL][COMMAND_RESPONSE_FMT]
+        app_name = self.cfg[config.SECTION_GITHUB][config.GITHUB_SETTING_APP_NAME]
+        command_response_fmt = self.cfg[config.SECTION_BOT_CONTROL][config.BOT_CONTROL_SETTING_COMMAND_RESPONSE_FMT]
 
         # currently, only commands in new comments are supported
         #  - commands have the syntax 'bot: COMMAND [ARGS*]'
@@ -301,8 +347,8 @@ def handle_pull_request_labeled_event(self, event_info, pr):
             request_body = event_info['raw_request_body']
             repo_name = request_body['repository']['full_name']
             pr_number = request_body['pull_request']['number']
-            app_name = self.cfg[GITHUB][APP_NAME]
-            command_response_fmt = self.cfg[BOT_CONTROL][COMMAND_RESPONSE_FMT]
+            app_name = self.cfg[config.SECTION_GITHUB][config.GITHUB_SETTING_APP_NAME]
+            command_response_fmt = self.cfg[config.SECTION_BOT_CONTROL][config.BOT_CONTROL_SETTING_COMMAND_RESPONSE_FMT]
             comment_body = command_response_fmt.format(
                 app_name=app_name,
                 comment_response=msg,
@@ -330,7 +376,7 @@ def handle_pull_request_opened_event(self, event_info, pr):
         PyGithub, not the github from the internal connections module)
         """
         self.log("PR opened: waiting for label bot:build")
-        app_name = self.cfg[GITHUB][APP_NAME]
+        app_name = self.cfg[config.SECTION_GITHUB][config.GITHUB_SETTING_APP_NAME]
         # TODO check if PR already has a comment with arch targets and
         # repositories
         arch_map = get_architecture_targets(self.cfg)
@@ -343,7 +389,8 @@ def handle_pull_request_opened_event(self, event_info, pr):
             comment += f"{', '.join([f'`{arch}`' for arch in architectures])}"
         else:
             comment += "none"
-        repositories = list(set([repo_id for repo_ids in repo_cfg[REPO_TARGET_MAP].values() for repo_id in repo_ids]))
+        repositories = list(set([repo_id for repo_ids in repo_cfg[config.REPO_TARGETS_SETTING_REPO_TARGET_MAP].values()
+                                 for repo_id in repo_ids]))
         comment += "\n- repositories: "
         if len(repositories) > 0:
             comment += f"{', '.join([f'`{repo_id}`' for repo_id in repositories])}"
@@ -547,9 +594,7 @@ def start(self, app, port=3000):
         print(port_info)
         self.log(port_info)
 
-        event_handler_cfg = self.cfg['event_handler']
-        my_logfile = event_handler_cfg.get('log_path')
-        log_file_info = "logging in to %s" % my_logfile
+        log_file_info = "logging in to %s" % self.logfile
         print(log_file_info)
         self.log(log_file_info)
         waitress.serve(app, listen='*:%s' % port)
@@ -563,13 +608,8 @@ def main():
     """
     opts = event_handler_parse()
 
-    required_config = {
-        build.SUBMITTED_JOB_COMMENTS: [build.INITIAL_COMMENT, build.AWAITS_RELEASE],
-        build.BUILDENV: [build.NO_BUILD_PERMISSION_COMMENT],
-        deploy.DEPLOYCFG: [deploy.NO_DEPLOY_PERMISSION_COMMENT]
-    }
     # config is read and checked for settings to raise an exception early when the event_handler starts.
-    config.check_required_cfg_settings(required_config)
+    config.check_required_cfg_settings(REQUIRED_CONFIG)
     github.connect()
 
     if opts.file:
diff --git a/eessi_bot_job_manager.py b/eessi_bot_job_manager.py
index 81d4f3f7..e754059d 100644
--- a/eessi_bot_job_manager.py
+++ b/eessi_bot_job_manager.py
@@ -48,21 +48,27 @@
 from tools.pr_comments import get_submitted_job_comment, update_comment
 
 
-AWAITS_LAUNCH = "awaits_launch"
-FINISHED_JOB_COMMENTS = "finished_job_comments"
-JOB_RESULT_COMMENT_DESCRIPTION = "comment_description"
-JOB_RESULT_UNKNOWN_FMT = "job_result_unknown_fmt"
-JOB_TEST_COMMENT_DESCRIPTION = "comment_description"
-JOB_TEST_UNKNOWN_FMT = "job_test_unknown_fmt"
-NEW_JOB_COMMENTS = "new_job_comments"
-RUNNING_JOB = "running_job"
-RUNNING_JOB_COMMENTS = "running_job_comments"
-
+# settings that are required in 'app.cfg'
 REQUIRED_CONFIG = {
-    FINISHED_JOB_COMMENTS: [JOB_RESULT_UNKNOWN_FMT, JOB_TEST_UNKNOWN_FMT],
-    NEW_JOB_COMMENTS: [AWAITS_LAUNCH],
-    RUNNING_JOB_COMMENTS: [RUNNING_JOB]
-}
+    config.SECTION_FINISHED_JOB_COMMENTS: [
+        config.FINISHED_JOB_COMMENTS_SETTING_JOB_RESULT_UNKNOWN_FMT,  # required
+        config.FINISHED_JOB_COMMENTS_SETTING_JOB_TEST_UNKNOWN_FMT],  # required
+    config.SECTION_GITHUB: [
+        config.GITHUB_SETTING_APP_ID,  # required
+        # config.GITHUB_SETTING_APP_NAME,  # unused
+        config.GITHUB_SETTING_INSTALLATION_ID,  # required
+        config.GITHUB_SETTING_PRIVATE_KEY],  # required
+    config.SECTION_JOB_MANAGER: [
+        config.JOB_MANAGER_SETTING_LOG_PATH,  # required
+        config.JOB_MANAGER_SETTING_JOB_IDS_DIR,  # required
+        config.JOB_MANAGER_SETTING_POLL_COMMAND,  # required
+        config.JOB_MANAGER_SETTING_POLL_INTERVAL,  # optional+recommended
+        config.JOB_MANAGER_SETTING_SCONTROL_COMMAND],  # required
+    config.SECTION_NEW_JOB_COMMENTS: [
+        config.NEW_JOB_COMMENTS_SETTING_AWAITS_LAUNCH],  # required
+    config.SECTION_RUNNING_JOB_COMMENTS: [
+        config.RUNNING_JOB_COMMENTS_SETTING_RUNNING_JOB]  # required
+    }
 
 
 class EESSIBotSoftwareLayerJobManager:
@@ -77,8 +83,8 @@ def __init__(self):
         configuration to set the path to the logfile.
         """
         cfg = config.read_config()
-        job_manager_cfg = cfg['job_manager']
-        self.logfile = job_manager_cfg.get('log_path')
+        job_manager_cfg = cfg[config.SECTION_JOB_MANAGER]
+        self.logfile = job_manager_cfg.get(config.JOB_MANAGER_SETTING_LOG_PATH)
 
     def get_current_jobs(self):
         """
@@ -340,10 +346,10 @@ def process_new_job(self, new_job):
 
         # update status table if we found a comment
         if "comment_id" in new_job:
-            new_job_comments_cfg = config.read_config()[NEW_JOB_COMMENTS]
+            new_job_comments_cfg = config.read_config()[config.SECTION_NEW_JOB_COMMENTS]
             dt = datetime.now(timezone.utc)
             update = "\n|%s|released|" % dt.strftime("%b %d %X %Z %Y")
-            update += f"{new_job_comments_cfg[AWAITS_LAUNCH]}|"
+            update += f"{new_job_comments_cfg[config.NEW_JOB_COMMENTS_SETTING_AWAITS_LAUNCH]}|"
             update_comment(new_job["comment_id"], pr, update)
         else:
             log(
@@ -412,8 +418,9 @@ def process_running_jobs(self, running_job):
 
         if "comment_id" in running_job:
             dt = datetime.now(timezone.utc)
-            running_job_comments_cfg = config.read_config()[RUNNING_JOB_COMMENTS]
-            running_msg = running_job_comments_cfg[RUNNING_JOB].format(job_id=running_job['jobid'])
+            running_job_comments_cfg = config.read_config()[config.SECTION_RUNNING_JOB_COMMENTS]
+            running_msg_fmt = running_job_comments_cfg[config.RUNNING_JOB_COMMENTS_SETTING_RUNNING_JOB]
+            running_msg = running_msg_fmt.format(job_id=running_job['jobid'])
             if "comment_body" in running_job and running_msg in running_job["comment_body"]:
                 log("Not updating comment, '%s' already found" % running_msg)
             else:
@@ -476,7 +483,7 @@ def process_finished_job(self, finished_job):
         #  status = {SUCCESS,FAILURE,UNKNOWN}
 
         # obtain format templates from app.cfg
-        finished_job_comments_cfg = config.read_config()[FINISHED_JOB_COMMENTS]
+        finished_job_comments_cfg = config.read_config()[config.SECTION_FINISHED_JOB_COMMENTS]
 
         # check if _bot_jobJOBID.result exits
         job_result_file = f"_bot_job{job_id}.result"
@@ -485,13 +492,13 @@ def process_finished_job(self, finished_job):
                                                        job_metadata.JOB_RESULT_SECTION,
                                                        self.logfile)
 
-        job_result_unknown_fmt = finished_job_comments_cfg[JOB_RESULT_UNKNOWN_FMT]
+        job_result_unknown_fmt = finished_job_comments_cfg[config.FINISHED_JOB_COMMENTS_SETTING_JOB_RESULT_UNKNOWN_FMT]
         # set fallback comment_description in case no result file was found
         # (job_metadata.get_section_from_file returned None)
         comment_description = job_result_unknown_fmt.format(filename=job_result_file)
         if job_results:
             # get preformatted comment_description or use previously set default for unknown
-            comment_description = job_results.get(JOB_RESULT_COMMENT_DESCRIPTION, comment_description)
+            comment_description = job_results.get(job_metadata.JOB_RESULT_COMMENT_DESCRIPTION, comment_description)
 
         # report to log
         log(f"{fn}(): finished job {job_id}\n"
@@ -514,13 +521,13 @@ def process_finished_job(self, finished_job):
                                                      job_metadata.JOB_TEST_SECTION,
                                                      self.logfile)
 
-        job_test_unknown_fmt = finished_job_comments_cfg[JOB_TEST_UNKNOWN_FMT]
+        job_test_unknown_fmt = finished_job_comments_cfg[config.FINISHED_JOB_COMMENTS_SETTING_JOB_TEST_UNKNOWN_FMT]
         # set fallback comment_description in case no test file was found
         # (job_metadata.get_section_from_file returned None)
         comment_description = job_test_unknown_fmt.format(filename=job_test_file)
         if job_tests:
             # get preformatted comment_description or use previously set default for unknown
-            comment_description = job_tests.get(JOB_TEST_COMMENT_DESCRIPTION, comment_description)
+            comment_description = job_tests.get(job_metadata.JOB_TEST_COMMENT_DESCRIPTION, comment_description)
 
         # report to log
         log(f"{fn}(): finished job {job_id}, test suite result\n"
@@ -611,16 +618,16 @@ def main():
         job_manager.scontrol_command = ""
     if max_iter != 0:
         cfg = config.read_config()
-        job_mgr = cfg["job_manager"]
-        job_manager.job_ids_dir = job_mgr.get("job_ids_dir")
+        job_mgr = cfg[config.SECTION_JOB_MANAGER]
+        job_manager.job_ids_dir = job_mgr.get(config.JOB_MANAGER_SETTING_JOB_IDS_DIR)
         job_manager.submitted_jobs_dir = os.path.join(
             job_manager.job_ids_dir, "submitted"
         )
-        job_manager.poll_command = job_mgr.get("poll_command") or False
-        poll_interval = int(job_mgr.get("poll_interval") or 0)
+        job_manager.poll_command = job_mgr.get(config.JOB_MANAGER_SETTING_POLL_COMMAND) or False
+        poll_interval = int(job_mgr.get(config.JOB_MANAGER_SETTING_POLL_INTERVAL) or 0)
         if poll_interval <= 0:
             poll_interval = 60
-        job_manager.scontrol_command = job_mgr.get("scontrol_command") or False
+        job_manager.scontrol_command = job_mgr.get(config.JOB_MANAGER_SETTING_SCONTROL_COMMAND) or False
         os.makedirs(job_manager.submitted_jobs_dir, exist_ok=True)
 
     # max_iter
diff --git a/tasks/build.py b/tasks/build.py
index ace0c72b..82a0911e 100644
--- a/tasks/build.py
+++ b/tasks/build.py
@@ -31,70 +31,18 @@
 # Local application imports (anything from EESSI/eessi-bot-software-layer)
 from connections import github
-from tools import config, pr_comments, run_cmd
-from tools.job_metadata import create_metadata_file
-
-
-APP_NAME = "app_name"
-ARCHITECTURE_TARGETS = "architecturetargets"
-AWAITS_RELEASE = "awaits_release"
-BUILDENV = "buildenv"
-BUILD_JOB_SCRIPT = "build_job_script"
-BUILD_LOGS_DIR = "build_logs_dir"
-BUILD_PERMISSION = "build_permission"
-CFG_DIRNAME = "cfg"
-CONTAINER_CACHEDIR = "container_cachedir"
-CURL_FAILURE = "curl_failure"
-CURL_TIP = "curl_tip"
-CVMFS_CUSTOMIZATIONS = "cvmfs_customizations"
+from tools import config, cvmfs_repository, job_metadata, pr_comments, run_cmd
+
+
+# defaults (used if not specified via, eg, 'app.cfg')
 DEFAULT_JOB_TIME_LIMIT = "24:00:00"
-DOWNLOAD_PR_COMMENTS = "download_pr_comments"
-ERROR_CURL = "curl"
-ERROR_GIT_APPLY = "git apply"
-ERROR_GIT_CHECKOUT = "git checkout"
-ERROR_GIT_CLONE = "curl"
-ERROR_NONE = "none"
-GITHUB = "github"
-GIT_CLONE_FAILURE = "git_clone_failure"
-GIT_CLONE_TIP = "git_clone_tip"
-GIT_CHECKOUT_FAILURE = "git_checkout_failure"
-GIT_CHECKOUT_TIP = "git_checkout_tip"
-GIT_APPLY_FAILURE = "git_apply_failure"
-GIT_APPLY_TIP = "git_apply_tip"
-HTTPS_PROXY = "https_proxy"
-HTTP_PROXY = "http_proxy"
-INITIAL_COMMENT = "initial_comment"
-JOBS_BASE_DIR = "jobs_base_dir"
-JOB_ARCHITECTURE = "architecture"
-JOB_CFG_FILENAME = "job.cfg"
-JOB_CONTAINER = "container"
-JOB_LOCAL_TMP = "local_tmp"
-JOB_HTTPS_PROXY = "https_proxy"
-JOB_HTTP_PROXY = "http_proxy"
-JOB_LOAD_MODULES = "load_modules"
-JOB_OS_TYPE = "os_type"
-JOB_REPOSITORY = "repository"
-JOB_REPOS_CFG_DIR = "repos_cfg_dir"
-JOB_REPO_ID = "repo_id"
-JOB_REPO_NAME = "repo_name"
-JOB_REPO_VERSION = "repo_version"
-JOB_SITECONFIG = "site_config"
-JOB_SOFTWARE_SUBDIR = "software_subdir"
-LOAD_MODULES = "load_modules"
-LOCAL_TMP = "local_tmp"
-NO_BUILD_PERMISSION_COMMENT = "no_build_permission_comment"
-REPOS_CFG_DIR = "repos_cfg_dir"
-REPOS_REPO_NAME = "repo_name"
-REPOS_REPO_VERSION = "repo_version"
-REPOS_CONFIG_BUNDLE = "config_bundle"
-REPOS_CONFIG_MAP = "config_map"
-REPOS_CONTAINER = "container"
-REPO_TARGETS = "repo_targets"
-REPO_TARGET_MAP = "repo_target_map"
-SHARED_FS_PATH = "shared_fs_path"
-SLURM_PARAMS = "slurm_params"
-SUBMITTED_JOB_COMMENTS = "submitted_job_comments"
-SUBMIT_COMMAND = "submit_command"
+
+# error codes used in this file
+_ERROR_CURL = "curl"
+_ERROR_GIT_APPLY = "git apply"
+_ERROR_GIT_CHECKOUT = "git checkout"
+_ERROR_GIT_CLONE = "git clone"
+_ERROR_NONE = "none"
 
 Job = namedtuple('Job', ('working_dir', 'arch_target', 'repo_id', 'slurm_opts', 'year_month', 'pr_id'))
@@ -117,44 +65,45 @@ def get_build_env_cfg(cfg):
     """
     fn = sys._getframe().f_code.co_name
 
-    buildenv = cfg[BUILDENV]
+    buildenv = cfg[config.SECTION_BUILDENV]
 
-    jobs_base_dir = buildenv.get(JOBS_BASE_DIR)
+    jobs_base_dir = buildenv.get(config.BUILDENV_SETTING_JOBS_BASE_DIR)
     log(f"{fn}(): jobs_base_dir '{jobs_base_dir}'")
-    config_data = {JOBS_BASE_DIR: jobs_base_dir}
-    local_tmp = buildenv.get(LOCAL_TMP)
+    config_data = {config.BUILDENV_SETTING_JOBS_BASE_DIR: jobs_base_dir}
+
+    local_tmp = buildenv.get(config.BUILDENV_SETTING_LOCAL_TMP)
     log(f"{fn}(): local_tmp '{local_tmp}'")
-    config_data[LOCAL_TMP] = local_tmp
+    config_data[config.BUILDENV_SETTING_LOCAL_TMP] = local_tmp
 
-    build_job_script = buildenv.get(BUILD_JOB_SCRIPT)
+    build_job_script = buildenv.get(config.BUILDENV_SETTING_BUILD_JOB_SCRIPT)
     log(f"{fn}(): build_job_script '{build_job_script}'")
-    config_data[BUILD_JOB_SCRIPT] = build_job_script
+    config_data[config.BUILDENV_SETTING_BUILD_JOB_SCRIPT] = build_job_script
 
-    submit_command = buildenv.get(SUBMIT_COMMAND)
+    submit_command = buildenv.get(config.BUILDENV_SETTING_SUBMIT_COMMAND)
     log(f"{fn}(): submit_command '{submit_command}'")
-    config_data[SUBMIT_COMMAND] = submit_command
+    config_data[config.BUILDENV_SETTING_SUBMIT_COMMAND] = submit_command
 
-    slurm_params = buildenv.get(SLURM_PARAMS)
+    slurm_params = buildenv.get(config.BUILDENV_SETTING_SLURM_PARAMS)
     # always submit jobs with hold set, so job manager can release them
     slurm_params += ' --hold'
     log(f"{fn}(): slurm_params '{slurm_params}'")
-    config_data[SLURM_PARAMS] = slurm_params
+    config_data[config.BUILDENV_SETTING_SLURM_PARAMS] = slurm_params
 
-    shared_fs_path = buildenv.get(SHARED_FS_PATH)
+    shared_fs_path = buildenv.get(config.BUILDENV_SETTING_SHARED_FS_PATH)
     log(f"{fn}(): shared_fs_path: '{shared_fs_path}'")
-    config_data[SHARED_FS_PATH] = shared_fs_path
+    config_data[config.BUILDENV_SETTING_SHARED_FS_PATH] = shared_fs_path
 
-    container_cachedir = buildenv.get(CONTAINER_CACHEDIR)
+    container_cachedir = buildenv.get(config.BUILDENV_SETTING_CONTAINER_CACHEDIR)
     log(f"{fn}(): container_cachedir '{container_cachedir}'")
-    config_data[CONTAINER_CACHEDIR] = container_cachedir
+    config_data[config.BUILDENV_SETTING_CONTAINER_CACHEDIR] = container_cachedir
 
-    build_logs_dir = buildenv.get(BUILD_LOGS_DIR)
+    build_logs_dir = buildenv.get(config.BUILDENV_SETTING_BUILD_LOGS_DIR)
     log(f"{fn}(): build_logs_dir '{build_logs_dir}'")
-    config_data[BUILD_LOGS_DIR] = build_logs_dir
+    config_data[config.BUILDENV_SETTING_BUILD_LOGS_DIR] = build_logs_dir
 
     cvmfs_customizations = {}
     try:
-        cvmfs_customizations_str = buildenv.get(CVMFS_CUSTOMIZATIONS)
+        cvmfs_customizations_str = buildenv.get(config.BUILDENV_SETTING_CVMFS_CUSTOMIZATIONS)
         log("{fn}(): cvmfs_customizations '{cvmfs_customizations_str}'")
 
         if cvmfs_customizations_str is not None:
@@ -165,19 +114,19 @@ def get_build_env_cfg(cfg):
         print(e)
         error(f"{fn}(): Value for cvmfs_customizations ({cvmfs_customizations_str}) could not be decoded.")
 
-    config_data[CVMFS_CUSTOMIZATIONS] = cvmfs_customizations
+    config_data[config.BUILDENV_SETTING_CVMFS_CUSTOMIZATIONS] = cvmfs_customizations
 
-    http_proxy = buildenv.get(HTTP_PROXY, None)
+    http_proxy = buildenv.get(config.BUILDENV_SETTING_HTTP_PROXY, None)
     log(f"{fn}(): http_proxy '{http_proxy}'")
-    config_data[HTTP_PROXY] = http_proxy
+    config_data[config.BUILDENV_SETTING_HTTP_PROXY] = http_proxy
 
-    https_proxy = buildenv.get(HTTPS_PROXY, None)
+    https_proxy = buildenv.get(config.BUILDENV_SETTING_HTTPS_PROXY, None)
     log(f"{fn}(): https_proxy '{https_proxy}'")
-    config_data[HTTPS_PROXY] = https_proxy
+    config_data[config.BUILDENV_SETTING_HTTPS_PROXY] = https_proxy
 
-    load_modules = buildenv.get(LOAD_MODULES, None)
+    load_modules = buildenv.get(config.BUILDENV_SETTING_LOAD_MODULES, None)
     log(f"{fn}(): load_modules '{load_modules}'")
-    config_data[LOAD_MODULES] = load_modules
+    config_data[config.BUILDENV_SETTING_LOAD_MODULES] = load_modules
 
     return config_data
@@ -197,9 +146,9 @@ def get_architecture_targets(cfg):
     """
     fn = sys._getframe().f_code.co_name
 
-    architecture_targets = cfg[ARCHITECTURE_TARGETS]
+    architecture_targets = cfg[config.SECTION_ARCHITECTURETARGETS]
 
-    arch_target_map = json.loads(architecture_targets.get('arch_target_map'))
+    arch_target_map = json.loads(architecture_targets.get(config.ARCHITECTURETARGETS_SETTING_ARCH_TARGET_MAP))
     log(f"{fn}(): arch target map '{json.dumps(arch_target_map)}'")
     return arch_target_map
@@ -215,10 +164,11 @@ def get_repo_cfg(cfg):
     Returns:
         (dict): dictionary containing repository settings as follows
-           - {REPOS_CFG_DIR: path to repository config directory as defined in 'app.cfg'}
-           - {REPO_TARGET_MAP: json of REPO_TARGET_MAP value as defined in 'app.cfg'}
-           - for all sections [JOB_REPO_ID] defined in REPOS_CFG_DIR/repos.cfg add a
-             mapping {JOB_REPO_ID: dictionary containing settings of that section}
+           - {config.REPO_TARGETS_SETTING_REPOS_CFG_DIR: path to repository config directory as defined in 'app.cfg'}
+           - {config.REPO_TARGETS_SETTING_REPO_TARGET_MAP: json of
+             config.REPO_TARGETS_SETTING_REPO_TARGET_MAP value as defined in 'app.cfg'}
+           - for all sections [repo_id] defined in config.REPO_TARGETS_SETTING_REPOS_CFG_DIR/repos.cfg add a
+             mapping {repo_id: dictionary containing settings of that section}
     """
     fn = sys._getframe().f_code.co_name
@@ -228,13 +178,14 @@ def get_repo_cfg(cfg):
     if repo_cfg:
         return repo_cfg
 
-    repo_cfg_org = cfg[REPO_TARGETS]
+    repo_cfg_org = cfg[config.SECTION_REPO_TARGETS]
     repo_cfg = {}
-    repo_cfg[REPOS_CFG_DIR] = repo_cfg_org.get(REPOS_CFG_DIR, None)
+    settings_repos_cfg_dir = config.REPO_TARGETS_SETTING_REPOS_CFG_DIR
+    repo_cfg[settings_repos_cfg_dir] = repo_cfg_org.get(settings_repos_cfg_dir, None)
 
     repo_map = {}
     try:
-        repo_map_str = repo_cfg_org.get(REPO_TARGET_MAP)
+        repo_map_str = repo_cfg_org.get(config.REPO_TARGETS_SETTING_REPO_TARGET_MAP)
         log(f"{fn}(): repo_map '{repo_map_str}'")
 
         if repo_map_str is not None:
@@ -245,13 +196,13 @@ def get_repo_cfg(cfg):
         print(err)
         error(f"{fn}(): Value for repo_map ({repo_map_str}) could not be decoded.")
 
-    repo_cfg[REPO_TARGET_MAP] = repo_map
+    repo_cfg[config.REPO_TARGETS_SETTING_REPO_TARGET_MAP] = repo_map
 
-    if repo_cfg[REPOS_CFG_DIR] is None:
+    if repo_cfg[config.REPO_TARGETS_SETTING_REPOS_CFG_DIR] is None:
         return repo_cfg
 
     # add entries for sections from repos.cfg (one dictionary per section)
-    repos_cfg_file = os.path.join(repo_cfg[REPOS_CFG_DIR], 'repos.cfg')
+    repos_cfg_file = os.path.join(repo_cfg[config.REPO_TARGETS_SETTING_REPOS_CFG_DIR], 'repos.cfg')
     log(f"{fn}(): repos_cfg_file '{repos_cfg_file}'")
     try:
         repos_cfg = configparser.ConfigParser()
@@ -271,7 +222,7 @@ def get_repo_cfg(cfg):
 
         config_map = {}
         try:
-            config_map_str = repos_cfg[repo_id].get(REPOS_CONFIG_MAP)
+            config_map_str = repos_cfg[repo_id].get(cvmfs_repository.REPOS_CFG_CONFIG_MAP)
             log(f"{fn}(): config_map '{config_map_str}'")
 
             if config_map_str is not None:
@@ -282,7 +233,7 @@ def get_repo_cfg(cfg):
             print(err)
             error(f"{fn}(): Value for config_map ({config_map_str}) could not be decoded.")
 
-        repo_cfg[repo_id][REPOS_CONFIG_MAP] = config_map
+        repo_cfg[repo_id][cvmfs_repository.REPOS_CFG_CONFIG_MAP] = config_map
 
     # print full repo_cfg for debugging purposes
     log(f"{fn}(): complete repo_cfg that was just read: {json.dumps(repo_cfg, indent=4)}")
@@ -295,9 +246,9 @@ def create_pr_dir(pr, cfg, event_info):
     Create working directory for job to be submitted. Full path to the working
     directory has the format
 
-    JOBS_BASE_DIR/<year>.<month>/pr_<pr number>/event_<event id>/run_<run number>
+    config.BUILDENV_SETTING_JOBS_BASE_DIR/<year>.<month>/pr_<pr number>/event_<event id>/run_<run number>
 
-    where JOBS_BASE_DIR is defined in the configuration (see 'app.cfg'), year
+    where config.BUILDENV_SETTING_JOBS_BASE_DIR is defined in the configuration (see 'app.cfg'), year
     contains four digits, and month contains two digits
 
     Args:
@@ -317,13 +268,13 @@ def create_pr_dir(pr, cfg, event_info):
     # create directory structure (see discussion of options in
     #   https://github.com/EESSI/eessi-bot-software-layer/issues/7)
     #
-    # JOBS_BASE_DIR/<year>.<month>/pr_<pr number>/event_<event id>/run_<run number>
+    # config.BUILDENV_SETTING_JOBS_BASE_DIR/<year>.<month>/pr_<pr number>/event_<event id>/run_<run number>
     #
-    # where JOBS_BASE_DIR is defined in the configuration (see 'app.cfg'), year
+    # where config.BUILDENV_SETTING_JOBS_BASE_DIR is defined in the configuration (see 'app.cfg'), year
     # contains four digits, and month contains two digits
 
     build_env_cfg = get_build_env_cfg(cfg)
-    jobs_base_dir = build_env_cfg[JOBS_BASE_DIR]
+    jobs_base_dir = build_env_cfg[config.BUILDENV_SETTING_JOBS_BASE_DIR]
 
     year_month = datetime.today().strftime('%Y.%m')
     pr_id = 'pr_%s' % pr.number
@@ -371,7 +322,7 @@ def download_pr(repo_name, branch_name, pr, arch_job_dir):
         git_clone_cmd, "Clone repo", arch_job_dir, raise_on_error=False
     )
     if clone_exit_code != 0:
-        error_stage = ERROR_GIT_CLONE
+        error_stage = _ERROR_GIT_CLONE
         return clone_output, clone_error, clone_exit_code, error_stage
 
     git_checkout_cmd = ' '.join([
@@ -383,7 +334,7 @@ def download_pr(repo_name, branch_name, pr, arch_job_dir):
         git_checkout_cmd, "checkout branch '%s'" % branch_name, arch_job_dir, raise_on_error=False
     )
     if checkout_exit_code != 0:
-        error_stage = ERROR_GIT_CHECKOUT
+        error_stage = _ERROR_GIT_CHECKOUT
         return checkout_output, checkout_err, checkout_exit_code, error_stage
 
     curl_cmd = f'curl -L https://github.com/{repo_name}/pull/{pr.number}.diff > {pr.number}.diff'
@@ -392,7 +343,7 @@ def download_pr(repo_name, branch_name, pr, arch_job_dir):
         curl_cmd, "Obtain patch", arch_job_dir, raise_on_error=False
     )
     if curl_exit_code != 0:
-        error_stage = ERROR_CURL
+        error_stage = _ERROR_CURL
         return curl_output, curl_error, curl_exit_code, error_stage
 
     git_apply_cmd = f'git apply {pr.number}.diff'
@@ -401,11 +352,11 @@ def download_pr(repo_name, branch_name, pr, arch_job_dir):
         git_apply_cmd, "Apply patch", arch_job_dir, raise_on_error=False
     )
     if git_apply_exit_code != 0:
-        error_stage = ERROR_GIT_APPLY
+        error_stage = _ERROR_GIT_APPLY
         return git_apply_output, git_apply_error, git_apply_exit_code, error_stage
 
     # need to return four items also in case everything went fine
-    return 'downloading PR succeeded', 'no error while downloading PR', 0, ERROR_NONE
+    return 'downloading PR succeeded', 'no error while downloading PR', 0, _ERROR_NONE
 
 
 def comment_download_pr(base_repo_name, pr, download_pr_exit_code, download_pr_error, error_stage):
@@ -428,23 +379,23 @@ def comment_download_pr(base_repo_name, pr, download_pr_exit_code, download_pr_e
     if download_pr_exit_code != 0:
         fn = sys._getframe().f_code.co_name
 
-        download_pr_comments_cfg = config.read_config()[DOWNLOAD_PR_COMMENTS]
-        if error_stage == ERROR_GIT_CLONE:
+        download_pr_comments_cfg = config.read_config()[config.SECTION_DOWNLOAD_PR_COMMENTS]
+        if error_stage == _ERROR_GIT_CLONE:
             download_comment = (f"`{download_pr_error}`"
-                                f"{download_pr_comments_cfg[GIT_CLONE_FAILURE]}"
-                                f"{download_pr_comments_cfg[GIT_CLONE_TIP]}")
-        elif error_stage == ERROR_GIT_CHECKOUT:
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CLONE_FAILURE]}"
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CLONE_TIP]}")
+        elif error_stage == _ERROR_GIT_CHECKOUT:
             download_comment = (f"`{download_pr_error}`"
-                                f"{download_pr_comments_cfg[GIT_CHECKOUT_FAILURE]}"
-                                f"{download_pr_comments_cfg[GIT_CHECKOUT_TIP]}")
-        elif error_stage == ERROR_CURL:
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CHECKOUT_FAILURE]}"
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_CHECKOUT_TIP]}")
+        elif error_stage == _ERROR_CURL:
             download_comment = (f"`{download_pr_error}`"
-                                f"{download_pr_comments_cfg[CURL_FAILURE]}"
-                                f"{download_pr_comments_cfg[CURL_TIP]}")
-        elif error_stage == ERROR_GIT_APPLY:
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_CURL_FAILURE]}"
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_CURL_TIP]}")
+        elif error_stage == _ERROR_GIT_APPLY:
             download_comment = (f"`{download_pr_error}`"
-                                f"{download_pr_comments_cfg[GIT_APPLY_FAILURE]}"
-                                f"{download_pr_comments_cfg[GIT_APPLY_TIP]}")
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_APPLY_FAILURE]}"
+                                f"{download_pr_comments_cfg[config.DOWNLOAD_PR_COMMENTS_SETTING_GIT_APPLY_TIP]}")
 
         download_comment = pr_comments.create_comment(
             repo_name=base_repo_name, pr_number=pr.number, comment=download_comment
@@ -498,7 +449,7 @@ def prepare_jobs(pr, cfg, event_info, action_filter):
     """
     fn = sys._getframe().f_code.co_name
 
-    app_name = cfg[GITHUB].get(APP_NAME)
+    app_name = cfg[config.SECTION_GITHUB].get(config.GITHUB_SETTING_APP_NAME)
     build_env_cfg = get_build_env_cfg(cfg)
     arch_map = get_architecture_targets(cfg)
     repocfg = get_repo_cfg(cfg)
@@ -521,14 +472,15 @@ def prepare_jobs(pr, cfg, event_info, action_filter):
     for arch, slurm_opt in arch_map.items():
         arch_dir = arch.replace('/', '_')
         # check if repo_target_map contains an entry for {arch}
-        if arch not in repocfg[REPO_TARGET_MAP]:
+        if arch not in repocfg[config.REPO_TARGETS_SETTING_REPO_TARGET_MAP]:
             log(f"{fn}(): skipping arch {arch} because repo target map does not define repositories to build for")
             continue
-        for repo_id in repocfg[REPO_TARGET_MAP][arch]:
+        for repo_id in repocfg[config.REPO_TARGETS_SETTING_REPO_TARGET_MAP][arch]:
             # ensure repocfg contains information about the repository repo_id if repo_id != EESSI
             # Note, EESSI is a bad/misleading name, it should be more like AS_IN_CONTAINER
             if (repo_id != "EESSI" and repo_id != "EESSI-pilot") and repo_id not in repocfg:
-                log(f"{fn}(): skipping repo {repo_id}, it is not defined in repo config {repocfg[REPOS_CFG_DIR]}")
+                log(f"{fn}(): skipping repo {repo_id}, it is not defined in repo "
+                    f"config {repocfg[config.REPO_TARGETS_SETTING_REPOS_CFG_DIR]}")
                 continue
 
             # if filter exists, check filter against context = (arch, repo, app_name)
@@ -586,73 +538,81 @@ def prepare_job_cfg(job_dir, build_env_cfg, repos_cfg, repo_id, software_subdir,
     """
     fn = sys._getframe().f_code.co_name
 
-    jobcfg_dir = os.path.join(job_dir, CFG_DIRNAME)
-    # create ini file job.cfg with entries:
+    jobcfg_dir = os.path.join(job_dir, job_metadata.JOB_CFG_DIRECTORY_NAME)
+    # create ini file job.cfg with entries (some values are taken from the
+    # arguments of the function, some from settings in 'app.cfg', some from the
+    # repository's definition, some combine two values):
     #
     # [site_config]
-    # local_tmp = LOCAL_TMP_VALUE
-    # shared_fs_path = SHARED_FS_PATH
-    # build_logs_dir = BUILD_LOGS_DIR
+    # local_tmp = config.BUILDENV_SETTING_LOCAL_TMP
+    # shared_fs_path = config.BUILDENV_SETTING_SHARED_FS_PATH
+    # build_logs_dir = config.BUILDENV_SETTING_BUILD_LOGS_DIR
     #
     # [repository]
-    # repos_cfg_dir = JOB_CFG_DIR
-    # repo_id = JOB_REPO_ID
-    # container = CONTAINER
-    # repo_name = REPO_NAME
-    # repo_version = REPO_VERSION
+    # repos_cfg_dir = job_dir/job_metadata.JOB_CFG_DIRECTORY_NAME
+    # repo_id = repo_id
+    # container = repos_cfg[cvmfs_repository.REPOS_CFG_CONTAINER]
+    # repo_name = repo_cfg[cvmfs_repository.REPOS_CFG_REPO_NAME]
+    # repo_version = repo_cfg[cvmfs_repository.REPOS_CFG_REPO_VERSION]
    #
     # [architecture]
-    # software_subdir = SOFTWARE_SUBDIR
-    # os_type = OS_TYPE
+    # software_subdir = software_subdir
+    # os_type = os_type
     job_cfg = configparser.ConfigParser()
-    job_cfg[JOB_SITECONFIG] = {}
+    job_cfg[job_metadata.JOB_CFG_SITE_CONFIG_SECTION] = {}
 
     build_env_to_job_cfg_keys = {
-        BUILD_LOGS_DIR: BUILD_LOGS_DIR,
-        CONTAINER_CACHEDIR: CONTAINER_CACHEDIR,
-        HTTP_PROXY: JOB_HTTP_PROXY,
-        HTTPS_PROXY: JOB_HTTPS_PROXY,
-        LOAD_MODULES: JOB_LOAD_MODULES,
-        LOCAL_TMP: JOB_LOCAL_TMP,
-        SHARED_FS_PATH: SHARED_FS_PATH,
+        config.BUILDENV_SETTING_BUILD_LOGS_DIR: job_metadata.JOB_CFG_SITE_CONFIG_BUILD_LOGS_DIR,
+        config.BUILDENV_SETTING_CONTAINER_CACHEDIR: job_metadata.JOB_CFG_SITE_CONFIG_CONTAINER_CACHEDIR,
+        config.BUILDENV_SETTING_HTTP_PROXY: job_metadata.JOB_CFG_SITE_CONFIG_HTTP_PROXY,
+        config.BUILDENV_SETTING_HTTPS_PROXY: job_metadata.JOB_CFG_SITE_CONFIG_HTTPS_PROXY,
+        config.BUILDENV_SETTING_LOAD_MODULES: job_metadata.JOB_CFG_SITE_CONFIG_LOAD_MODULES,
+        config.BUILDENV_SETTING_LOCAL_TMP: job_metadata.JOB_CFG_SITE_CONFIG_LOCAL_TMP,
+        config.BUILDENV_SETTING_SHARED_FS_PATH: job_metadata.JOB_CFG_SITE_CONFIG_SHARED_FS_PATH,
     }
     for build_env_key, job_cfg_key in build_env_to_job_cfg_keys.items():
         if build_env_cfg[build_env_key]:
-            job_cfg[JOB_SITECONFIG][job_cfg_key] = build_env_cfg[build_env_key]
+            job_cfg[job_metadata.JOB_CFG_SITE_CONFIG_SECTION][job_cfg_key] = build_env_cfg[build_env_key]
 
-    job_cfg[JOB_REPOSITORY] = {}
+    job_cfg[job_metadata.JOB_CFG_REPOSITORY_SECTION] = {}
 
     # directory for repos.cfg
-    # NOTE REPOS_CFG_DIR is a global configuration setting for all repositories,
-    # hence it is stored in repos_cfg whereas repo_cfg used further below
-    # contains setting for a specific repository
-    if REPOS_CFG_DIR in repos_cfg and repos_cfg[REPOS_CFG_DIR]:
-        job_cfg[JOB_REPOSITORY][JOB_REPOS_CFG_DIR] = jobcfg_dir
+    # NOTE config.REPO_TARGETS_SETTING_REPOS_CFG_DIR is a global configuration
+    # setting for all repositories, hence it is stored in repos_cfg whereas
+    # repo_cfg used further below contains setting for a specific repository
+    repo_section_str = job_metadata.JOB_CFG_REPOSITORY_SECTION
+    cfg_repos_cfg_dir = config.REPO_TARGETS_SETTING_REPOS_CFG_DIR
+    if cfg_repos_cfg_dir in repos_cfg and repos_cfg[cfg_repos_cfg_dir]:
+        job_cfg[repo_section_str][job_metadata.JOB_CFG_REPOSITORY_REPOS_CFG_DIR] = jobcfg_dir
 
     # repo id
-    job_cfg[JOB_REPOSITORY][JOB_REPO_ID] = repo_id
+    job_cfg[repo_section_str][job_metadata.JOB_CFG_REPOSITORY_REPO_ID] = repo_id
 
     # settings for a specific repository
     if repo_id in repos_cfg:
         repo_cfg = repos_cfg[repo_id]
-        if repo_cfg[REPOS_CONTAINER]:
-            job_cfg[JOB_REPOSITORY][JOB_CONTAINER] = repo_cfg[REPOS_CONTAINER]
-        if repo_cfg[REPOS_REPO_NAME]:
-            job_cfg[JOB_REPOSITORY][JOB_REPO_NAME] = repo_cfg[REPOS_REPO_NAME]
-        if repo_cfg[REPOS_REPO_VERSION]:
-            job_cfg[JOB_REPOSITORY][JOB_REPO_VERSION] = repo_cfg[REPOS_REPO_VERSION]
-
-    job_cfg[JOB_ARCHITECTURE] = {}
-    job_cfg[JOB_ARCHITECTURE][JOB_SOFTWARE_SUBDIR] = software_subdir
-    job_cfg[JOB_ARCHITECTURE][JOB_OS_TYPE] = os_type
-
-    # copy repos_cfg[REPOS_CFG_DIR]/repos.cfg to <jobcfg_dir>
-    # copy repos_cfg[REPOS_CFG_DIR]/*.tgz to <jobcfg_dir>
-    if REPOS_CFG_DIR in repos_cfg and repos_cfg[REPOS_CFG_DIR] and os.path.isdir(repos_cfg[REPOS_CFG_DIR]):
-        src = repos_cfg[REPOS_CFG_DIR]
+        if repo_cfg[cvmfs_repository.REPOS_CFG_CONTAINER]:
+            job_cfg_repo_container = job_metadata.JOB_CFG_REPOSITORY_CONTAINER
+            job_cfg[repo_section_str][job_cfg_repo_container] = repo_cfg[cvmfs_repository.REPOS_CFG_CONTAINER]
+        if repo_cfg[cvmfs_repository.REPOS_CFG_REPO_NAME]:
+            job_cfg_repo_name = job_metadata.JOB_CFG_REPOSITORY_REPO_NAME
+            job_cfg[repo_section_str][job_cfg_repo_name] = repo_cfg[cvmfs_repository.REPOS_CFG_REPO_NAME]
+        if repo_cfg[cvmfs_repository.REPOS_CFG_REPO_VERSION]:
+            job_cfg_repo_version = job_metadata.JOB_CFG_REPOSITORY_REPO_VERSION
+            job_cfg[repo_section_str][job_cfg_repo_version] = repo_cfg[cvmfs_repository.REPOS_CFG_REPO_VERSION]
+
+    job_cfg_arch_section = job_metadata.JOB_CFG_ARCHITECTURE_SECTION
+    job_cfg[job_cfg_arch_section] = {}
+    job_cfg[job_cfg_arch_section][job_metadata.JOB_CFG_ARCHITECTURE_SOFTWARE_SUBDIR] = software_subdir
+    job_cfg[job_cfg_arch_section][job_metadata.JOB_CFG_ARCHITECTURE_OS_TYPE] = os_type
+
+    # copy contents of directory containing repository configuration to directory
+    # containing job configuration/metadata
+    if cfg_repos_cfg_dir in repos_cfg and repos_cfg[cfg_repos_cfg_dir] and os.path.isdir(repos_cfg[cfg_repos_cfg_dir]):
+        src = repos_cfg[cfg_repos_cfg_dir]
         shutil.copytree(src, jobcfg_dir)
         log(f"{fn}(): copied {src} to {jobcfg_dir}")
 
-    # make sure that <jobcfg_dir> exists
+    # make sure that <jobcfg_dir> exists (in case it wasn't just copied)
     os.makedirs(jobcfg_dir, exist_ok=True)
 
-    jobcfg_file = os.path.join(jobcfg_dir, JOB_CFG_FILENAME)
+    jobcfg_file = os.path.join(jobcfg_dir, job_metadata.JOB_CFG_FILENAME)
     with open(jobcfg_file, "w") as jcf:
         job_cfg.write(jcf)
@@ -673,7 +633,7 @@ def submit_job(job, cfg):
     Returns:
         tuple of 2 elements containing
         - (string): id of the submitted job
-        - (string): path JOBS_BASE_DIR/job.year_month/job.pr_id/SLURM_JOBID which
+        - (string): path config.BUILDENV_SETTING_JOBS_BASE_DIR/job.year_month/job.pr_id/SLURM_JOBID which
           is a symlink to the job's working directory (job[0] or job.working_dir)
     """
     fn = sys._getframe().f_code.co_name
@@ -682,7 +642,7 @@ def submit_job(job, cfg):
 
     # add a default time limit of 24h to the job submit command if no other time
     # limit is specified already
-    all_opts_str = " ".join([build_env_cfg[SLURM_PARAMS], job.slurm_opts])
+    all_opts_str = " ".join([build_env_cfg[config.BUILDENV_SETTING_SLURM_PARAMS], job.slurm_opts])
     all_opts_list = all_opts_str.split(" ")
     if any([(opt.startswith("--time") or opt.startswith("-t")) for opt in all_opts_list]):
         time_limit = ""
@@ -690,11 +650,11 @@ def submit_job(job, cfg):
         time_limit = f"--time={DEFAULT_JOB_TIME_LIMIT}"
 
     command_line = ' '.join([
-        build_env_cfg[SUBMIT_COMMAND],
-        build_env_cfg[SLURM_PARAMS],
+        build_env_cfg[config.BUILDENV_SETTING_SUBMIT_COMMAND],
+        build_env_cfg[config.BUILDENV_SETTING_SLURM_PARAMS],
         time_limit,
         job.slurm_opts,
-        build_env_cfg[BUILD_JOB_SCRIPT],
+        build_env_cfg[config.BUILDENV_SETTING_BUILD_JOB_SCRIPT],
     ])
 
     cmdline_output, cmdline_error, cmdline_exit_code = run_cmd(command_line,
@@ -703,14 +663,14 @@ def submit_job(job, cfg):
 
     # sbatch output is 'Submitted batch job JOBID'
     # parse job id, add it to array of submitted jobs and create a symlink
-    # from JOBS_BASE_DIR/job.year_month/job.pr_id/SLURM_JOBID to the job's
+    # from config.BUILDENV_SETTING_JOBS_BASE_DIR/job.year_month/job.pr_id/SLURM_JOBID to the job's
    # working directory
     log(f"{fn}(): sbatch out: {cmdline_output}")
     log(f"{fn}(): sbatch err: {cmdline_error}")
 
     job_id = cmdline_output.split()[3]
 
-    symlink = os.path.join(build_env_cfg[JOBS_BASE_DIR], job.year_month, job.pr_id, job_id)
+    symlink = os.path.join(build_env_cfg[config.BUILDENV_SETTING_JOBS_BASE_DIR], job.year_month, job.pr_id, job_id)
     log(f"{fn}(): create symlink {symlink} -> {job[0]}")
     os.symlink(job[0], symlink)
@@ -742,17 +702,18 @@ def create_pr_comment(job, job_id, app_name, pr, gh, symlink):
     dt = datetime.now(timezone.utc)
 
     # construct initial job comment
-    submitted_job_comments_cfg = config.read_config()[SUBMITTED_JOB_COMMENTS]
-    job_comment = (f"{submitted_job_comments_cfg[INITIAL_COMMENT]}"
+    submitted_job_comments_cfg = config.read_config()[config.SECTION_SUBMITTED_JOB_COMMENTS]
+    job_comment = (f"{submitted_job_comments_cfg[config.SUBMITTED_JOB_COMMENTS_SETTING_INITIAL_COMMENT]}"
                    f"\n|date|job status|comment|\n"
                    f"|----------|----------|------------------------|\n"
                    f"|{dt.strftime('%b %d %X %Z %Y')}|"
                    f"submitted|"
-                   f"{submitted_job_comments_cfg[AWAITS_RELEASE]}|").format(app_name=app_name,
-                                                                            arch_name=arch_name,
-                                                                            symlink=symlink,
-                                                                            repo_id=job.repo_id,
-                                                                            job_id=job_id)
+                   f"{submitted_job_comments_cfg[config.SUBMITTED_JOB_COMMENTS_SETTING_AWAITS_RELEASE]}|").format(
+                       app_name=app_name,
+                       arch_name=arch_name,
+                       symlink=symlink,
+                       repo_id=job.repo_id,
+                       job_id=job_id)
 
     # create comment to pull request
     repo_name = pr.base.repo.full_name
@@ -787,7 +748,7 @@ def submit_build_jobs(pr, event_info, action_filter):
     fn = sys._getframe().f_code.co_name
 
     cfg = config.read_config()
-    app_name = cfg[GITHUB].get(APP_NAME)
+    app_name = cfg[config.SECTION_GITHUB].get(config.GITHUB_SETTING_APP_NAME)
 
     # setup job directories (one per element in product of architecture x repositories)
     jobs = prepare_jobs(pr, cfg, event_info, action_filter)
@@ -814,7 +775,7 @@ def submit_build_jobs(pr, event_info, action_filter):
         pr_comment = pr_comments.PRComment(pr.base.repo.full_name, pr.number, pr_comment.id)
 
         # create _bot_job<jobid>.metadata file in the job's working directory
-        create_metadata_file(job, job_id, pr_comment)
+        job_metadata.create_metadata_file(job, job_id, pr_comment)
 
     return job_id_to_comment_map
@@ -838,16 +799,16 @@ def check_build_permission(pr, event_info):
 
     cfg = config.read_config()
 
-    buildenv = cfg[BUILDENV]
+    buildenv = cfg[config.SECTION_BUILDENV]
 
-    build_permission = buildenv.get(BUILD_PERMISSION, '')
+    build_permission = buildenv.get(config.BUILDENV_SETTING_BUILD_PERMISSION, '')
 
     log(f"{fn}(): build permission '{build_permission}'")
 
     build_labeler = event_info['raw_request_body']['sender']['login']
     if build_labeler not in build_permission.split():
         log(f"{fn}(): GH account '{build_labeler}' is not authorized to build")
-        no_build_permission_comment = buildenv.get(NO_BUILD_PERMISSION_COMMENT)
+        no_build_permission_comment = buildenv.get(config.BUILDENV_SETTING_NO_BUILD_PERMISSION_COMMENT)
         repo_name = event_info["raw_request_body"]["repository"]["full_name"]
         pr_comments.create_comment(repo_name,
                                    pr.number,
@@ -886,7 +847,9 @@ def request_bot_build_issue_comments(repo_name, pr_number):
     for comment in comments:
         # iterate through the comments to find the one where the status of the build was in
-        if config.read_config()["submitted_job_comments"]['initial_comment'][:20] in comment['body']:
+        submitted_job_comments_section = cfg[config.SECTION_SUBMITTED_JOB_COMMENTS]
+        initial_comment_fmt = submitted_job_comments_section[config.SUBMITTED_JOB_COMMENTS_SETTING_INITIAL_COMMENT]
+        if initial_comment_fmt[:20] in comment['body']:
 
             # get archictecture from comment['body']
             first_line = comment['body'].split('\n')[0]
diff --git a/tasks/deploy.py b/tasks/deploy.py
index afd61662..32e7705f 100644
--- a/tasks/deploy.py
+++ b/tasks/deploy.py
@@ -26,23 +26,10 @@
 # Local application imports (anything from EESSI/eessi-bot-software-layer)
 from connections import github
-from tasks.build import CFG_DIRNAME, JOB_CFG_FILENAME, JOB_REPO_ID, JOB_REPOSITORY
 from tasks.build import get_build_env_cfg
 from tools import config, job_metadata, pr_comments, run_cmd
 
 
-ARTEFACT_PREFIX = "artefact_prefix"
-ARTEFACT_UPLOAD_SCRIPT = "artefact_upload_script"
-BUCKET_NAME = "bucket_name"
-DEPLOYCFG = "deploycfg"
-DEPLOY_PERMISSION = "deploy_permission"
-ENDPOINT_URL = "endpoint_url"
-JOBS_BASE_DIR = "jobs_base_dir"
-METADATA_PREFIX = "metadata_prefix"
-NO_DEPLOY_PERMISSION_COMMENT = "no_deploy_permission_comment"
-UPLOAD_POLICY = "upload_policy"
-
-
 def determine_job_dirs(pr_number):
     """
     Determine working directories of jobs run for a pull request.
@@ -57,14 +44,14 @@ def determine_job_dirs(pr_number):
 
     job_directories = []
 
-    # a job directory's name has the format cfg[JOBS_BASE_DIR]/YYYY.MM/pr_<pr number>/JOBID
+    # a job directory's name has the format cfg[config.BUILDENV_SETTING_JOBS_BASE_DIR]/YYYY.MM/pr_<pr number>/JOBID
     #  - we may have to scan multiple YYYY.MM directories if the pull request was
     #    processed over more than one month (that is jobs were run in two or more
     #    months)
     #  - we assume that a JOBID is a positive integer
     cfg = config.read_config()
     build_env_cfg = get_build_env_cfg(cfg)
-    jobs_base_dir = build_env_cfg[JOBS_BASE_DIR]
+    jobs_base_dir = build_env_cfg[config.BUILDENV_SETTING_JOBS_BASE_DIR]
     log(f"{funcname}(): jobs_base_dir = {jobs_base_dir}")
 
     date_pr_job_pattern = (f"[0-9][0-9][0-9][0-9].[0-9][0-9]/"
                            f"pr_{pr_number}/[0-9]*")
@@ -272,12 +259,12 @@ def upload_artefact(job_dir, payload, timestamp, repo_name, pr_number, pr_commen
     # obtain config settings
     cfg = config.read_config()
-    deploycfg = cfg[DEPLOYCFG]
-    artefact_upload_script = deploycfg.get(ARTEFACT_UPLOAD_SCRIPT)
-    endpoint_url = deploycfg.get(ENDPOINT_URL) or ''
-    bucket_spec = deploycfg.get(BUCKET_NAME)
-    metadata_prefix = deploycfg.get(METADATA_PREFIX)
-    artefact_prefix = deploycfg.get(ARTEFACT_PREFIX)
+    deploycfg = cfg[config.SECTION_DEPLOYCFG]
+    artefact_upload_script = deploycfg.get(config.DEPLOYCFG_SETTING_ARTEFACT_UPLOAD_SCRIPT)
+    endpoint_url = deploycfg.get(config.DEPLOYCFG_SETTING_ENDPOINT_URL) or ''
+    bucket_spec = deploycfg.get(config.DEPLOYCFG_SETTING_BUCKET_NAME)
+    metadata_prefix = deploycfg.get(config.DEPLOYCFG_SETTING_METADATA_PREFIX)
+    artefact_prefix = deploycfg.get(config.DEPLOYCFG_SETTING_ARTEFACT_PREFIX)
 
     # if bucket_spec value looks like a dict, try parsing it as such
     if bucket_spec.lstrip().startswith('{'):
@@ -291,9 +278,9 @@ def upload_artefact(job_dir, payload, timestamp, repo_name, pr_number, pr_commen
     if artefact_prefix.lstrip().startswith('{'):
         artefact_prefix = json.loads(artefact_prefix)
 
-    jobcfg_path = os.path.join(job_dir, CFG_DIRNAME, JOB_CFG_FILENAME)
+    jobcfg_path = os.path.join(job_dir, job_metadata.JOB_CFG_DIRECTORY_NAME, job_metadata.JOB_CFG_FILENAME)
     jobcfg = config.read_config(jobcfg_path)
-    target_repo_id = jobcfg[JOB_REPOSITORY][JOB_REPO_ID]
+    target_repo_id = jobcfg[job_metadata.JOB_CFG_REPOSITORY_SECTION][job_metadata.JOB_CFG_REPOSITORY_REPO_ID]
 
     if isinstance(bucket_spec, str):
         bucket_name = bucket_spec
@@ -537,8 +524,8 @@ def deploy_built_artefacts(pr, event_info):
     log(f"{funcname}(): deploy for PR {pr.number}")
 
     cfg = config.read_config()
-    deploy_cfg = cfg[DEPLOYCFG]
-    deploy_permission = deploy_cfg.get(DEPLOY_PERMISSION, '')
+    deploy_cfg = cfg[config.SECTION_DEPLOYCFG]
+    deploy_permission = deploy_cfg.get(config.DEPLOYCFG_SETTING_DEPLOY_PERMISSION, '')
     log(f"{funcname}(): deploy permission '{deploy_permission}'")
 
     labeler = event_info['raw_request_body']['sender']['login']
@@ -547,7 +534,7 @@ def deploy_built_artefacts(pr, event_info):
     # permission to trigger the deployment
     if labeler not in deploy_permission.split():
         log(f"{funcname}(): GH account '{labeler}' is not authorized to deploy")
-        no_deploy_permission_comment = deploy_cfg.get(NO_DEPLOY_PERMISSION_COMMENT)
+        no_deploy_permission_comment = deploy_cfg.get(config.DEPLOYCFG_SETTING_NO_DEPLOY_PERMISSION_COMMENT)
         repo_name = event_info["raw_request_body"]["repository"]["full_name"]
         pr_comments.create_comment(repo_name,
                                    pr.number,
@@ -557,7 +544,7 @@ def deploy_built_artefacts(pr, event_info):
     log(f"{funcname}(): GH account '{labeler}' is authorized to deploy")
 
     # get upload policy from config
-    upload_policy = deploy_cfg.get(UPLOAD_POLICY)
+    upload_policy = deploy_cfg.get(config.DEPLOYCFG_SETTING_UPLOAD_POLICY)
     log(f"{funcname}(): upload policy '{upload_policy}'")
 
     if upload_policy == "none":
diff --git a/tools/config.py b/tools/config.py
index 7ead2c84..0a325950 100644
--- a/tools/config.py
+++ b/tools/config.py
@@ -24,6 +24,87 @@
 # Local application imports (anything from EESSI/eessi-bot-software-layer)
 from .logging import error
 
+# define configuration constants
+# SECTION_sectionname for any section name in app.cfg
+# sectionname_SETTING_settingname for any setting with name settingname in
+# section sectionname
+SECTION_ARCHITECTURETARGETS = 'architecturetargets'
+ARCHITECTURETARGETS_SETTING_ARCH_TARGET_MAP = 'arch_target_map'
+
+SECTION_BOT_CONTROL = 'bot_control'
+BOT_CONTROL_SETTING_COMMAND_PERMISSION = 'command_permission'
+BOT_CONTROL_SETTING_COMMAND_RESPONSE_FMT = 'command_response_fmt'
+
+SECTION_BUILDENV = 'buildenv'
+BUILDENV_SETTING_BUILD_JOB_SCRIPT = 'build_job_script'
+BUILDENV_SETTING_BUILD_LOGS_DIR = 'build_logs_dir'
+BUILDENV_SETTING_BUILD_PERMISSION = 'build_permission'
+BUILDENV_SETTING_CONTAINER_CACHEDIR = 'container_cachedir'
+BUILDENV_SETTING_CVMFS_CUSTOMIZATIONS = 'cvmfs_customizations'
+BUILDENV_SETTING_HTTPS_PROXY = 'https_proxy'
+BUILDENV_SETTING_HTTP_PROXY = 'http_proxy'
+BUILDENV_SETTING_JOBS_BASE_DIR = 'jobs_base_dir'
+BUILDENV_SETTING_LOAD_MODULES = 'load_modules'
+BUILDENV_SETTING_LOCAL_TMP = 'local_tmp'
+BUILDENV_SETTING_NO_BUILD_PERMISSION_COMMENT = 'no_build_permission_comment'
+BUILDENV_SETTING_SHARED_FS_PATH = 'shared_fs_path'
+BUILDENV_SETTING_SLURM_PARAMS = 'slurm_params'
+BUILDENV_SETTING_SUBMIT_COMMAND = 'submit_command'
+
+SECTION_DEPLOYCFG = 'deploycfg'
+DEPLOYCFG_SETTING_ARTEFACT_PREFIX = 'artefact_prefix'
+DEPLOYCFG_SETTING_ARTEFACT_UPLOAD_SCRIPT = 'artefact_upload_script'
+DEPLOYCFG_SETTING_BUCKET_NAME = 'bucket_name'
+DEPLOYCFG_SETTING_DEPLOY_PERMISSION = 'deploy_permission'
+DEPLOYCFG_SETTING_ENDPOINT_URL = 'endpoint_url'
+DEPLOYCFG_SETTING_METADATA_PREFIX = 'metadata_prefix'
+DEPLOYCFG_SETTING_NO_DEPLOY_PERMISSION_COMMENT = 'no_deploy_permission_comment'
+DEPLOYCFG_SETTING_UPLOAD_POLICY = 'upload_policy'
+
+SECTION_DOWNLOAD_PR_COMMENTS = 'download_pr_comments'
+DOWNLOAD_PR_COMMENTS_SETTING_CURL_FAILURE = 'curl_failure'
+DOWNLOAD_PR_COMMENTS_SETTING_CURL_TIP = 'curl_tip'
+DOWNLOAD_PR_COMMENTS_SETTING_GIT_APPLY_FAILURE = 'git_apply_failure'
+DOWNLOAD_PR_COMMENTS_SETTING_GIT_APPLY_TIP = 'git_apply_tip'
+DOWNLOAD_PR_COMMENTS_SETTING_GIT_CHECKOUT_FAILURE = 'git_checkout_failure'
+DOWNLOAD_PR_COMMENTS_SETTING_GIT_CHECKOUT_TIP = 'git_checkout_tip'
+DOWNLOAD_PR_COMMENTS_SETTING_GIT_CLONE_FAILURE = 'git_clone_failure'
+DOWNLOAD_PR_COMMENTS_SETTING_GIT_CLONE_TIP = 'git_clone_tip'
+
+SECTION_EVENT_HANDLER = 'event_handler'
+EVENT_HANDLER_SETTING_LOG_PATH = 'log_path'
+
+SECTION_FINISHED_JOB_COMMENTS = 'finished_job_comments'
+FINISHED_JOB_COMMENTS_SETTING_JOB_RESULT_UNKNOWN_FMT = 'job_result_unknown_fmt'
+FINISHED_JOB_COMMENTS_SETTING_JOB_TEST_UNKNOWN_FMT = 'job_test_unknown_fmt'
+
+SECTION_GITHUB = 'github'
+GITHUB_SETTING_APP_ID = 'app_id'
+GITHUB_SETTING_APP_NAME = 'app_name'
+GITHUB_SETTING_INSTALLATION_ID = 'installation_id'
+GITHUB_SETTING_PRIVATE_KEY = 'private_key'
+
+SECTION_JOB_MANAGER = 'job_manager'
+JOB_MANAGER_SETTING_LOG_PATH = 'log_path'
+JOB_MANAGER_SETTING_JOB_IDS_DIR = 'job_ids_dir'
+JOB_MANAGER_SETTING_POLL_COMMAND = 'poll_command'
+JOB_MANAGER_SETTING_POLL_INTERVAL = 'poll_interval'
+JOB_MANAGER_SETTING_SCONTROL_COMMAND = 'scontrol_command'
+
+SECTION_NEW_JOB_COMMENTS = 'new_job_comments'
+NEW_JOB_COMMENTS_SETTING_AWAITS_LAUNCH = 'awaits_launch'
+
+SECTION_REPO_TARGETS = 'repo_targets'
+REPO_TARGETS_SETTING_REPO_TARGET_MAP = 'repo_target_map'
+REPO_TARGETS_SETTING_REPOS_CFG_DIR = 'repos_cfg_dir'
+
+SECTION_RUNNING_JOB_COMMENTS = 'running_job_comments'
+RUNNING_JOB_COMMENTS_SETTING_RUNNING_JOB = 'running_job'
+
+SECTION_SUBMITTED_JOB_COMMENTS = 'submitted_job_comments'
+SUBMITTED_JOB_COMMENTS_SETTING_INITIAL_COMMENT = 'initial_comment'
+SUBMITTED_JOB_COMMENTS_SETTING_AWAITS_RELEASE = 'awaits_release'
+
 
 def read_config(path='app.cfg'):
     """
diff --git a/tools/cvmfs_repository.py b/tools/cvmfs_repository.py
new file mode 100644
index 00000000..8b2c881f
--- /dev/null
+++ b/tools/cvmfs_repository.py
@@ -0,0 +1,33 @@
+# This file is part of the EESSI build-and-deploy bot,
+# see https://github.com/EESSI/eessi-bot-software-layer
+#
+# The bot helps with requests to add software installations to the
+# EESSI software layer, see https://github.com/EESSI/software-layer
+#
+# author: Thomas Roeblitz (@trz42)
+#
+# license: GPLv2
+#
+
+# Standard library imports
+# (none yet)
+
+# Third party imports (anything installed into the local Python environment)
+# (none yet)
+
+# Local application imports (anything from EESSI/eessi-bot-software-layer)
+# (none yet)
+
+
+# access to a CernVM-FS repository is defined via a repos.cfg file and associated
+# tarballs containing configuration settings per repository
+# below, we define constants for the settings of each repository;
+# the section name, eg, 'eessi-2023.06' in '[eessi-2023.06]', is not fixed and
+# therefore no constant is defined for it
+#
+# cfg/repos.cfg
+REPOS_CFG_CONFIG_BUNDLE = "config_bundle"
+REPOS_CFG_CONFIG_MAP = "config_map"
+REPOS_CFG_CONTAINER = "container"
+REPOS_CFG_REPO_NAME = "repo_name"
+REPOS_CFG_REPO_VERSION = "repo_version"
diff --git a/tools/job_metadata.py b/tools/job_metadata.py
index e93218b0..992bb387 100644
--- a/tools/job_metadata.py
+++ b/tools/job_metadata.py
@@ -21,13 +21,53 @@
 # (none yet)
 
 
+# the job's working directory (JWD) and subdirectories may contain various
+# files storing metadata for a job
+# below, we define constants for sections and 'settings' in these files
+#
+# job config directory name and filename
+JOB_CFG_DIRECTORY_NAME = "cfg"
+JOB_CFG_FILENAME = "job.cfg"
+
+# JWD/cfg/$JOB_CFG_FILENAME
+JOB_CFG_ARCHITECTURE_SECTION = "architecture"
+JOB_CFG_ARCHITECTURE_OS_TYPE = "os_type"
+JOB_CFG_ARCHITECTURE_SOFTWARE_SUBDIR = "software_subdir"
+
+JOB_CFG_REPOSITORY_SECTION = "repository"
+JOB_CFG_REPOSITORY_CONTAINER = "container"
+JOB_CFG_REPOSITORY_REPOS_CFG_DIR = "repos_cfg_dir"
+JOB_CFG_REPOSITORY_REPO_ID = "repo_id"
+JOB_CFG_REPOSITORY_REPO_NAME = "repo_name"
+JOB_CFG_REPOSITORY_REPO_VERSION = "repo_version"
+
+JOB_CFG_SITE_CONFIG_SECTION = "site_config"
+JOB_CFG_SITE_CONFIG_BUILD_LOGS_DIR = "build_logs_dir"
+JOB_CFG_SITE_CONFIG_CONTAINER_CACHEDIR = "container_cachedir"
+JOB_CFG_SITE_CONFIG_HTTP_PROXY = "http_proxy"
+JOB_CFG_SITE_CONFIG_HTTPS_PROXY = "https_proxy"
+JOB_CFG_SITE_CONFIG_LOAD_MODULES = "load_modules"
+JOB_CFG_SITE_CONFIG_LOCAL_TMP = "local_tmp"
+JOB_CFG_SITE_CONFIG_SHARED_FS_PATH = "shared_fs_path"
+
+# JWD/_bot_jobJOBID.metadata
 JOB_PR_SECTION = "PR"
+JOB_PR_REPO = "repo"
+JOB_PR_PR_NUMBER = "pr_number"
+JOB_PR_PR_COMMENT_ID = "pr_comment_id"
+
+# JWD/_bot_jobJOBID.result
+JOB_RESULT_SECTION = "RESULT"
 JOB_RESULT_ARTEFACTS = "artefacts"
+JOB_RESULT_COMMENT_DESCRIPTION = "comment_description"
 JOB_RESULT_FAILURE = "FAILURE"
-JOB_RESULT_SECTION = "RESULT"
 JOB_RESULT_STATUS = "status"
 JOB_RESULT_SUCCESS = "SUCCESS"
+
+# JWD/_bot_jobJOBID.test
 JOB_TEST_SECTION = "TEST"
+JOB_TEST_COMMENT_DESCRIPTION = "comment_description"
+JOB_TEST_STATUS = "status"
 
 
 def create_metadata_file(job, job_id, pr_comment):
@@ -50,9 +90,9 @@ def create_metadata_file(job, job_id, pr_comment):
 
     # create _bot_job<jobid>.metadata file in the job's working directory
     bot_jobfile = configparser.ConfigParser()
-    bot_jobfile['PR'] = {'repo': repo_name,
-                         'pr_number': pr_number,
-                         'pr_comment_id': pr_comment_id}
+    bot_jobfile[JOB_PR_SECTION] = {'repo': repo_name,
+                                   'pr_number': pr_number,
+                                   'pr_comment_id': pr_comment_id}
     bot_jobfile_path = os.path.join(job.working_dir, f'_bot_job{job_id}.metadata')
     with open(bot_jobfile_path, 'w') as bjf:
         bot_jobfile.write(bjf)
diff --git a/tools/permissions.py b/tools/permissions.py
index 59030b05..1b7f40df 100644
--- a/tools/permissions.py
+++ b/tools/permissions.py
@@ -18,9 +18,6 @@
 # Local application imports (anything from EESSI/eessi-bot-software-layer)
 from tools import config
 
-BOT_CONTROL = "bot_control"
-COMMAND_PERMISSION = "command_permission"
-
 
 def check_command_permission(account):
     """
@@ -38,10 +35,10 @@ def check_command_permission(account):
 
     cfg = config.read_config()
 
-    bot_ctrl = cfg[BOT_CONTROL]
+    bot_ctrl = cfg[config.SECTION_BOT_CONTROL]
 
     # read command permission from configuration (defined in file app.cfg)
-    command_permission = bot_ctrl.get(COMMAND_PERMISSION, '')
+    command_permission = bot_ctrl.get(config.BOT_CONTROL_SETTING_COMMAND_PERMISSION, '')
 
     log(f"{fn}(): command permission '{command_permission}'")
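
Note on the REQUIRED_CONFIG refactoring above: both daemons now hand their full {section: [settings]} mapping to config.check_required_cfg_settings() at startup. That helper's implementation is not part of this patch, so the sketch below is only an illustration of the kind of check such a mapping feeds into; the helper name check_required_settings and the sys.exit fallback are assumptions, not code from tools/config.py.

# Illustrative sketch only, NOT the implementation in tools/config.py:
# it shows how a {section: [settings]} mapping like REQUIRED_CONFIG can be
# validated against a parsed app.cfg so missing settings abort startup early.
import configparser
import sys


def check_required_settings(cfg, required):
    """Collect every (section, setting) pair from 'required' missing in cfg."""
    missing = []
    for section, settings in required.items():
        if section not in cfg:
            missing.append(f"section [{section}]")
            continue
        missing.extend(f"setting '{setting}' in section [{section}]"
                       for setting in settings if setting not in cfg[section])
    if missing:
        # fail before connecting to GitHub, mirroring the early check in main()
        sys.exit("app.cfg is missing: " + "; ".join(missing))


if __name__ == "__main__":
    cfg = configparser.ConfigParser()
    cfg.read("app.cfg")
    check_required_settings(cfg, {"github": ["app_id", "private_key"]})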