diff --git a/cloudinary_cli/modules/__init__.py b/cloudinary_cli/modules/__init__.py
index 996eab9..d387802 100644
--- a/cloudinary_cli/modules/__init__.py
+++ b/cloudinary_cli/modules/__init__.py
@@ -3,11 +3,13 @@
 from .sync import sync
 from .upload_dir import upload_dir
 from .regen_derived import regen_derived
+from .clone import clone
 
 commands = [
     upload_dir,
     make,
     migrate,
     sync,
-    regen_derived
+    regen_derived,
+    clone
 ]
diff --git a/cloudinary_cli/modules/clone.py b/cloudinary_cli/modules/clone.py
new file mode 100644
index 0000000..1500a06
--- /dev/null
+++ b/cloudinary_cli/modules/clone.py
@@ -0,0 +1,126 @@
+from click import command, option, style
+from cloudinary_cli.utils.utils import normalize_list_params, \
+    print_help_and_exit
+import cloudinary
+from cloudinary_cli.utils.utils import run_tasks_concurrently
+from cloudinary_cli.utils.api_utils import upload_file
+from cloudinary_cli.utils.config_utils import load_config
+from cloudinary_cli.defaults import logger
+from cloudinary_cli.core.search import execute_single_request, \
+    handle_auto_pagination
+
+DEFAULT_MAX_RESULTS = 500
+
+
+@command("clone",
+         short_help="""Clone assets from one account to another.""",
+         help="""
+\b
+Clone assets from one environment to another, with or without tags and/or context (structured metadata is not currently supported).
+The source is taken from your `CLOUDINARY_URL` environment variable, but you can also specify a different source using the `-c/-C` option.
+Cloning restricted assets is not currently supported either.
+Format: cld clone -T <target_environment>
+`<target_environment>` can be a CLOUDINARY_URL or a saved config (see the `config` command).
+e.g. cld clone -T cloudinary://<api_key>:<api_secret>@<cloud_name> -f tags,context
+""")
+@option("-T", "--target",
+        help="Tell the CLI the target environment to run the command on.")
+@option("-F", "--force", is_flag=True,
+        help="Skip confirmation.")
+@option("-O", "--overwrite", is_flag=True, default=False,
+        help="Specify whether to overwrite existing assets.")
+@option("-w", "--concurrent_workers", type=int, default=30,
+        help="Specify the number of concurrent network threads.")
+@option("-f", "--fields", multiple=True,
+        help="Specify which fields to copy (e.g. tags, context).")
+@option("-se", "--search_exp", default="",
+        help="Define a search expression.")
+@option("--async", "async_", is_flag=True, default=False,
+        help="Upload asynchronously.")
+@option("-nu", "--notification_url",
+        help="Webhook notification URL.")
+def clone(target, force, overwrite, concurrent_workers, fields, search_exp,
+          async_, notification_url):
+    if not target:
+        print_help_and_exit()
+
+    target_config = cloudinary.Config()
+    is_cloudinary_url = False
+    if target.startswith("cloudinary://"):
+        is_cloudinary_url = True
+        parsed_url = target_config._parse_cloudinary_url(target)
+    elif target in load_config():
+        parsed_url = target_config._parse_cloudinary_url(load_config().get(target))
+    else:
+        logger.error("The specified config does not exist or the "
+                     "CLOUDINARY_URL scheme provided is invalid "
+                     "(expecting to start with 'cloudinary://').")
+        return False
+
+    target_config._setup_from_parsed_url(parsed_url)
+    target_config_dict = {k: v for k, v in target_config.__dict__.items()
+                          if not k.startswith("_")}
+    if is_cloudinary_url:
+        try:
+            cloudinary.api.ping(**target_config_dict)
+        except Exception as e:
+            logger.error(f"{e}. Please double-check your Cloudinary URL.")
+            return False
+
+    source_cloudname = cloudinary.config().cloud_name
+    target_cloudname = target_config.cloud_name
+    if source_cloudname == target_cloudname:
+        logger.info("Target environment cannot be the "
+                    "same as the source environment.")
+        return True
+
+    copy_fields = normalize_list_params(fields)
+    search = cloudinary.search.Search().expression(search_exp)
+    search.fields(['tags', 'context', 'access_control',
+                   'secure_url', 'display_name'])
+    search.max_results(DEFAULT_MAX_RESULTS)
+    res = execute_single_request(search, fields_to_keep="")
+    res = handle_auto_pagination(res, search, force, fields_to_keep="")
+
+    upload_list = []
+    for r in res.get('resources'):
+        updated_options, asset_url = process_metadata(r, overwrite, async_,
+                                                      notification_url,
+                                                      copy_fields)
+        updated_options.update(target_config_dict)
+        upload_list.append((asset_url, {**updated_options}))
+
+    logger.info(style(f'Copying {len(upload_list)} asset(s) to '
+                      f'{target_cloudname}', fg="blue"))
+    run_tasks_concurrently(upload_file, upload_list,
+                           concurrent_workers)
+
+    return True
+
+
+def process_metadata(res, overwrite, async_, notification_url, copy_fields=""):
+    cloned_options = {}
+    asset_url = res.get('secure_url')
+    cloned_options['public_id'] = res.get('public_id')
+    cloned_options['type'] = res.get('type')
+    cloned_options['resource_type'] = res.get('resource_type')
+    cloned_options['overwrite'] = overwrite
+    cloned_options['async'] = async_
+    if "tags" in copy_fields:
+        cloned_options['tags'] = res.get('tags')
+    if "context" in copy_fields:
+        cloned_options['context'] = res.get('context')
+    if res.get('folder'):
+        # This is required to put the asset into the correct asset_folder
+        # when copying from a fixed-folder to a dynamic-folder (DF) cloud.
+        # If we simply passed a `folder` param to a DF cloud, it would be
+        # appended to the `public_id`, which we don't want.
+        cloned_options['asset_folder'] = res.get('folder')
+    elif res.get('asset_folder'):
+        cloned_options['asset_folder'] = res.get('asset_folder')
+    if res.get('display_name'):
+        cloned_options['display_name'] = res.get('display_name')
+    if notification_url:
+        cloned_options['notification_url'] = notification_url
+
+    return cloned_options, asset_url
diff --git a/cloudinary_cli/utils/api_utils.py b/cloudinary_cli/utils/api_utils.py
index 2367817..e0a1389 100644
--- a/cloudinary_cli/utils/api_utils.py
+++ b/cloudinary_cli/utils/api_utils.py
@@ -13,6 +13,8 @@
 from cloudinary_cli.utils.json_utils import print_json, write_json_to_file
 from cloudinary_cli.utils.utils import log_exception, confirm_action, get_command_params, merge_responses, \
     normalize_list_params, ConfigurationError, print_api_help, duplicate_values
+import re
+from cloudinary.utils import is_remote_url
 
 PAGINATION_MAX_RESULTS = 500
 
@@ -116,15 +118,20 @@ def upload_file(file_path, options, uploaded=None, failed=None):
     verbose = logger.getEffectiveLevel() < logging.INFO
 
     try:
-        size = path.getsize(file_path)
+        size = 0 if is_remote_url(file_path) else path.getsize(file_path)
         upload_func = uploader.upload
         if size > 20000000:
            upload_func = uploader.upload_large
         result = upload_func(file_path, **options)
         disp_path = _display_path(result)
-        disp_str = f"as {result['public_id']}" if not disp_path \
-            else f"as {disp_path} with public_id: {result['public_id']}"
-        logger.info(style(f"Successfully uploaded {file_path} {disp_str}", fg="green"))
+        if "batch_id" in result:
+            starting_msg = "Uploading"
+            disp_str = f"asynchronously with batch_id: {result['batch_id']}"
+        else:
+            starting_msg = "Successfully uploaded"
+            disp_str = f"as {result['public_id']}" if not disp_path \
+                else f"as {disp_path} with public_id: {result['public_id']}"
+        logger.info(style(f"{starting_msg} {file_path} {disp_str}", fg="green"))
         if verbose:
             print_json(result)
         uploaded[file_path] = {"path": asset_source(result), "display_path": disp_path}
@@ -210,12 +217,15 @@ def asset_source(asset_details):
 
     :return:
     """
-    base_name = asset_details['public_id']
+    base_name = asset_details.get('public_id', '')
+
+    if not base_name:
+        return base_name
 
     if asset_details['resource_type'] == 'raw' or asset_details['type'] == 'fetch':
         return base_name
 
-    return base_name + '.' + asset_details['format']
+    return base_name + '.' + asset_details.get('format', '')
 
 
 def get_folder_mode():
@@ -276,7 +286,6 @@ handle_api_command(
     """
     Used by Admin and Upload API commands
     """
-
     if doc:
         return launch(doc_url)
 
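Example usage of the new `clone` command (a sketch based only on the options defined in this diff; the saved config name `prod_env`, the search expression, and the URL placeholders are illustrative and not part of the change):

    # Clone assets matching a search expression into a saved target config,
    # copying tags and context, overwriting existing assets, with 50 workers:
    cld clone -T prod_env -f tags,context -O -w 50 -se "tags=sale"

    # Clone everything to a target given directly as a CLOUDINARY_URL,
    # skipping confirmation prompts while paginating search results:
    cld clone -F -T cloudinary://<api_key>:<api_secret>@<cloud_name>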