diff --git a/doc/content/api-reference/bucket-index.png b/doc/content/api-reference/bucket-index.png new file mode 100644 index 00000000..7193979f Binary files /dev/null and b/doc/content/api-reference/bucket-index.png differ diff --git a/doc/content/api-reference/buckets.rst b/doc/content/api-reference/buckets.rst new file mode 100644 index 00000000..e94444ec --- /dev/null +++ b/doc/content/api-reference/buckets.rst @@ -0,0 +1,99 @@ +is_template: False + + +Buckets +======= + +The bucket API implements the server-side of file-uploads and can be used to +make files accessible over HTTP. It does not implement the browser-side, so it +is best used with a library like +`lona-dropzone `_. + +A bucket is basically a temporary directory, that gets created when a bucket +is initialized with a request object, and closed automatically when the view, +associated with the request, gets removed from the server. + + +Usage +----- + +.. code-block:: python + + from lona import View, Bucket, HTML, A + + + class BucketView(View): + def handle_request(self, request): + self.bucket = Bucket( + request=request, + on_add=self.on_add, # optional + on_delete=self.on_delete, # optional + ) + + return HTML( + A( + 'Bucket', + href=self.bucket.get_url(), # link to the bucket + target='_blank', # open link in new tab + interactive=False, + ), + ) + + def on_add(self, file_names): + # this method gets called whenever a file gets added (uploaded) + # `file_names` is a list of strings + + pass + + + def on_delete(self, file_names): + # this method gets called whenever a file gets deleted + # `file_names` is a list of strings + + pass + +Regardless of the URL of the view that opened the bucket, all buckets are +accessible at ``/buckets//``. + +To upload a file, issue a multipart POST request to +``/buckets///add``. + +To delete a file, issue a POST form request to +``/buckets///delete``. The bucket will search for the +form key ``name``. 
+ +The URL prefix (``/buckets/``) can be changed using +``settings.BUCKETS_URL_PREFIX``. + +When ``Bucket.index`` is enabled, a generic frontend for listing, adding, and +deleting files for a bucket is available at +``/buckets//``. + +.. image:: bucket-index.png + + +Arguments +--------- + +.. api-doc:: lona.Bucket.__init__ + + +Methods +------- + +.. api-doc:: lona.Bucket.get_path +.. api-doc:: lona.Bucket.get_file_names +.. api-doc:: lona.Bucket.get_size +.. api-doc:: lona.Bucket.get_url +.. api-doc:: lona.Bucket.get_add_url +.. api-doc:: lona.Bucket.get_delete_url + + +Customization +------------- + +The Bucket index page can be customized by overriding the template +``lona/bucket.html``. + +.. code-block:: html + :include: ../../../lona/templates/lona/bucket.html diff --git a/doc/content/api-reference/settings.rst b/doc/content/api-reference/settings.rst index 9f78b300..fe6b3e72 100644 --- a/doc/content/api-reference/settings.rst +++ b/doc/content/api-reference/settings.rst @@ -255,6 +255,16 @@ Channels :path: lona.default_settings.CHANNEL_TASK_WORKER_CLASS +Buckets +------- + +.. 
setting:: + :name: BUCKETS_URL_PREFIX + :path: lona.default_settings.BUCKETS_URL_PREFIX + + Default prefix for :link:`bucket ` urls + + Middlewares ----------- diff --git a/doc/content/demos/file-upload/demo.gif b/doc/content/demos/file-upload/demo.gif new file mode 100644 index 00000000..68996394 Binary files /dev/null and b/doc/content/demos/file-upload/demo.gif differ diff --git a/doc/content/demos/file-upload/demo.py b/doc/content/demos/file-upload/demo.py new file mode 100644 index 00000000..46bb91d8 --- /dev/null +++ b/doc/content/demos/file-upload/demo.py @@ -0,0 +1,63 @@ +from datetime import datetime + +from lona_picocss.html import ScrollerPre, HTML, Grid, Div, H1, A +from lona_picocss import install_picocss +from lona_dropzone import Dropzone + +from lona import View, App + +app = App(__file__) + +install_picocss(app, debug=True) + + +@app.route('/') +class DropzoneView(View): + def handle_request(self, request): + self.dropzone = Dropzone( + request=request, + on_add=self.on_add, + on_delete=self.on_delete, + ) + + self.scroller = ScrollerPre(height='30em') + + return HTML( + H1('Dropzone'), + Grid( + Div( + self.dropzone, + ), + Div( + self.scroller, + A( + 'Bucket URL', + href=self.dropzone.bucket.get_url(), + target='_blank', + interactive=False, + ), + ), + ), + ) + + def on_add(self, file_names): + # this method gets called whenever a file gets added (uploaded) + + self.scroller.append( + f'{datetime.now()}: on_add: {file_names=}\n', + ) + + self.show() + + def on_delete(self, file_names): + # this method gets called whenever a file gets deleted + + self.scroller.append( + f'{datetime.now()}: on_delete: {file_names=}\n', + ) + + self.show() + + +if __name__ == '__main__': + app.run() diff --git a/doc/content/demos/file-upload/index.rst b/doc/content/demos/file-upload/index.rst new file mode 100644 index 00000000..caed49a2 --- /dev/null +++ b/doc/content/demos/file-upload/index.rst @@ -0,0 +1,25 @@ + + +File Upload +=========== + +This demo 
showcases file uploads, using +:link:`lona-dropzone `, which +uses the :link:`bucket API ` internally. + +.. image:: demo.gif + + +Install Dependencies +-------------------- + +.. code-block:: text + + pip install lona lona-picocss lona-dropzone + + +Source code +----------- + +.. code-block:: python + :include: demo.py diff --git a/doc/content/roadmap.rst b/doc/content/roadmap.rst index d92d929e..c28940aa 100644 --- a/doc/content/roadmap.rst +++ b/doc/content/roadmap.rst @@ -22,7 +22,7 @@ Getting Feature Complete - [ ] Add range input - [ ] Add color input -- [ ] Add support for file uploads +- [x] Add support for file uploads - [ ] Add support for JavaScript Modules diff --git a/doc/settings.py b/doc/settings.py index 7e9e33de..dda51365 100644 --- a/doc/settings.py +++ b/doc/settings.py @@ -65,6 +65,7 @@ ['Multi User Chat', 'demos/multi-user-chat/index.rst'], ['Game Of Life', 'demos/game-of-life/index.rst'], ['Channels', 'demos/channels/index.rst'], + ['File Upload', 'demos/file-upload/index.rst'], ]], ['API Reference', [ @@ -77,6 +78,7 @@ ['Middlewares', 'api-reference/middlewares.rst'], ['Settings', 'api-reference/settings.rst'], ['Sessions', 'api-reference/sessions.rst'], + ['Buckets', 'api-reference/buckets.rst'], ['Channels', 'api-reference/channels.rst'], ['Testing', 'api-reference/testing.rst'], ['Lona Shell', 'api-reference/lona-shell.rst'], diff --git a/lona/__init__.py b/lona/__init__.py index 3a9c7ac9..6d414d67 100644 --- a/lona/__init__.py +++ b/lona/__init__.py @@ -8,6 +8,7 @@ from .app import App as LonaApp from .channels import Channel from .request import Request +from .buckets import Bucket from .view import View from .app import App diff --git a/lona/buckets.py b/lona/buckets.py new file mode 100644 index 00000000..f35c0afe --- /dev/null +++ b/lona/buckets.py @@ -0,0 +1,437 @@ +from __future__ import annotations + +from tempfile import TemporaryDirectory +from threading import Lock +import logging +import os + +from lona import HttpRedirectResponse, 
TemplateResponse, FileResponse, Response +from lona.unique_ids import generate_unique_id2 + +logger = logging.getLogger('lona.buckets') + + +class Bucket: + _lock: Lock = Lock() + + _buckets: dict = { + # request.id: { + # bucket.id: bucket + # } + } + + @classmethod + def _open_bucket(cls, request, bucket): + with cls._lock: + if request.id not in cls._buckets: + cls._buckets[request.id] = {} + + cls._buckets[request.id][bucket.id] = bucket + + logger.info( + 'bucket opened (request=%s, bucket=%s)', + request.id, + bucket.id, + ) + + @classmethod + def _get_bucket(cls, request_id, bucket_id): + with cls._lock: + if (request_id in cls._buckets and + bucket_id in cls._buckets[request_id]): + + return cls._buckets[request_id][bucket_id] + + @classmethod + def _close_buckets(cls, request): + buckets = {} + + with cls._lock: + if request.id in cls._buckets: + buckets.update(cls._buckets.pop(request.id)) + + for bucket in buckets.values(): + bucket._temp_dir.cleanup() + + logger.info( + 'bucket closed (request=%s, bucket=%s)', + request.id, + bucket.id, + ) + + def __init__( + self, + request, + max_files=None, + max_size=None, + index=True, + on_add=None, + on_delete=None, + ): + + """ + :request: `lona.Request` object + + :max_files: Maximum of files that can be added (uploaded) to the + bucket as integer. If max_files is `None` any amount of + files is allowed. + + :max_size: Maximum of bytes that can be added (uploaded) to the + bucket as integer. If max_size is `None` any amount of + bytes is allowed. + + :index: HTTP/HTML index is enabled. + + :on_add: Optional callback which is called with the list of added + file names, when one or more files are added in a + POST request. + + :on_delete: Optional callback which is called with the list of deleted + file names, when one or more files are deleted in a + POST request. 
+ """ + + self.request = request + self.max_files = max_files + self.max_size = max_size + self.index = index + self.on_add = on_add + self.on_delete = on_delete + + self._write_lock = Lock() + self._id = generate_unique_id2() + self._temp_dir = TemporaryDirectory() + self._open_bucket(request=request, bucket=self) + + def __repr__(self): + return f'' + + @property + def id(self): + return self._id + + def get_path(self, file_name=''): + """ + Returns the absolute path to the given file name, as a string. If no + file name is given, the absolute path to the buckets directory is + returned. + + :file_name: optional string file name + """ + + path = self._temp_dir.name + + if file_name: + if file_name.startswith('/'): + file_name = file_name[1:] + + path = os.path.join(path, file_name) + + return path + + def get_file_names(self): + """ + Returns a list of all file names in the bucket as strings. + """ + + return os.listdir(self.get_path()) + + def get_size(self): + """ + Returns the sum of the sizes of all files in bytes, contained in the + bucket, as integer. + """ + + size = 0 + + for entry in os.scandir(self.get_path()): + size += entry.stat().st_size + + return size + + def get_url(self, file_name=''): + """ + Returns the URL to the given file name, as a string. If no file + name is given, the URL to the buckets index is returned. + + :file_name: optional string file name + """ + + prefix = self.request.server.settings.BUCKETS_URL_PREFIX + + if prefix.startswith('/'): + prefix = prefix[1:] + + if prefix.endswith('/'): + prefix = prefix[:-1] + + url = f'/{prefix}/{self.request.id}/{self.id}' + + if file_name: + url = f'{url}/{file_name}' + + return url + + def get_add_url(self): + """ + Returns the add (upload) URL of the bucket as a string. + """ + + return self.get_url(file_name='add') + + def get_delete_url(self): + """ + Returns the delete URL of the bucket as a string. 
+ """ + + return self.get_url(file_name='delete') + + +class BucketsMiddleware: + async def _handle_file_upload(self, http_request, request_id, bucket): + file_names = [] + files_written = len(bucket.get_file_names()) + bytes_written = bucket.get_size() + + async for field in (await http_request.multipart()): + if not field.filename: + continue + + rel_path = field.filename + abs_path = os.path.join(bucket.get_path(file_name=rel_path)) + file_size = 0 + + # check if upload exceeds bucket.max_files + if (bucket.max_files is not None and + files_written + 1 > bucket.max_files): + + logger.info( + '%s exceeds the buckets max files (request=%s, bucket=%s)', + rel_path, + request_id, + bucket.id, + ) + + return file_names, 'too many files uploaded' + + # write file to file system + file_names.append(rel_path) + files_written += 1 + + with open(abs_path, 'wb') as file_handle: + while True: + chunk = await field.read_chunk() + chunk_size = len(chunk) + + if chunk_size == 0: + break + + # check if upload exceeds bucket.max_size + if (bucket.max_size is not None and + bytes_written + chunk_size > bucket.max_size): + + logger.info( + '%s exceeds the buckets max size (request=%s, bucket=%s)', + rel_path, + request_id, + bucket.id, + ) + + return file_names, 'too much data uploadad' + + file_handle.write(chunk) + file_size += chunk_size + bytes_written += chunk_size + + logger.info( + '%s bytes written to %s (request=%s, bucket=%s)', + file_size, + abs_path, + request_id, + bucket.id, + ) + + return file_names, '' + + def handle_http_request(self, data): + server = data.server + settings = server.settings + http_request = data.http_request + redirect_url = http_request.query.get('redirect', '') + + def success(): + if redirect_url: + return HttpRedirectResponse(redirect_url) + + return Response(status=200) + + # check if request is a bucket request + if not http_request.path.startswith(settings.BUCKETS_URL_PREFIX): + return data + + # parse request + # valid requests: + # POST 
//// + # POST //// + # GET //// + # GET ///(/) + path_parts = [part for part in http_request.path.split('/') if part] + + if len(path_parts) < 3 or len(path_parts) > 4: + return data + + if len(path_parts) < 4: + path_parts.append('') # empty file name + + _, request_id, bucket_id, file_name_or_action = path_parts + bucket = Bucket._get_bucket(request_id=request_id, bucket_id=bucket_id) + + if file_name_or_action in ('add', 'delete'): + action = file_name_or_action + file_name = '' + + else: + action = '' + file_name = file_name_or_action + + # search bucket + if not bucket: + return Response(status=404) + + # handle request + if http_request.method == 'POST': + + # file upload + if action == 'add': + + # check content type + if not http_request.content_type.startswith('multipart/'): + return Response(status=400) + + # we have to lock the bucket for writing so bucket.max_files + # and bucket.max_size are not overrun by multiple + # concurrent requests + with bucket._write_lock: + uploaded_files, error_message = server.run_coroutine_sync( + self._handle_file_upload( + http_request=http_request, + request_id=request_id, + bucket=bucket, + ), + wait=True, + ) + + if error_message: + for rel_path in uploaded_files: + abs_path = os.path.join( + bucket.get_path(file_name=rel_path), + ) + + os.unlink(abs_path) + + logger.info( + '%s was deleted (request=%s, bucket=%s)', + abs_path, + request_id, + bucket_id, + ) + + return Response(status=400, text=error_message) + + # bucket.on_add hook + if uploaded_files and bucket.on_add: + logger.debug( + 'running %s (request=%s, bucket=%s)', + bucket.on_add, + request_id, + bucket_id, + ) + + try: + bucket.on_add(uploaded_files) + + except Exception: + logger.exception( + 'Exception raised while running %s (request=%s, bucket=%s)', + bucket.on_add, + request_id, + bucket_id, + ) + + return success() + + # file deletion + elif action == 'delete': + post_data = server.run_coroutine_sync( + http_request.post(), + wait=True, + ) + + if 
'name' not in post_data: + return Response(status=400) + + rel_path = post_data['name'] + abs_path = os.path.join(bucket.get_path(file_name=rel_path)) + + if not os.path.exists(abs_path): + return Response(status=404) + + logger.info( + '%s deleting (request=%s, bucket=%s)', + abs_path, + request_id, + bucket_id, + ) + + os.unlink(abs_path) + + # bucket.on_delete hook + if bucket.on_delete: + logger.debug( + 'running %s (request=%s, bucket=%s)', + bucket.on_delete, + request_id, + bucket_id, + ) + + try: + bucket.on_delete([rel_path]) + + except Exception: + logger.exception( + 'Exception raised while running %s (request=%s, bucket=%s)', + bucket.on_delete, + request_id, + bucket_id, + ) + + return success() + + # invalid action + else: + return Response(status=400) + + # index + if not bucket.index: + return Response(status=401) + + if file_name: + abs_path = os.path.join(bucket.get_path(file_name=file_name)) + + if not os.path.exists(abs_path): + return Response(status=404) + + return FileResponse(path=abs_path) + + return TemplateResponse( + name='lona/bucket.html', + context={ + 'bucket': bucket, + }, + ) + + # bad request + return Response(status=400) + + def on_view_cleanup(self, data): + Bucket._close_buckets(request=data.request) diff --git a/lona/default_settings.py b/lona/default_settings.py index 4dfc8237..fed0b7a4 100644 --- a/lona/default_settings.py +++ b/lona/default_settings.py @@ -77,6 +77,7 @@ CORE_MIDDLEWARES = [ 'lona.middlewares.sessions.LonaSessionMiddleware', 'lona.middlewares.lona_messages.LonaMessageMiddleware', + 'lona.buckets.BucketsMiddleware', ] MIDDLEWARES: list[str] = [] @@ -94,6 +95,7 @@ 'lona.shell.commands.lona_middlewares.LonaMiddlewaresCommand', 'lona.shell.commands.lona_templates.LonaTemplatesCommand', 'lona.shell.commands.lona_settings.LonaSettingsCommand', + 'lona.shell.commands.lona_buckets.LonaBucketsCommand', 'lona.shell.commands.lona_routes.LonaRoutesCommand', 'lona.shell.commands.lona_views.LonaViewsCommand', 
'lona.shell.commands.lona_info.LonaInfoCommand', @@ -108,6 +110,9 @@ # server AIOHTTP_CLIENT_MAX_SIZE = 1024**2 +# buckets +BUCKETS_URL_PREFIX = '/buckets/' + # feature flags STOP_DAEMON_WHEN_VIEW_FINISHES = True # TODO: remove in 2.0 CLIENT_VERSION = 1 # TODO: remove in 2.0 diff --git a/lona/shell/commands/lona_buckets.py b/lona/shell/commands/lona_buckets.py new file mode 100644 index 00000000..27fbf406 --- /dev/null +++ b/lona/shell/commands/lona_buckets.py @@ -0,0 +1,64 @@ +from rlpython.utils.argument_parser import ReplArgumentParser +from rlpython.utils.table import write_table + +from lona import Bucket + + +class LonaBucketsCommand: + """ + List currently open Lona buckets + """ + + NAME = 'lona_buckets' + + def __init__(self, repl): + self.repl = repl + + def complete(self, text, state, line_buffer): + server = self.repl.globals['server'] + controller = server._view_runtime_controller + + view_runtime_ids = [] + + for view_runtime in controller.iter_view_runtimes(): + view_runtime_ids.append(view_runtime.view_runtime_id) + + view_runtime_ids = sorted(view_runtime_ids) + candidates = [] + + for view_runtime_id in view_runtime_ids: + if view_runtime_id.startswith(text): + candidates.append(view_runtime_id) + + candidates.append(None) + + return candidates[state] + + # command ################################################################# + def run(self, argv): + + # parse command line + argument_parser = ReplArgumentParser( + repl=self.repl, + prog='lona_buckets', + ) + + arguments = vars(argument_parser.parse_args(argv[1:])) + + return self.list_buckets(arguments) + + def list_buckets(self, arguments): + rows = [ + ['Id', 'User', 'Directory', 'URL'], + ] + + for buckets in Bucket._buckets.values(): + for bucket in buckets.values(): + rows.append([ + str(bucket.id), + str(bucket.request.user), + str(bucket.get_path()), + str(bucket.get_url()), + ]) + + write_table(rows, self.repl.write) diff --git a/lona/templates/lona/bucket.html 
b/lona/templates/lona/bucket.html new file mode 100644 index 00000000..9ad80d81 --- /dev/null +++ b/lona/templates/lona/bucket.html @@ -0,0 +1,54 @@ + + + + + + Lona Bucket {{ bucket.id }} + + + +

Lona Bucket {{ bucket.id }}

+ + + + + + + + + +
Max Files{{ repr(bucket.max_files) }}
Max Size{{ repr(bucket.max_size) }}
+ +

Add File

+
+ + +
+ +

Files

+ {% set file_names=bucket.get_file_names() %} + {% if file_names %} +
    + {% for file_name in file_names %} +
  • + {{ file_name }} +
    + + +
    +
  • + {% endfor %} +
+ {% else %} +

No files yet

+ {% endif %} + + diff --git a/tests/test_600_buckets.py b/tests/test_600_buckets.py new file mode 100644 index 00000000..7d720750 --- /dev/null +++ b/tests/test_600_buckets.py @@ -0,0 +1,411 @@ +from tempfile import TemporaryDirectory +import string +import random +import uuid +import os + +from playwright.async_api import async_playwright +import aiohttp + +from lona.pytest import eventually +from lona import Bucket, View + + +async def test_buckets(lona_app_context): + + # test state + upload_temp_dir = TemporaryDirectory() + download_temp_dir = TemporaryDirectory() + + bucket_objects = [] + bucket_count = [1] + bucket_kwargs = {} + added_files = [] + deleted_files = [] + + # lona view setup + def setup_app(app): + + @app.route('/') + class BucketView(View): + def on_add(self, files): + added_files.extend(files) + + self.show('ON_ADD') + + def on_delete(self, files): + deleted_files.extend(files) + + self.show('ON_DELETE') + + def handle_request(self, request): + bucket_objects.clear() + + for _ in range(bucket_count[0]): + bucket_objects.append( + Bucket( + request=request, + on_add=self.on_add, + on_delete=self.on_delete, + **bucket_kwargs, + ), + ) + + return 'BUCKETS SETUP' + + context = await lona_app_context(setup_app) + url = context.make_url('/') + + # helper + def get_bucket(index=0): + return bucket_objects[index] + + def generate_file(size=256): + file_name = f'{uuid.uuid1()}.txt' + file_path = os.path.join(upload_temp_dir.name, file_name) + + with open(file_path, 'w+') as handle: + for _ in range(size): + handle.write(random.choice(string.ascii_letters)) + + return file_name, file_path + + def compare_files(path_a, path_b): + return open(path_a, 'r').read() == open(path_b, 'r').read() + + async def setup_buckets( + page, + count=1, + index=True, + max_files=None, + max_size=None, + ): + + bucket_count[0] = count + + bucket_kwargs.update({ + 'index': index, + 'max_files': max_files, + 'max_size': max_size, + }) + + await page.goto(url) + await 
page.wait_for_selector('#lona:has-text("BUCKETS SETUP")') + + added_files.clear() + deleted_files.clear() + + async def close_buckets(page): + await page.goto('about:blank') + + async def files_added(page): + await page.wait_for_selector('#lona:has-text("ON_ADD")') + + async def files_deleted(page): + await page.wait_for_selector('#lona:has-text("ON_DELETE")') + + async def upload_files(*files, index=0, extra_data=None): + data = {} + url = context.make_url(get_bucket(index=index).get_add_url()) + + for file_name, file_path in files: + data[file_name] = open(file_path, 'rb') + + async with aiohttp.ClientSession() as session: + response = await session.post( + url=url, + data={ + **data, + **(extra_data or {}), + }, + ) + + return response + + async def get_index(index=0): + url = context.make_url(get_bucket(index=index).get_url()) + + async with aiohttp.ClientSession() as session: + return (await session.get(url)) + + async def download_file(file_name, index=0): + url = context.make_url( + get_bucket(index=index).get_url(file_name=file_name), + ) + + file_path = os.path.join(download_temp_dir.name, file_name) + + async with aiohttp.ClientSession() as session: + response = await session.get(url) + + if not response.status == 200: + return response, '', '' + + with open(file_path, 'wb') as fd: + async for chunk in response.content.iter_chunked(8): + fd.write(chunk) + + return response, file_name, file_path + + async def delete_file(file_name, index=0): + url = context.make_url(get_bucket(index=index).get_delete_url()) + + async with aiohttp.ClientSession() as session: + response = await session.post( + url=url, + data={ + 'name': file_name, + }, + ) + + return response + + # test code + async with async_playwright() as p: + browser = await p.chromium.launch() + browser_context = await browser.new_context() + page = await browser_context.new_page() + + # add files ########################################################### + await setup_buckets(page) + + # upload files 
+ file_name_1, file_path_1 = generate_file(size=100) + file_name_2, file_path_2 = generate_file(size=50) + + response = await upload_files( + (file_name_1, file_path_1), + (file_name_2, file_path_2), + ) + + assert response.status == 200 + + await files_added(page) + + # check hooks + assert sorted(added_files) == sorted([file_name_1, file_name_2]) + assert deleted_files == [] + + # check uploaded files + assert compare_files(file_path_1, get_bucket().get_path(file_name_1)) + assert compare_files(file_path_2, get_bucket().get_path(file_name_2)) + + assert ( + sorted([file_name_1, file_name_2]) == + sorted(get_bucket().get_file_names()) + ) + + # index ############################################################### + response = await get_index() + response_text = await response.text() + + assert response.status == 200 + assert file_name_1 in response_text + assert file_name_2 in response_text + + # get files ########################################################### + # file 1 + result = await download_file(file_name_1) + response, download_file_name_1, download_file_path_1 = result + + assert response.status == 200 + assert compare_files(file_path_1, download_file_path_1) + + # file 2 + result = await download_file(file_name_2) + response, download_file_name_2, download_file_path_2 = result + + assert response.status == 200 + assert compare_files(file_path_2, download_file_path_2) + + # file not found + result = await download_file('unknown-file.txt') + response, download_file_name_2, download_file_path_2 = result + + assert response.status == 404 + + # delete files ######################################################## + response = await delete_file(file_name_1) + + assert response.status == 200 + + await files_deleted(page) + + assert not os.path.exists(get_bucket().get_path(file_name_1)) + assert os.path.exists(get_bucket().get_path(file_name_2)) + + # cleanup ############################################################# + await close_buckets(page) + + for 
attempt in eventually(): + async with attempt: + assert not os.path.exists(get_bucket().get_path()) + + # max files ########################################################### + # too many files in one request + await setup_buckets(page, max_files=2) + + file_name_1, file_path_1 = generate_file(size=8) + file_name_2, file_path_2 = generate_file(size=8) + file_name_3, file_path_3 = generate_file(size=8) + + response = await upload_files( + (file_name_1, file_path_1), + (file_name_2, file_path_2), + (file_name_3, file_path_3), + ) + + assert response.status == 400 + + assert not os.path.exists(get_bucket().get_path(file_name_1)) + assert not os.path.exists(get_bucket().get_path(file_name_2)) + assert not os.path.exists(get_bucket().get_path(file_name_3)) + + await close_buckets(page) + + # too many files in two requests + await setup_buckets(page, max_files=2) + + file_name_1, file_path_1 = generate_file(size=8) + file_name_2, file_path_2 = generate_file(size=8) + + response = await upload_files( + (file_name_1, file_path_1), + (file_name_2, file_path_2), + ) + + assert response.status == 200 + + file_name_3, file_path_3 = generate_file(size=8) + + response = await upload_files( + (file_name_3, file_path_3), + ) + + assert response.status == 400 + + assert os.path.exists(get_bucket().get_path(file_name_1)) + assert os.path.exists(get_bucket().get_path(file_name_2)) + assert not os.path.exists(get_bucket().get_path(file_name_3)) + + await close_buckets(page) + + # max size ############################################################ + # too much data in one request + await setup_buckets(page, max_size=100) + + file_name_1, file_path_1 = generate_file(size=50) + file_name_2, file_path_2 = generate_file(size=50) + file_name_3, file_path_3 = generate_file(size=50) + + response = await upload_files( + (file_name_1, file_path_1), + (file_name_2, file_path_2), + (file_name_3, file_path_3), + ) + + assert response.status == 400 + + assert not 
os.path.exists(get_bucket().get_path(file_name_1)) + assert not os.path.exists(get_bucket().get_path(file_name_2)) + assert not os.path.exists(get_bucket().get_path(file_name_3)) + + await close_buckets(page) + + # too much files in two requests + await setup_buckets(page, max_size=100) + + file_name_1, file_path_1 = generate_file(size=50) + file_name_2, file_path_2 = generate_file(size=50) + + response = await upload_files( + (file_name_1, file_path_1), + (file_name_2, file_path_2), + ) + + assert response.status == 200 + + file_name_3, file_path_3 = generate_file(size=50) + + response = await upload_files( + (file_name_3, file_path_3), + ) + + assert response.status == 400 + + assert os.path.exists(get_bucket().get_path(file_name_1)) + assert os.path.exists(get_bucket().get_path(file_name_2)) + assert not os.path.exists(get_bucket().get_path(file_name_3)) + + await close_buckets(page) + + # mixed post ########################################################## + await setup_buckets(page) + + # upload files + file_name_1, file_path_1 = generate_file(size=8) + + response = await upload_files( + (file_name_1, file_path_1), + extra_data={ + 'foo': 'bar', + }, + ) + + assert response.status == 200 + + await files_added(page) + + assert compare_files(file_path_1, get_bucket().get_path(file_name_1)) + + await close_buckets(page) + + # multiple buckets #################################################### + await setup_buckets(page, count=2) + + file_name_1, file_path_1 = generate_file(size=8) + file_name_2, file_path_2 = generate_file(size=8) + + await upload_files((file_name_1, file_path_1), index=0) + await upload_files((file_name_2, file_path_2), index=1) + + for attempt in eventually(): + async with attempt: + assert os.path.exists(get_bucket(0).get_path(file_name_1)) + assert not os.path.exists(get_bucket(0).get_path(file_name_2)) + + assert not os.path.exists(get_bucket(1).get_path(file_name_1)) + assert os.path.exists(get_bucket(1).get_path(file_name_2)) + + await 
close_buckets(page) + + for attempt in eventually(): + async with attempt: + assert not os.path.exists(get_bucket(0).get_path(file_name_1)) + assert not os.path.exists(get_bucket(0).get_path(file_name_2)) + + assert not os.path.exists(get_bucket(1).get_path(file_name_1)) + assert not os.path.exists(get_bucket(1).get_path(file_name_2)) + + # disabled index ###################################################### + await setup_buckets(page, index=False) + + file_name_1, file_path_1 = generate_file(size=8) + + response = await upload_files( + (file_name_1, file_path_1), + ) + + # index + response = await get_index() + + assert response.status == 401 + + # get file + result = await download_file(file_name_1) + response, download_file_name_1, download_file_path_1 = result + + assert response.status == 401 + assert not download_file_path_1 + + await close_buckets(page)