From 89aceb7940b04f72de642eec5a4b5bdcdf26f7cc Mon Sep 17 00:00:00 2001 From: Ivo Dilov Date: Fri, 15 Mar 2024 15:38:18 +0200 Subject: [PATCH] Add update, append and delete asv benchmarks - Added under the ModificationFunctions asv group - Will be useful for monitoring regressions in these commonly used functions --- asv.conf.json | 2 +- python/.asv/results/benchmarks.json | 1169 +++++++++++++++++++++++++- python/benchmarks/basic_functions.py | 133 ++- python/benchmarks/common.py | 7 + 4 files changed, 1282 insertions(+), 29 deletions(-) diff --git a/asv.conf.json b/asv.conf.json index 09ff07611b1..2620bd88cdd 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -42,7 +42,7 @@ // List of branches to benchmark. If not provided, defaults to "master" // (for git) or "default" (for mercurial). - "branches": ["master"], // for git + "branches": ["add-modification-asv-benchmarks"], // for git // "branches": ["default"], // for mercurial // The DVCS being used. If not set, it will be automatically diff --git a/python/.asv/results/benchmarks.json b/python/.asv/results/benchmarks.json index f414915218c..37fc87f0a59 100644 --- a/python/.asv/results/benchmarks.json +++ b/python/.asv/results/benchmarks.json @@ -16,7 +16,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -39,7 +39,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -62,7 +62,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -85,7 +85,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -108,7 +108,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -131,7 +131,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -154,7 +154,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -177,7 +177,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -200,7 +200,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -223,7 +223,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -246,7 +246,7 @@ "1000" ] ], - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "peakmemory", "unit": "bytes", @@ -274,7 +274,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -303,7 +303,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -332,7 +332,7 @@ 
"repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -361,7 +361,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -390,7 +390,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -419,7 +419,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -448,7 +448,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -477,7 +477,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -506,7 +506,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -535,7 +535,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -564,7 +564,7 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", @@ -593,13 +593,274 @@ "repeat": 0, "rounds": 2, "sample_time": 0.01, - "setup_cache_key": "basic_functions:27", + "setup_cache_key": "basic_functions:34", "timeout": 6000, "type": "time", "unit": "seconds", "version": "acb9cffa129b783906ab570d4314933cc6087e83922ac90db8b061ca5819d15c", "warmup_time": -1 }, + "basic_functions.ModificationFunctions.time_append_large": { + "code": "class ModificationFunctions:\n def time_append_large(self, rows, num_symbols):\n [self.lib.append(f\"{sym}_sym\", self.df_append_large) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide 
= self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_append_large", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "8e81314eed85bb6042a60d2be1e7d26a4f24aea7a78b137b8f1b9ea335109f5a", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_append_short_wide": { + "code": "class ModificationFunctions:\n def time_append_short_wide(self, rows, num_symbols):\n self.lib_short_wide.append(\"short_wide_sym\", self.df_append_short_wide)\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": 
"basic_functions.ModificationFunctions.time_append_short_wide", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "7b574d1a2264153adb468c616f89a1a0863de23bad1424bf83f818576c729794", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_append_single": { + "code": "class ModificationFunctions:\n def time_append_single(self, rows, num_symbols):\n [self.lib.append(f\"{sym}_sym\", self.df_append_single) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_append_single", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "0d0c1f6d0a490270f818766427739e76152dd3a23ad243298ac800f31a0bbae5", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_delete": { + "code": "class ModificationFunctions:\n def time_delete(self, rows, num_symbols):\n [self.lib.delete(f\"{sym}_sym\") for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = 
pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_delete", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "4e8a66ecd26bcd6e36514d310623504c7e55b2392577d20fe55c12c9d89fa300", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_delete_short_wide": { + "code": "class ModificationFunctions:\n def time_delete_short_wide(self, rows, num_symbols):\n self.lib_short_wide.delete(\"short_wide_sym\")\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n 
)\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_delete_short_wide", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "9ae7ce5de373f563eb76f29a4b3e552817aeec2c6e4a47acf171c3fa37f70727", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_update_half": { + "code": "class ModificationFunctions:\n def time_update_half(self, rows, num_symbols):\n [self.lib.update(f\"{sym}_sym\", self.df_update_half) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n 
generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_update_half", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "1d05bccd3acb20fcc7cdf800676bb9645888b865e659f5b59b7c8eeb22848fdf", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_update_short_wide": { + "code": "class ModificationFunctions:\n def time_update_short_wide(self, rows, num_symbols):\n self.lib_short_wide.update(\"short_wide_sym\", self.df_update_short_wide)\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_update_short_wide", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "8d3e87b791c66e10b7a943d26ad4f5a855533b55c4c55a164f5b09e0ed94e82f", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_update_single": { + "code": "class ModificationFunctions:\n def time_update_single(self, rows, num_symbols):\n [self.lib.update(f\"{sym}_sym\", self.df_update_single) for sym in range(num_symbols)]\n\n def setup(self, rows, 
num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_update_single", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "44f6a147d23c75cac4aae9afa434d647fa55ff124b399d662cec83393f623ac8", + "warmup_time": -1 + }, + "basic_functions.ModificationFunctions.time_update_upsert": { + "code": "class ModificationFunctions:\n def time_update_upsert(self, rows, num_symbols):\n [self.lib.update(f\"{sym}_sym\", self.df_update_upsert, upsert=True) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, 
WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "basic_functions.ModificationFunctions.time_update_upsert", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "5afc0225d1e2aaccb4d080251c7b964defdce4f3bea935e3e35fa3f227bc52f2", + "warmup_time": -1 + }, "list_functions.ListFunctions.peakmem_list_symbols": { "code": "class ListFunctions:\n def peakmem_list_symbols(self, num_symbols):\n self.lib.list_symbols()\n\n def setup(self, num_symbols):\n self.ac = Arctic(\"lmdb://list_functions\")\n self.lib = self.ac[f\"{num_symbols}_num_symbols\"]\n\n def setup_cache(self):\n self.ac = Arctic(\"lmdb://list_functions\")\n \n num_symbols = ListFunctions.params\n for syms in num_symbols:\n lib_name = f\"{syms}_num_symbols\"\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n for sym in range(syms):\n lib.write(f\"{sym}_sym\", generate_benchmark_df(ListFunctions.rows))", "name": "list_functions.ListFunctions.peakmem_list_symbols", @@ -1098,5 +1359,867 @@ "version": "ed1d1ccb6458095a627788bfa2b53afa310ca8c8118a6405c91204724c865d6c", "warmup_time": -1 }, + "quick_test_run.BasicFunctions.peakmem_read": { + "code": "class BasicFunctions:\n def peakmem_read(self, rows, num_symbols):\n [self.lib.read(f\"{sym}_sym\").data for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = 
get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_read", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "ff0fa49989b607a8cd79974adb22a64c2c93fe486237ff2f773456beda8bacb8" + }, + "quick_test_run.BasicFunctions.peakmem_read_batch": { + "code": "class BasicFunctions:\n def peakmem_read_batch(self, rows, num_symbols):\n read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n self.lib.read_batch(read_reqs)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_read_batch", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "2be00ac5794510876a781db96a7e0e1b7c5f00e445a4acf8ab1b19c5088e0729" + }, + "quick_test_run.BasicFunctions.peakmem_read_batch_with_columns": { + "code": "class BasicFunctions:\n def peakmem_read_batch_with_columns(self, rows, num_symbols):\n COLS = [\"value\"]\n read_reqs = [\n ReadRequest(f\"{sym}_sym\", columns=COLS) for sym in range(num_symbols)\n ]\n self.lib.read_batch(read_reqs)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in 
range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_read_batch_with_columns", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "7cd80ad56576a40b894a7d0228ab639c13387e91d3a01cb8abb18aa46664e003" + }, + "quick_test_run.BasicFunctions.peakmem_read_batch_with_date_ranges": { + "code": "class BasicFunctions:\n def peakmem_read_batch_with_date_ranges(self, rows, num_symbols):\n read_reqs = [\n ReadRequest(f\"{sym}_sym\", date_range=BasicFunctions.DATE_RANGE)\n for sym in range(num_symbols)\n ]\n self.lib.read_batch(read_reqs)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_read_batch_with_date_ranges", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "d96c63936b6ebe42f14b1c74dcc0f455ef7b405e16cfbdc0a8d288a645b0b043" + }, + "quick_test_run.BasicFunctions.peakmem_read_short_wide": { + "code": "class BasicFunctions:\n def peakmem_read_short_wide(self, rows, num_symbols):\n lib = self.ac[get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)]\n lib.read(\"short_wide_sym\").data\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in 
num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_read_short_wide", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "638a41d1feadbdca303c578e4ac7cf0f42729b3fcfd0d39fd45ae938d2ffbb8a" + }, + "quick_test_run.BasicFunctions.peakmem_read_with_columns": { + "code": "class BasicFunctions:\n def peakmem_read_with_columns(self, rows, num_symbols):\n COLS = [\"value\"]\n [self.lib.read(f\"{sym}_sym\", columns=COLS).data for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_read_with_columns", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "b8a3dd2e09d1428bbaef260c060c203d64645c174954ed5802d02d162c962f48" + }, + "quick_test_run.BasicFunctions.peakmem_read_with_date_ranges": { + "code": "class BasicFunctions:\n def peakmem_read_with_date_ranges(self, rows, num_symbols):\n [\n self.lib.read(f\"{sym}_sym\", date_range=BasicFunctions.DATE_RANGE).data\n for sym in range(num_symbols)\n ]\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = 
{rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_read_with_date_ranges", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "7e9a5d1e08b867dbaed4633419f9837bdd126be1dd70bb878313661d27d4f1a7" + }, + "quick_test_run.BasicFunctions.peakmem_write": { + "code": "class BasicFunctions:\n def peakmem_write(self, rows, num_symbols):\n for sym in range(num_symbols):\n self.fresh_lib.write(f\"{sym}_sym\", self.df)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_write", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "0855652987ef89344d1e526c8c739022c83f59a960b30c6c04b5bad2ac5e4559" + }, + "quick_test_run.BasicFunctions.peakmem_write_batch": { + "code": "class BasicFunctions:\n def peakmem_write_batch(self, rows, num_symbols):\n payloads = [WritePayload(f\"{sym}_sym\", self.df) for sym in range(num_symbols)]\n self.fresh_lib.write_batch(payloads)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n 
self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_write_batch", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "9082643676fbdd29859e40a27924b6e66e4c3b5dca44f7adb7abbdaecbb21403" + }, + "quick_test_run.BasicFunctions.peakmem_write_short_wide": { + "code": "class BasicFunctions:\n def peakmem_write_short_wide(self, rows, num_symbols):\n self.fresh_lib.write(\"short_wide_sym\", self.df_short_wide)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_write_short_wide", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "3d5fd2504315fa5a621a184dc8a41da2fcf9b50bbece4db6fee59b1942406088" + }, + "quick_test_run.BasicFunctions.peakmem_write_staged": { + "code": "class BasicFunctions:\n def peakmem_write_staged(self, rows, num_symbols):\n for sym in range(num_symbols):\n self.fresh_lib.write(f\"{sym}_sym\", self.df, staged=True)\n \n for sym in range(num_symbols):\n self.fresh_lib._nvs.compact_incomplete(f\"{sym}_sym\", False, False)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac 
= Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "name": "quick_test_run.BasicFunctions.peakmem_write_staged", + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "peakmemory", + "unit": "bytes", + "version": "d40f7c2a115e9be8b63e5a103f688b1f0b2e607dd9b6556a1b7cce7e4e965bc7" + }, + "quick_test_run.BasicFunctions.time_read": { + "code": "class BasicFunctions:\n def time_read(self, rows, num_symbols):\n [self.lib.read(f\"{sym}_sym\").data for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "20dcdc0849ccc90831237dd53140880417f2c3cf65d6fbb01b6197d2764d92d3", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_read_batch": { + "code": "class BasicFunctions:\n def time_read_batch(self, rows, num_symbols):\n read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n self.lib.read_batch(read_reqs)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n 
self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read_batch", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "b5ff7d93d7ef652e32c98e67bae5718f9fec1d17ab6ff494b122f840b4c55221", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_read_batch_pure": { + "code": "class BasicFunctions:\n def time_read_batch_pure(self, rows, num_symbols):\n self.lib.read_batch(self.read_reqs)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read_batch_pure", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "f31363259c8422fe15df9406717b08f59cb4a041f890055bd55a1bbee9409306", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_read_batch_with_columns": { + "code": "class BasicFunctions:\n def time_read_batch_with_columns(self, rows, num_symbols):\n COLS = [\"value\"]\n read_reqs = [\n ReadRequest(f\"{sym}_sym\", columns=COLS) for sym in range(num_symbols)\n ]\n self.lib.read_batch(read_reqs)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in 
range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read_batch_with_columns", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "c260f11ce4110feeb2610709724bdd429ad1186d795adaa9a06968a3a2812f12", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_read_batch_with_date_ranges": { + "code": "class BasicFunctions:\n def time_read_batch_with_date_ranges(self, rows, num_symbols):\n read_reqs = [\n ReadRequest(f\"{sym}_sym\", date_range=BasicFunctions.DATE_RANGE)\n for sym in range(num_symbols)\n ]\n self.lib.read_batch(read_reqs)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read_batch_with_date_ranges", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "68aff748fa1280b20a855c85b7987f2e0b960c58cddf77adc6f08780a7025f20", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_read_short_wide": { + 
"code": "class BasicFunctions:\n def time_read_short_wide(self, rows, num_symbols):\n lib = self.ac[get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)]\n lib.read(\"short_wide_sym\").data\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read_short_wide", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "d8c7ac3e4436a55ab33a4b3a8776ae827ba20b4c66bf0cc62a6606132cdd6860", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_read_with_columns": { + "code": "class BasicFunctions:\n def time_read_with_columns(self, rows, num_symbols):\n COLS = [\"value\"]\n [self.lib.read(f\"{sym}_sym\", columns=COLS).data for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read_with_columns", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + 
"setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "0f58babd41c8af4a7e84064fbde378429a70ac9051c8f64c0c67e8fee20a2984", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_read_with_date_ranges": { + "code": "class BasicFunctions:\n def time_read_with_date_ranges(self, rows, num_symbols):\n [\n self.lib.read(f\"{sym}_sym\", date_range=BasicFunctions.DATE_RANGE).data\n for sym in range(num_symbols)\n ]\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_read_with_date_ranges", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "570d8f26da8bef8625b36cc8c735dbf80b4ea0804be4d670cab6d660cf1e8a9d", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_write": { + "code": "class BasicFunctions:\n def time_write(self, rows, num_symbols):\n for sym in range(num_symbols):\n self.fresh_lib.write(f\"{sym}_sym\", self.df)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": 
"quick_test_run.BasicFunctions.time_write", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "193d2b9f0a325f37f955e5bd9e460ea38c23d6c9b5e1b790255b9348f4a626d9", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_write_batch": { + "code": "class BasicFunctions:\n def time_write_batch(self, rows, num_symbols):\n payloads = [WritePayload(f\"{sym}_sym\", self.df) for sym in range(num_symbols)]\n self.fresh_lib.write_batch(payloads)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_write_batch", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "24df3392a32f8efda28a6dbeaa9bede81077b145a2ef1d7f39716bebf16a69a7", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_write_short_wide": { + "code": "class BasicFunctions:\n def time_write_short_wide(self, rows, num_symbols):\n self.fresh_lib.write(\"short_wide_sym\", self.df_short_wide)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = 
self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_write_short_wide", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "e9e4c59fd2a83b81f11f909822b7c46d7d393740db36738f09d495cf5ff945cc", + "warmup_time": -1 + }, + "quick_test_run.BasicFunctions.time_write_staged": { + "code": "class BasicFunctions:\n def time_write_staged(self, rows, num_symbols):\n for sym in range(num_symbols):\n self.fresh_lib.write(f\"{sym}_sym\", self.df, staged=True)\n \n for sym in range(num_symbols):\n self.fresh_lib._nvs.compact_incomplete(f\"{sym}_sym\", False, False)\n\n def setup(self, rows, num_symbols):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n self.read_reqs = [ReadRequest(f\"{sym}_sym\") for sym in range(num_symbols)]\n \n self.df = generate_pseudo_random_dataframe(rows)\n self.df_short_wide = generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n )\n \n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.fresh_lib = self.get_fresh_lib()\n\n def setup_cache(self):\n self.ac = Arctic(BasicFunctions.CONNECTION_STRING)\n num_rows, num_symbols = BasicFunctions.params\n \n self.dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib)\n self.ac.create_library(lib)\n lib = self.ac[lib]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.dfs[rows])\n \n lib_name = get_prewritten_lib_name(BasicFunctions.WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe(\n BasicFunctions.WIDE_DF_ROWS, BasicFunctions.WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.BasicFunctions.time_write_staged", + "number": 5, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:34", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "acb9cffa129b783906ab570d4314933cc6087e83922ac90db8b061ca5819d15c", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_append_large": { + "code": "class ModificationFunctions:\n def time_append_large(self, rows, num_symbols):\n [self.lib.append(f\"{sym}_sym\", self.df_append_large) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", 
get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_append_large", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "8e81314eed85bb6042a60d2be1e7d26a4f24aea7a78b137b8f1b9ea335109f5a", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_append_short_wide": { + "code": "class ModificationFunctions:\n def time_append_short_wide(self, rows, num_symbols):\n self.lib_short_wide.append(\"short_wide_sym\", self.df_append_short_wide)\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n 
lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_append_short_wide", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "7b574d1a2264153adb468c616f89a1a0863de23bad1424bf83f818576c729794", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_append_single": { + "code": "class ModificationFunctions:\n def time_append_single(self, rows, num_symbols):\n [self.lib.append(f\"{sym}_sym\", self.df_append_single) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_append_single", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": 
"seconds", + "version": "0d0c1f6d0a490270f818766427739e76152dd3a23ad243298ac800f31a0bbae5", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_delete": { + "code": "class ModificationFunctions:\n def time_delete(self, rows, num_symbols):\n [self.lib.delete(f\"{sym}_sym\") for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_delete", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "4e8a66ecd26bcd6e36514d310623504c7e55b2392577d20fe55c12c9d89fa300", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_delete_short_wide": { + "code": "class ModificationFunctions:\n def time_delete_short_wide(self, rows, num_symbols):\n self.lib_short_wide.delete(\"short_wide_sym\")\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n 
self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_delete_short_wide", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "9ae7ce5de373f563eb76f29a4b3e552817aeec2c6e4a47acf171c3fa37f70727", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_update_half": { + "code": "class ModificationFunctions:\n def time_update_half(self, rows, num_symbols):\n [self.lib.update(f\"{sym}_sym\", self.df_update_half) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: 
generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_update_half", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "1d05bccd3acb20fcc7cdf800676bb9645888b865e659f5b59b7c8eeb22848fdf", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_update_short_wide": { + "code": "class ModificationFunctions:\n def time_update_short_wide(self, rows, num_symbols):\n self.lib_short_wide.update(\"short_wide_sym\", self.df_update_short_wide)\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_update_short_wide", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + 
"setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "8d3e87b791c66e10b7a943d26ad4f5a855533b55c4c55a164f5b09e0ed94e82f", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_update_single": { + "code": "class ModificationFunctions:\n def time_update_single(self, rows, num_symbols):\n [self.lib.update(f\"{sym}_sym\", self.df_update_single) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_update_single", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "44f6a147d23c75cac4aae9afa434d647fa55ff124b399d662cec83393f623ac8", + "warmup_time": -1 + }, + "quick_test_run.ModificationFunctions.time_update_upsert": { + "code": "class ModificationFunctions:\n def time_update_upsert(self, rows, num_symbols):\n [self.lib.update(f\"{sym}_sym\", self.df_update_upsert, upsert=True) for sym in range(num_symbols)]\n\n def setup(self, rows, num_symbols):\n def get_time_at_fraction_of_df(fraction, rows=rows):\n end_time = pd.Timestamp(\"1/1/2023\")\n time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))\n return end_time + time_delta\n \n self.df_update_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(0.5))\n self.df_update_half = 
generate_pseudo_random_dataframe(rows//2, \"s\", get_time_at_fraction_of_df(0.75))\n self.df_update_upsert = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(1.5))\n self.df_append_single = generate_pseudo_random_dataframe(1, \"s\", get_time_at_fraction_of_df(1.1))\n self.df_append_large = generate_pseudo_random_dataframe(rows, \"s\", get_time_at_fraction_of_df(2))\n \n self.df_update_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n )\n self.df_append_short_wide = generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS, \"s\", get_time_at_fraction_of_df(2, rows=WIDE_DF_ROWS)\n )\n \n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n self.lib = self.ac[get_prewritten_lib_name(rows)]\n self.lib_short_wide = self.ac[get_prewritten_lib_name(WIDE_DF_ROWS)]\n\n def setup_cache(self):\n self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)\n num_rows, num_symbols = ModificationFunctions.params\n \n self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}\n for rows in num_rows:\n lib_name = get_prewritten_lib_name(rows)\n self.ac.delete_library(lib_name)\n self.ac.create_library(lib_name)\n lib = self.ac[lib_name]\n for sym in range(num_symbols[-1]):\n lib.write(f\"{sym}_sym\", self.init_dfs[rows])\n \n lib_name = get_prewritten_lib_name(WIDE_DF_ROWS)\n self.ac.delete_library(lib_name)\n lib = self.ac.create_library(lib_name)\n lib.write(\n \"short_wide_sym\",\n generate_random_floats_dataframe_with_index(\n WIDE_DF_ROWS, WIDE_DF_COLS\n ),\n )", + "min_run_count": 2, + "name": "quick_test_run.ModificationFunctions.time_update_upsert", + "number": 1, + "param_names": [ + "rows", + "num_symbols" + ], + "params": [ + [ + "100000", + "150000" + ], + [ + "500", + "1000" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "setup_cache_key": "basic_functions:204", + "timeout": 6000, + "type": "time", + "unit": "seconds", + "version": "5afc0225d1e2aaccb4d080251c7b964defdce4f3bea935e3e35fa3f227bc52f2", + "warmup_time": -1 + }, "version": 2 } \ No newline at end of file diff --git a/python/benchmarks/basic_functions.py b/python/benchmarks/basic_functions.py index aa3d244b828..62528dbabc1 100644 --- a/python/benchmarks/basic_functions.py +++ b/python/benchmarks/basic_functions.py @@ -12,17 +12,24 @@ from .common import * +# Common parameters between BasicFunctions and ModificationFunctions +WIDE_DF_ROWS = 5_000 +WIDE_DF_COLS = 30_000 +# PARAMS = ([1_000, 1_500], [50, 100]) +PARAMS = ([100_000, 150_000], [500, 1000]) +PARAM_NAMES = ["rows", "num_symbols"] + + class BasicFunctions: number = 5 timeout = 6000 CONNECTION_STRING = "lmdb://basic_functions?map_size=20GB" - WIDE_DF_ROWS = 5_000 - WIDE_DF_COLS = 30_000 + WIDE_DF_ROWS = WIDE_DF_ROWS + WIDE_DF_COLS = WIDE_DF_COLS DATE_RANGE = pd.date_range("2023-01-01", "2023-01-01") - # params = ([1_000, 15_00], [50, 100]) - params = ([100_000, 150_000], [500, 1000]) - param_names = ["rows", "num_symbols"] + params = PARAMS + param_names = PARAM_NAMES def setup_cache(self): self.ac = Arctic(BasicFunctions.CONNECTION_STRING) @@ -179,3 +186,119 @@ def peakmem_read_batch_with_date_ranges(self, rows, num_symbols): for sym in range(num_symbols) ] self.lib.read_batch(read_reqs) + + + +class ModificationFunctions: + """ + Modification functions (update, append, delete) need a different setup/teardown process, thus we place them in a + separate group. + """ + number = 1 # We do a single run between setup and teardown because we e.g. 
can't delete a symbol twice
+    timeout = 6000
+    CONNECTION_STRING = "lmdb://modification_functions?map_size=20GB"
+    WIDE_DF_ROWS = WIDE_DF_ROWS
+    WIDE_DF_COLS = WIDE_DF_COLS
+
+    params = PARAMS
+    param_names = PARAM_NAMES
+
+    def setup_cache(self):
+        self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)
+        num_rows, num_symbols = ModificationFunctions.params
+
+        self.init_dfs = {rows: generate_pseudo_random_dataframe(rows) for rows in num_rows}
+        for rows in num_rows:
+            lib_name = get_prewritten_lib_name(rows)
+            self.ac.delete_library(lib_name)
+            self.ac.create_library(lib_name)
+            lib = self.ac[lib_name]
+            for sym in range(num_symbols[-1]):
+                lib.write(f"{sym}_sym", self.init_dfs[rows])
+
+        lib_name = get_prewritten_lib_name(ModificationFunctions.WIDE_DF_ROWS)
+        self.ac.delete_library(lib_name)
+        lib = self.ac.create_library(lib_name)
+        lib.write(
+            "short_wide_sym",
+            generate_random_floats_dataframe_with_index(
+                ModificationFunctions.WIDE_DF_ROWS, ModificationFunctions.WIDE_DF_COLS
+            ),
+        )
+
+
+    def setup(self, rows, num_symbols):
+        def get_time_at_fraction_of_df(fraction, rows=rows):
+            end_time = pd.Timestamp("1/1/2023")
+            time_delta = pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction-1)))
+            return end_time + time_delta
+
+        self.df_update_single = generate_pseudo_random_dataframe(1, "s", get_time_at_fraction_of_df(0.5))
+        self.df_update_half = generate_pseudo_random_dataframe(rows//2, "s", get_time_at_fraction_of_df(0.75))
+        self.df_update_upsert = generate_pseudo_random_dataframe(rows, "s", get_time_at_fraction_of_df(1.5))
+        self.df_append_single = generate_pseudo_random_dataframe(1, "s", get_time_at_fraction_of_df(1.1))
+        self.df_append_large = generate_pseudo_random_dataframe(rows, "s", get_time_at_fraction_of_df(2))
+
+        self.df_update_short_wide = generate_random_floats_dataframe_with_index(
+            ModificationFunctions.WIDE_DF_ROWS, ModificationFunctions.WIDE_DF_COLS
+        )
+        self.df_append_short_wide = generate_random_floats_dataframe_with_index(
+            ModificationFunctions.WIDE_DF_ROWS, ModificationFunctions.WIDE_DF_COLS, "s", get_time_at_fraction_of_df(2, rows=ModificationFunctions.WIDE_DF_ROWS)
+        )
+
+        self.ac = Arctic(ModificationFunctions.CONNECTION_STRING)
+        self.lib = self.ac[get_prewritten_lib_name(rows)]
+        self.lib_short_wide = self.ac[get_prewritten_lib_name(ModificationFunctions.WIDE_DF_ROWS)]
+
+
+    def teardown(self, rows, num_symbols):
+        def restore_symbol(lib, symbol, df_if_missing):
+            versions = lib._nvs.list_versions(symbol=symbol)
+            if len(versions) == 0:
+                # If a symbol got deleted we rewrite it
+                lib.write(symbol, df_if_missing)
+            elif len(versions) > 1:
+                # If a symbol got a new version (via update or append) we keep only the oldest version
+                versions.sort(key=lambda entry: entry["version"])
+                for version in versions[1:]:
+                    lib._nvs.delete_version(symbol, version["version"])
+
+        init_df = generate_pseudo_random_dataframe(rows)
+        for sym in range(num_symbols):
+            symbol = f"{sym}_sym"
+            restore_symbol(self.lib, symbol, init_df)
+
+        init_df_short_wide = generate_random_floats_dataframe_with_index(ModificationFunctions.WIDE_DF_ROWS, ModificationFunctions.WIDE_DF_COLS)  # same shape as written in setup_cache
+        restore_symbol(self.lib_short_wide, "short_wide_sym", init_df_short_wide)
+
+        del self.lib
+        del self.lib_short_wide
+        del self.ac
+
+
+    def time_update_single(self, rows, num_symbols):
+        [self.lib.update(f"{sym}_sym", self.df_update_single) for sym in range(num_symbols)]
+
+    def time_update_half(self, rows, num_symbols):
+        [self.lib.update(f"{sym}_sym", self.df_update_half) for sym in range(num_symbols)]
+
+    def time_update_upsert(self, rows, num_symbols):
+        [self.lib.update(f"{sym}_sym", self.df_update_upsert, upsert=True) for sym in range(num_symbols)]
+
+    def time_update_short_wide(self, rows, num_symbols):
+        self.lib_short_wide.update("short_wide_sym", self.df_update_short_wide)
+
+    def time_append_single(self, rows, num_symbols):
+        [self.lib.append(f"{sym}_sym", self.df_append_single) for sym in range(num_symbols)]
+
+    def time_append_large(self, rows, num_symbols):
+        [self.lib.append(f"{sym}_sym", self.df_append_large) for sym in range(num_symbols)]
+
+    def time_append_short_wide(self, rows, num_symbols):
+        self.lib_short_wide.append("short_wide_sym", self.df_append_short_wide)
+
+    def time_delete(self, rows, num_symbols):
+        [self.lib.delete(f"{sym}_sym") for sym in range(num_symbols)]
+
+    def time_delete_short_wide(self, rows, num_symbols):
+        self.lib_short_wide.delete("short_wide_sym")
diff --git a/python/benchmarks/common.py b/python/benchmarks/common.py
index d05f888acd8..e538309b27e 100644
--- a/python/benchmarks/common.py
+++ b/python/benchmarks/common.py
@@ -37,6 +37,13 @@ def generate_random_floats_dataframe(num_rows, num_cols):
     return pd.DataFrame(data, columns=columns)
 
 
+def generate_random_floats_dataframe_with_index(num_rows, num_cols, freq="s", end_timestamp="1/1/2023"):
+    timestamps = pd.date_range(end=end_timestamp, periods=num_rows, freq=freq)
+    df = generate_random_floats_dataframe(num_rows, num_cols)
+    df.index = timestamps
+    return df
+
+
 def generate_benchmark_df(n, freq="min", end_timestamp="1/1/2023"):
     timestamps = pd.date_range(end=end_timestamp, periods=n, freq=freq)
     k = n // 10
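
Reviewer note on the fraction offsets used in ModificationFunctions.setup: the
update frames are placed inside the prewritten data and the append frames past
its end. A minimal standalone sketch of the helper (assuming, as the setup code
does, one row per second ending at 2023-01-01; printed values are for
rows=100_000):

    import pandas as pd

    def get_time_at_fraction_of_df(fraction, rows):
        # fraction=0.5 lands mid-frame (an in-place update);
        # fraction > 1 lands past the end (an append or upsert).
        end_time = pd.Timestamp("1/1/2023")
        return end_time + pd.tseries.offsets.DateOffset(seconds=round(rows * (fraction - 1)))

    rows = 100_000
    print(get_time_at_fraction_of_df(0.5, rows))  # 2022-12-31 10:06:40 -> df_update_single starts mid-data
    print(get_time_at_fraction_of_df(1.1, rows))  # 2023-01-01 02:46:40 -> df_append_single starts just past the end
    print(get_time_at_fraction_of_df(2, rows))    # 2023-01-02 03:46:40 -> df_append_large starts a full frame later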
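
Usage note for the new common.py helper (a sketch; the import path and sizes
are assumptions, using the module's WIDE_DF_ROWS/WIDE_DF_COLS values):

    import pandas as pd
    from benchmarks.common import generate_random_floats_dataframe_with_index  # hypothetical import path

    df = generate_random_floats_dataframe_with_index(5_000, 30_000)
    assert df.shape == (5_000, 30_000)
    assert df.index[-1] == pd.Timestamp("1/1/2023")                # default end_timestamp
    assert (df.index[1] - df.index[0]) == pd.Timedelta(seconds=1)  # default freq="s"

To try the new group locally, something like `asv run --bench ModificationFunctions`
should select only these benchmarks.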