From 3d62d319d7b2ea7de54903a7b91719f9d8461418 Mon Sep 17 00:00:00 2001 From: stephenyau Date: Mon, 29 Apr 2024 17:57:12 +0800 Subject: [PATCH] feat: break df into chunks, handle df larger than memory --- docs/getting-started/basics.md | 8 +- pfund/accounts/account_crypto.py | 2 +- pfund/brokers/broker_base.py | 2 +- pfund/brokers/broker_crypto.py | 2 +- pfund/brokers/ib/broker_ib.py | 2 +- pfund/brokers/ib/ib_api.py | 2 +- pfund/const/__init__.py | 2 +- pfund/const/{commons.py => common.py} | 0 pfund/data_tools/data_tool_pandas.py | 77 +++--- pfund/data_tools/data_tool_polars.py | 81 ++++++- pfund/datas/data_bar.py | 25 +- pfund/datas/resolution.py | 2 +- pfund/engines/backtest_engine.py | 144 ++++++----- pfund/engines/base_engine.py | 2 +- pfund/exchanges/bybit/ws_api.py | 30 +-- pfund/mixins/backtest.py | 11 +- pfund/models/model_backtest.py | 7 +- pfund/models/model_base.py | 3 + pfund/positions/position_ib.py | 2 +- pfund/products/product_crypto.py | 6 +- pfund/strategies/strategy_base.py | 5 +- pfund/types/common_literals.py | 2 +- pfund/utils/utils.py | 2 +- poetry.lock | 328 ++++++++++++++------------ pyproject.toml | 2 +- 25 files changed, 435 insertions(+), 314 deletions(-) rename pfund/const/{commons.py => common.py} (100%) diff --git a/docs/getting-started/basics.md b/docs/getting-started/basics.md index ad5c281..2410434 100644 --- a/docs/getting-started/basics.md +++ b/docs/getting-started/basics.md @@ -38,7 +38,7 @@ Therefore, a virtual broker named `CRYPTO` has been created as an intermediary t ```{code-cell} :tags: [hide-output] -from pfund.const.commons import SUPPORTED_BROKERS +from pfund.const.common import SUPPORTED_BROKERS from pprint import pprint pprint(SUPPORTED_BROKERS) @@ -70,7 +70,7 @@ Unlike the virtual broker `CRYPTO`, which is an actual broker object in `pfund` ```{code-cell} :tags: [hide-output] -from pfund.const.commons import SUPPORTED_CRYPTO_EXCHANGES +from pfund.const.common import SUPPORTED_CRYPTO_EXCHANGES from pprint import pprint pprint(SUPPORTED_CRYPTO_EXCHANGES) @@ -114,7 +114,7 @@ Financial products/instruments are in the format of `XXX_YYY_PTYPE` where ```{code-cell} :tags: [hide-output] -from pfund.const.commons import SUPPORTED_PRODUCT_TYPES +from pfund.const.common import SUPPORTED_PRODUCT_TYPES from pprint import pprint pprint(SUPPORTED_PRODUCT_TYPES) @@ -132,7 +132,7 @@ Crypto product types supported by `pfund` include: ```{code-cell} :tags: [hide-output] -from pfund.const.commons import SUPPORTED_CRYPTO_PRODUCT_TYPES +from pfund.const.common import SUPPORTED_CRYPTO_PRODUCT_TYPES from pprint import pprint pprint(SUPPORTED_CRYPTO_PRODUCT_TYPES) diff --git a/pfund/accounts/account_crypto.py b/pfund/accounts/account_crypto.py index b3c421a..fee3f20 100644 --- a/pfund/accounts/account_crypto.py +++ b/pfund/accounts/account_crypto.py @@ -1,5 +1,5 @@ from pfund.accounts.account_base import BaseAccount -from pfund.const.commons import SUPPORTED_BYBIT_ACCOUNT_TYPES +from pfund.const.common import SUPPORTED_BYBIT_ACCOUNT_TYPES class CryptoAccount(BaseAccount): diff --git a/pfund/brokers/broker_base.py b/pfund/brokers/broker_base.py index de5e155..a8144fc 100644 --- a/pfund/brokers/broker_base.py +++ b/pfund/brokers/broker_base.py @@ -1,7 +1,7 @@ import logging from collections import defaultdict -from pfund.const.commons import SUPPORTED_ENVIRONMENTS +from pfund.const.common import SUPPORTED_ENVIRONMENTS from pfund.utils.utils import get_engine_class diff --git a/pfund/brokers/broker_crypto.py b/pfund/brokers/broker_crypto.py index c607257..6395490 
100644 --- a/pfund/brokers/broker_crypto.py +++ b/pfund/brokers/broker_crypto.py @@ -11,7 +11,7 @@ from pfund.utils.utils import convert_to_uppercases from pfund.brokers.broker_live import LiveBroker from pfund.exchanges.exchange_base import BaseExchange -from pfund.const.commons import SUPPORTED_CRYPTO_EXCHANGES, SUPPORTED_CRYPTO_PRODUCT_TYPES +from pfund.const.common import SUPPORTED_CRYPTO_EXCHANGES, SUPPORTED_CRYPTO_PRODUCT_TYPES class CryptoBroker(LiveBroker): diff --git a/pfund/brokers/ib/broker_ib.py b/pfund/brokers/ib/broker_ib.py index 3649f93..0658a96 100644 --- a/pfund/brokers/ib/broker_ib.py +++ b/pfund/brokers/ib/broker_ib.py @@ -6,7 +6,7 @@ from pfund.adapter import Adapter from pfund.config.configuration import Configuration from pfund.const.paths import PROJ_CONFIG_PATH -from pfund.const.commons import SUPPORTED_PRODUCT_TYPES +from pfund.const.common import SUPPORTED_PRODUCT_TYPES from pfund.products import IBProduct from pfund.accounts import IBAccount from pfund.orders import IBOrder diff --git a/pfund/brokers/ib/ib_api.py b/pfund/brokers/ib/ib_api.py index e4b74ae..e3b6194 100644 --- a/pfund/brokers/ib/ib_api.py +++ b/pfund/brokers/ib/ib_api.py @@ -10,7 +10,7 @@ from pfund.brokers.ib.ib_client import IBClient from pfund.brokers.ib.ib_wrapper import * -from pfund.const.commons import SUPPORTED_DATA_CHANNELS +from pfund.const.common import SUPPORTED_DATA_CHANNELS from pfund.zeromq import ZeroMQ diff --git a/pfund/const/__init__.py b/pfund/const/__init__.py index 8aa5ab5..3aa0696 100644 --- a/pfund/const/__init__.py +++ b/pfund/const/__init__.py @@ -1,2 +1,2 @@ -from pfund.const.commons import * +from pfund.const.common import * from pfund.const.paths import * \ No newline at end of file diff --git a/pfund/const/commons.py b/pfund/const/common.py similarity index 100% rename from pfund/const/commons.py rename to pfund/const/common.py diff --git a/pfund/data_tools/data_tool_pandas.py b/pfund/data_tools/data_tool_pandas.py index e201cea..a03c49b 100644 --- a/pfund/data_tools/data_tool_pandas.py +++ b/pfund/data_tools/data_tool_pandas.py @@ -1,8 +1,7 @@ from __future__ import annotations from collections import defaultdict -from decimal import Decimal -from typing import TYPE_CHECKING, Iterator +from typing import TYPE_CHECKING, Generator if TYPE_CHECKING: from pfund.datas.data_base import BaseData @@ -15,7 +14,12 @@ class PandasDataTool(BaseDataTool): _INDEX = ['ts', 'product', 'resolution'] _GROUP = ['product', 'resolution'] - _DECIMAL_COLS = ['price', 'open', 'high', 'low', 'close', 'volume'] + + def get_df(self, copy=True): + return self.df.copy(deep=True) if copy else self.df + + def concat(self, dfs: list[pd.DataFrame]) -> pd.DataFrame: + return pd.concat(dfs) def prepare_df(self): assert self._raw_dfs, "No data is found, make sure add_data(...) 
is called correctly" @@ -24,19 +28,37 @@ def prepare_df(self): # arrange columns self.df = self.df[self._INDEX + [col for col in self.df.columns if col not in self._INDEX]] self._raw_dfs.clear() - + + def get_total_rows(self, df: pd.DataFrame): + return df.shape[0] + + @backtest + def iterate_df_by_chunks(self, df: pd.DataFrame, num_chunks=1) -> Generator[pd.DataFrame, None, None]: + total_rows = self.get_total_rows(df) + chunk_size = total_rows // num_chunks + for i in range(0, total_rows, chunk_size): + df_chunk = df.iloc[i:i + chunk_size].copy(deep=True) + yield df_chunk + @backtest - def preprocess_event_driven_df(self, df: pd.DataFrame) -> Iterator: + def preprocess_event_driven_df(self, df: pd.DataFrame) -> pd.DataFrame: + def _check_resolution(res): + from pfund.datas.resolution import Resolution + resolution = Resolution(res) + return resolution.is_quote(), resolution.is_tick() + # converts 'ts' from datetime to unix timestamp - df['ts'] = df['ts'].astype(int) // 10**6 # in milliseconds - df['ts'] = df['ts'] / 10**3 # in seconds with milliseconds precision - # convert float columns to decimal for consistency with live trading - for col in df.columns: - if col in self._DECIMAL_COLS: - df[col] = df[col].apply(lambda x: Decimal(str(x))) - # TODO: split 'broker' str column from 'product' str column - # df['broker'] = ... - return df.itertuples(index=False) + # in milliseconds int -> in seconds with milliseconds precision + df['ts'] = df['ts'].astype(int) // 10**6 / 10**3 + + # add 'broker', 'is_quote', 'is_tick' columns + df['broker'] = df['product'].str.split('-').str[0] + df['is_quote'], df['is_tick'] = zip(*df['resolution'].apply(_check_resolution)) + + # arrange columns + left_cols = self._INDEX + ['broker', 'is_quote', 'is_tick'] + df = df[left_cols + [col for col in df.columns if col not in left_cols]] + return df @backtest def postprocess_vectorized_df(self, df: pd.DataFrame) -> pd.DataFrame: @@ -129,34 +151,33 @@ def append_to_df(self, data: BaseData, predictions: dict, **kwargs): index=self.create_multi_index(index_data, self.df.index.names) ) self.df = pd.concat([self.df, new_row], ignore_index=False) - - def convert_ts_index_to_dt(self, df: pd.DataFrame) -> pd.DataFrame: - ts_index = df.index.get_level_values('ts') - dt_index = pd.to_datetime(ts_index, unit='s') - df.index = df.index.set_levels(dt_index, level='ts') - return df def create_multi_index(self, index_data: dict, index_names: list[str]) -> pd.MultiIndex: return pd.MultiIndex.from_tuples([tuple(index_data[name] for name in index_names)], names=index_names) - - def output_df_to_parquet(self, df: pd.DataFrame, file_path: str): - df.to_parquet(file_path, compression='zstd') - + ''' ************************************************ Helper Functions ************************************************ ''' + def get_index_values(self, df: pd.DataFrame, index: str) -> list: + assert index in df.index.names, f"index must be one of {df.index.names}" + return df.index.get_level_values(index).unique().to_list() + + def set_index_values(self, df: pd.DataFrame, index: str, values: list) -> pd.DataFrame: + assert index in df.index.names, f"index must be one of {df.index.names}" + df.index = df.index.set_levels(values, level=index) + return df + + def output_df_to_parquet(self, df: pd.DataFrame, file_path: str, compression: str='zstd'): + df.to_parquet(file_path, compression=compression) + def filter_df(self, df: pd.DataFrame, start_date: str | None=None, end_date: str | None=None, product: str='', resolution: str=''): product = 
product or slice(None) resolution = resolution or slice(None) return df.loc[(slice(start_date, end_date), product, resolution), :] - def get_index_values(self, df: pd.DataFrame, index: str) -> list: - assert index in self._INDEX, f"index must be one of {self._INDEX}" - return df.index.get_level_values(index).unique().to_list() - def unstack_df(self, df: pd.DataFrame): return df.unstack(level=self._GROUP) diff --git a/pfund/data_tools/data_tool_polars.py b/pfund/data_tools/data_tool_polars.py index d8bec9c..7685748 100644 --- a/pfund/data_tools/data_tool_polars.py +++ b/pfund/data_tools/data_tool_polars.py @@ -1,11 +1,11 @@ from __future__ import annotations from collections import defaultdict -from decimal import Decimal -from typing import TYPE_CHECKING, Iterator +from typing import TYPE_CHECKING, Generator if TYPE_CHECKING: from pfund.datas.data_base import BaseData +import pandas as pd import polars as pl from pfund.data_tools.data_tool_base import BaseDataTool @@ -15,6 +15,12 @@ class PolarsDataTool(BaseDataTool): _INDEX = ['ts', 'product', 'resolution'] + def get_df(self, copy=True): + return self.df.clone() if copy else self.df + + def concat(self, dfs: list[pl.DataFrame | pl.LazyFrame]) -> pl.DataFrame | pl.LazyFrame: + return pl.concat(dfs) + def prepare_df(self): assert self._raw_dfs, "No data is found, make sure add_data(...) is called correctly" self.df = pl.concat(self._raw_dfs.values()) @@ -23,13 +29,55 @@ def prepare_df(self): self.df = self.df.select(self._INDEX + [col for col in self.df.columns if col not in self._INDEX]) self._raw_dfs.clear() + def get_total_rows(self, df: pl.DataFrame | pl.LazyFrame): + if isinstance(df, pl.DataFrame): + return df.shape[0] + elif isinstance(df, pl.LazyFrame): + return df.count().collect()['ts'][0] + else: + raise ValueError("df should be either pl.DataFrame or pl.LazyFrame") + @backtest - def preprocess_event_driven_df(self, df: pl.DataFrame | pl.LazyFrame) -> Iterator: - pass + def iterate_df_by_chunks(self, lf: pl.LazyFrame, num_chunks=1) -> Generator[pd.DataFrame, None, None]: + total_rows = self.get_total_rows(lf) + chunk_size = total_rows // num_chunks + for i in range(0, total_rows, chunk_size): + df_chunk = lf.slice(i, chunk_size).collect() + yield df_chunk @backtest - def postprocess_vectorized_df(self, df: pl.DataFrame | pl.LazyFrame) -> pl.DataFrame | pl.LazyFrame: - pass + def preprocess_event_driven_df(self, df: pl.DataFrame) -> pl.DataFrame: + def _check_resolution(res): + from pfund.datas.resolution import Resolution + resolution = Resolution(res) + return { + 'is_quote': resolution.is_quote(), + 'is_tick': resolution.is_tick() + } + + df = df.with_columns( + # converts 'ts' from datetime to unix timestamp + pl.col("ts").cast(pl.Int64) // 10**6 / 10**3, + + # add 'broker', 'is_quote', 'is_tick' columns + pl.col('product').str.split("-").list.get(0).alias("broker"), + pl.col('resolution').map_elements( + _check_resolution, + return_dtype=pl.Struct([ + pl.Field('is_quote', pl.Boolean), + pl.Field('is_tick', pl.Boolean) + ]) + ).alias('Resolution') + ).unnest('Resolution') + + # arrange columns + left_cols = self._INDEX + ['broker', 'is_quote', 'is_tick'] + df = df.select(left_cols + [col for col in df.columns if col not in left_cols]) + return df + + @backtest + def postprocess_vectorized_df(self, df: pl.DataFrame) -> pl.LazyFrame: + return df.lazy() # TODO: def prepare_df_with_signals(self, models): @@ -39,13 +87,26 @@ def prepare_df_with_signals(self, models): def prepare_datasets(self, datas): pass - # TODO: def 
clear_df(self): - pass + self.df.clear() # TODO: def append_to_df(self, data: BaseData, predictions: dict, **kwargs): pass - def output_df_to_parquet(self, df: pl.DataFrame | pl.LazyFrame, file_path: str): - df.write_parquet(file_path, compression='zstd') \ No newline at end of file + + ''' + ************************************************ + Helper Functions + ************************************************ + ''' + def output_df_to_parquet(self, df: pl.DataFrame | pl.LazyFrame, file_path: str, compression: str='zstd'): + df.write_parquet(file_path, compression=compression) + + # TODO + def filter_df(self, df: pl.DataFrame | pl.LazyFrame, **kwargs) -> pl.DataFrame | pl.LazyFrame: + pass + + # TODO + def unstack_df(self, df: pl.DataFrame | pl.LazyFrame, **kwargs) -> pl.DataFrame | pl.LazyFrame: + pass \ No newline at end of file diff --git a/pfund/datas/data_bar.py b/pfund/datas/data_bar.py index 4d49786..8704029 100644 --- a/pfund/datas/data_bar.py +++ b/pfund/datas/data_bar.py @@ -1,6 +1,5 @@ import sys import logging -from decimal import Decimal from pfund.datas.resolution import Resolution from pfund.datas.data_time_based import TimeBasedData @@ -21,14 +20,16 @@ def __init__(self, product, resolution, shift: int=0): self.timeframe = resolution.timeframe self.unit = self.get_unit() self.shift_unit = self.get_shift_unit(shift) - # variables that will be cleared using clear() for each new bar - self.o = self.open = Decimal(0.0) - self.h = self.high = Decimal(0.0) - self.l = self.low = Decimal(sys.float_info.max) - self.c = self.close = Decimal(0.0) - self.v = self.volume = Decimal(0.0) - self._start_ts = self._end_ts = self.ts = 0.0 + self.clear() + def clear(self): + self.o = self.open = 0.0 + self.h = self.high = 0.0 + self.l = self.low = sys.float_info.max + self.c = self.close = 0.0 + self.v = self.volume = 0.0 + self._start_ts = self._end_ts = self.ts = 0.0 + def __str__(self): bar_type = 'Bar' if not self._start_ts: @@ -116,14 +117,6 @@ def get_unit(self): unit = 60 * 60 * 24 * 7 * 4 * self.period return unit - def clear(self): - self.o = self.open = Decimal(0.0) - self.h = self.high = Decimal(0.0) - self.l = self.low = Decimal(sys.float_info.max) - self.c = self.close = Decimal(0.0) - self.v = self.volume = Decimal(0.0) - self._start_ts = self._end_ts = self.ts = 0.0 - def update(self, o, h, l, c, v, ts, is_volume_aggregated): if not self.o: self.o = self.open = o diff --git a/pfund/datas/resolution.py b/pfund/datas/resolution.py index b020936..6d0aed7 100644 --- a/pfund/datas/resolution.py +++ b/pfund/datas/resolution.py @@ -1,7 +1,7 @@ import re from pfund.datas.timeframe import Timeframe, TimeframeUnits -from pfund.const.commons import SUPPORTED_TIMEFRAMES +from pfund.const.common import SUPPORTED_TIMEFRAMES class Resolution: diff --git a/pfund/engines/backtest_engine.py b/pfund/engines/backtest_engine.py index 140d7b7..8f77470 100644 --- a/pfund/engines/backtest_engine.py +++ b/pfund/engines/backtest_engine.py @@ -14,6 +14,13 @@ if TYPE_CHECKING: from pfund.types.common_literals import tSUPPORTED_BACKTEST_MODES, tSUPPORTED_DATA_TOOLS from pfund.types.core import tStrategy, tModel, tFeature, tIndicator + from pfund.models.model_base import BaseModel + +try: + import pandas as pd + import polars as pl +except ImportError: + pass import pfund as pf from pfund.git_controller import GitController @@ -29,11 +36,12 @@ class BacktestEngine(BaseEngine): def __new__( cls, *, env: str='BACKTEST', data_tool: tSUPPORTED_DATA_TOOLS='polars', mode: tSUPPORTED_BACKTEST_MODES='vectorized', - 
config: ConfigHandler | None=None, - append_signals=False, + config: ConfigHandler | None=None, + append_signals=False, load_models=True, - auto_git_commit=False, + auto_git_commit=False, save_backtests=False, + num_chunks=1, **settings ): if not hasattr(cls, 'mode'): @@ -50,6 +58,8 @@ def __new__( cls.auto_git_commit = auto_git_commit if not hasattr(cls, 'save_backtests'): cls.save_backtests = save_backtests + if not hasattr(cls, 'num_chunks'): + cls.num_chunks = num_chunks return super().__new__(cls, env, data_tool=data_tool, config=config, **settings) def __init__( @@ -185,12 +195,11 @@ def _commit_strategy(self, strategy: BaseStrategy) -> str | None: self.logger.debug(f"Strategy {strat} has no changes to commit, return the last {commit_hash=}") return commit_hash - def _create_backtest_history(self, strat: str, start_time: float, end_time: float): - strategy = self.get_strategy(strat) + def _create_backtest_history(self, strategy: BaseStrategy, start_time: float, end_time: float): initial_balances = {bkr: broker.get_initial_balances() for bkr, broker in self.brokers.items()} backtest_id = self._generate_backtest_id() backtest_hash = self._generate_backtest_hash(strategy) - backtest_name = self._create_backtest_name(strat, backtest_id) + backtest_name = self._create_backtest_name(strategy.name, backtest_id) backtest_iter = self._generate_backtest_iteration(backtest_hash) if self.auto_git_commit and self._git.is_git_repo(): commit_hash = self._commit_strategy(strategy) @@ -216,15 +225,16 @@ def _create_backtest_history(self, strat: str, start_time: float, end_time: floa } return backtest_history - def _output_backtest_results(self, strategy: BaseStrategy, backtest_history: dict) -> dict: + def _output_backtest_results(self, strategy: BaseStrategy, df: pd.DataFrame | pl.LazyFrame, backtest_history: dict) -> dict: backtest_name = backtest_history['metadata']['backtest_name'] - df_file_path = os.path.join(self.config.backtest_path, f'{backtest_name}.parquet') - backtest_history['result'] = df_file_path - dtl = strategy.get_data_tool() - df = strategy.df if self.mode == 'vectorized': - df = dtl.postprocess_vectorized_df(df) - dtl.output_df_to_parquet(df, df_file_path) + output_file_path = os.path.join(self.config.backtest_path, f'{backtest_name}.parquet') + dtl = strategy.get_data_tool() + dtl.output_df_to_parquet(df, output_file_path) + elif self.mode == 'event_driven': + # TODO: output trades? or orders? or df? + output_file_path = ... 
+ backtest_history['result'] = output_file_path self._write_json(f'{backtest_name}.json', backtest_history) return backtest_history @@ -232,82 +242,92 @@ def run(self): for broker in self.brokers.values(): broker.start() self.strategy_manager.start() - - backtests = {} - if self.mode == 'vectorized': - for strat, strategy in self.strategy_manager.strategies.items(): - # _dummy strategy is only created for model training, do nothing - if strat == '_dummy': + backtests = {} + for strat, strategy in self.strategy_manager.strategies.items(): + backtestee = strategy + if strat == '_dummy': + if self.mode == 'vectorized': continue - if not hasattr(strategy, 'backtest'): - raise Exception(f'Strategy {strat} does not have backtest() method, cannot run vectorized backtesting') - start_time = time.time() - strategy.backtest() - end_time = time.time() - self.strategy_manager.stop(strats=strat, reason='finished backtesting') - backtest_history: dict = self._create_backtest_history(strat, start_time, end_time) - if self.save_backtests: - backtest_history = self._output_backtest_results(strategy, backtest_history) - backtests[strat] = backtest_history - elif self.mode == 'event_driven': - for strat, strategy in self.strategy_manager.strategies.items(): - if strat == '_dummy': + elif self.mode == 'event_driven': # dummy strategy has exactly one model model = list(strategy.models.values())[0] backtestee = model - else: - backtestee = strategy - backtestee_type = 'strategy' if isinstance(backtestee, BaseStrategy) else 'model' - dtl = backtestee.get_data_tool() - df = backtestee.df - df_len = df.shape[0] - df_iter = dtl.preprocess_event_driven_df(df) - - # NOTE: clear dfs so that strategies/models don't know anything about the incoming data - backtestee.clear_dfs() - - start_time = time.time() - # OPTIMIZE: critical loop - for row in tqdm(df_iter, total=df_len, desc=f'Backtesting {backtestee_type} {backtestee.name}', colour='yellow'): - resolution: str = row.resolution - product: str = row.product + backtests.update(self.backtest(backtestee)) + self.strategy_manager.stop(reason='finished backtesting') + return backtests
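
For reference, a minimal standalone sketch (not part of the patch) of the chunking pattern that `iterate_df_by_chunks()` and the rewritten `backtest()` above rely on: split the frame into `num_chunks` roughly equal row slices and hand them out one at a time, so only a single chunk needs to be materialized in memory. The sketch assumes plain pandas; the `num_chunks` name follows the patch, while the zero-step guard and the toy data are illustrative.

```python
from typing import Generator

import pandas as pd


def iterate_by_chunks(df: pd.DataFrame, num_chunks: int = 1) -> Generator[pd.DataFrame, None, None]:
    """Yield roughly equal-sized row slices so only one chunk is materialized at a time."""
    total_rows = df.shape[0]
    # guard against num_chunks > total_rows, which would otherwise make the range step 0
    chunk_size = max(total_rows // num_chunks, 1)
    for i in range(0, total_rows, chunk_size):
        yield df.iloc[i:i + chunk_size]


if __name__ == '__main__':
    df = pd.DataFrame({'ts': range(10), 'close': [float(i + 1) for i in range(10)]})
    # process chunk by chunk, then stitch the per-chunk results back together
    chunks = [chunk.assign(ret=chunk['close'].pct_change()) for chunk in iterate_by_chunks(df, num_chunks=3)]
    print(pd.concat(chunks).shape)  # (10, 3)
```
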
+ + def backtest(self, backtestee: BaseStrategy | BaseModel) -> dict: + backtests = {} + backtestee_type = 'strategy' if isinstance(backtestee, BaseStrategy) else 'model' + dtl = backtestee.get_data_tool() + df = backtestee.get_df(copy=True) + + start_time = time.time() + if self.mode == 'vectorized': + if not hasattr(backtestee, 'backtest'): + raise Exception(f'{backtestee_type} {backtestee.name} does not have backtest() method, cannot run vectorized backtesting') + df_chunks = [] + for df_chunk in dtl.iterate_df_by_chunks(df, num_chunks=self.num_chunks): + backtestee.backtest(df_chunk) + df_chunk = dtl.postprocess_vectorized_df(df_chunk) + df_chunks.append(df_chunk) + df = dtl.concat(df_chunks) + elif self.mode == 'event_driven': + total_rows = dtl.get_total_rows(df) + # NOTE: clear dfs so that strategies/models don't know anything about the incoming data + backtestee.clear_dfs() + common_cols = ['ts', 'product', 'resolution', + 'broker', 'is_quote', 'is_tick'] + tqdm_iter = tqdm(total=total_rows, desc=f'Backtesting {backtestee_type} {backtestee.name}', colour='yellow') + # OPTIMIZE: critical loop + for df_chunk in dtl.iterate_df_by_chunks(df, num_chunks=self.num_chunks): + df_chunk = dtl.preprocess_event_driven_df(df_chunk) + if isinstance(df_chunk, pl.DataFrame): + df_chunk = df_chunk.to_pandas() + for row in df_chunk.itertuples(index=False): + tqdm_iter.update(1) + ts, product, resolution = row.ts, row.product, row.resolution broker = self.brokers[row.broker] data_manager = broker.dm - if resolution.is_quote(): + if row.is_quote: # TODO + raise NotImplementedError('Quote data is not supported in event-driven backtesting yet') quote = {} data_manager.update_quote(product, quote) - elif resolution.is_tick(): + elif row.is_tick: # TODO + raise NotImplementedError('Tick data is not supported in event-driven backtesting yet') tick = {} data_manager.update_tick(product, tick) else: + bar_cols = ['open', 'high', 'low', 'close', 'volume'] bar = { 'resolution': resolution, 'data': { - 'ts': row.ts, + 'ts': ts, 'open': row.open, 'high': row.high, 'low': row.low, 'close': row.close, - 'volume': row.volume + 'volume': row.volume, }, 'other_info': { col: getattr(row, col) for col in row._fields - if col not in ['product', 'resolution', 'ts', 'open', 'high', 'low', 'close', 'volume'] + if col not in common_cols + bar_cols }, } - data_manager.update_bar(product, bar, now=row.ts) - end_time = time.time() - self.strategy_manager.stop(strats=strat, reason='finished backtesting') - - if backtestee_type == 'strategy': - backtest_history: dict = self._create_backtest_history(strat, start_time, end_time) - if self.save_backtests: - backtest_history = self._output_backtest_results(strategy, backtest_history) - backtests[strat] = backtest_history + data_manager.update_bar(product, bar, now=ts) else: raise NotImplementedError(f'Backtesting mode {self.mode} is not supported') + end_time = time.time() + print(f'Backtest elapsed time: {end_time - start_time:.3f}(s)') + + if backtestee_type == 'strategy': + backtest_history: dict = self._create_backtest_history(backtestee, start_time, end_time) + if self.save_backtests: + backtest_history = self._output_backtest_results(backtestee, df, backtest_history) + backtests[backtestee.name] = backtest_history + elif backtestee_type == 'model': + backtestee.assert_consistent_signals() return backtests def end(self): diff --git a/pfund/engines/base_engine.py b/pfund/engines/base_engine.py index 61f7b74..1ecfb07 100644 --- a/pfund/engines/base_engine.py +++ b/pfund/engines/base_engine.py @@ -15,7 +15,7 @@ from pfund.strategies.strategy_base import BaseStrategy from pfund.brokers.broker_base import BaseBroker from pfund.managers.strategy_manager import StrategyManager -from pfund.const.commons import SUPPORTED_ENVIRONMENTS, SUPPORTED_BROKERS, SUPPORTED_DATA_TOOLS +from pfund.const.common import SUPPORTED_ENVIRONMENTS, SUPPORTED_BROKERS, SUPPORTED_DATA_TOOLS from pfund.config_handler import ConfigHandler from pfund.plogging import set_up_loggers from pfund.plogging.config import LoggingDictConfigurator diff --git a/pfund/exchanges/bybit/ws_api.py b/pfund/exchanges/bybit/ws_api.py index f89eb47..2c04311 100644 --- a/pfund/exchanges/bybit/ws_api.py +++ b/pfund/exchanges/bybit/ws_api.py @@ -7,7 +7,7 @@ import hmac from decimal import Decimal -from pfund.const.commons import SUPPORTED_DATA_CHANNELS +from pfund.const.common import SUPPORTED_DATA_CHANNELS from pfund.exchanges.ws_api_base import BaseWebsocketApi @@ -278,8 +278,8 @@ def _process_orderbook_l2_msg(self, ws_name, full_channel, msg): asks_l2 = self._asks_l2[pdt] bid_pxs = sorted(bids_l2.keys(), key=lambda px: float(px), reverse=True)[:depth] ask_pxs = sorted(asks_l2.keys(), key=lambda px: float(px), reverse=False)[:depth] - quote['data']['bids'] =
tuple((px, bids_l2[px]) for px in bid_pxs) + quote['data']['asks'] = tuple((px, asks_l2[px]) for px in ask_pxs) zmq = self._get_zmq(ws_name) if zmq: zmq_msg = (1, 1, (self.bkr, self.exch, pdt, quote)) @@ -294,8 +294,8 @@ def _process_tradebook_msg(self, ws_name, full_channel, msg): 'ts': 'ts', 'ts_adj': 1/10**3, # since timestamp in bybit is in mts 'data': { - 'px': ('p', Decimal,), - 'qty': ('v', Decimal, abs), + 'px': ('p', float,), + 'qty': ('v', float, abs), 'ts': ('T', float), }, # NOTE: other_info only exists in public data, e.g. orderbook, tradebook, kline etc. @@ -315,21 +315,21 @@ def _process_kline_msg(self, ws_name, full_channel, msg): 'ts': 'ts', 'ts_adj': 1/10**3, # since timestamp in bybit is in mts 'data': { - 'open': ('open', Decimal), - 'high': ('high', Decimal), - 'low': ('low', Decimal), - 'close': ('close', Decimal), - 'volume': ('volume', Decimal), + 'open': ('open', float), + 'high': ('high', float), + 'low': ('low', float), + 'close': ('close', float), + 'volume': ('volume', float), 'ts': ('timestamp', float), } # TODO: (move to OKX ws_api): # 'ts_adj': 1/10**3, # 'data': { - # 'open': (1, Decimal), - # 'high': (2, Decimal), - # 'low': (3, Decimal), - # 'close': (4, Decimal), - # 'volume': (5, Decimal), + # 'open': (1, float), + # 'high': (2, float), + # 'low': (3, float), + # 'close': (4, float), + # 'volume': (5, float), # 'ts': (0, float), # } } diff --git a/pfund/mixins/backtest.py b/pfund/mixins/backtest.py index b8d96e2..26a2209 100644 --- a/pfund/mixins/backtest.py +++ b/pfund/mixins/backtest.py @@ -80,7 +80,7 @@ def add_indicator(self, indicator: tIndicator, name: str='') -> BacktestMixin | return self.add_model(indicator, name=name) def _get_data_source(self, trading_venue: str, backtest_kwargs: dict): - from pfeed.const.commons import SUPPORTED_DATA_FEEDS + from pfeed.const.common import SUPPORTED_DATA_FEEDS trading_venue = trading_venue.upper() # if data_source is not defined, use trading_venue as data_source if trading_venue in SUPPORTED_DATA_FEEDS and 'data_source' not in backtest_kwargs: @@ -154,7 +154,7 @@ def get_historical_data( datas: list[BaseData], kwargs: dict, backtest_kwargs: dict - ) -> list[pd.DataFrame | pl.DataFrame | pl.LazyFrame]: + ) -> list[pd.DataFrame | pl.LazyFrame]: rollback_period = backtest_kwargs.get('rollback_period', '1w') start_date = backtest_kwargs.get('start_date', '') end_date = backtest_kwargs.get('end_date', '') @@ -189,16 +189,16 @@ def get_historical_data( time.sleep(rate_limit) return dfs - def _convert_pfeed_df_to_pfund_df(self, df: pd.DataFrame | pl.DataFrame | pl.LazyFrame, product: BaseProduct) -> pd.DataFrame | pl.DataFrame | pl.LazyFrame: + def _convert_pfeed_df_to_pfund_df(self, df: pd.DataFrame | pl.DataFrame | pl.LazyFrame, product: BaseProduct) -> pd.DataFrame | pl.LazyFrame: if isinstance(df, pd.DataFrame): - is_empty = df.empty if 'symbol' in df.columns: df = df.drop(columns=['symbol']) if 'product' in df.columns: df = df.drop(columns=['product']) df['product'] = repr(product) elif isinstance(df, (pl.DataFrame, pl.LazyFrame)): - is_empty = df.is_empty() + if isinstance(df, pl.DataFrame): + df = df.lazy() df = df.drop(columns=['symbol', 'product']) df = df.with_columns( pl.lit(repr(product)).alias('product'), @@ -206,5 +206,4 @@ def _convert_pfeed_df_to_pfund_df(self, df: pd.DataFrame | pl.DataFrame | pl.Laz # EXTEND else: raise NotImplementedError(f"{type(df)=} not supported") - assert not is_empty, f"dataframe is empty for {product!r}" return df \ No newline at end of file diff --git 
a/pfund/models/model_backtest.py b/pfund/models/model_backtest.py index 6ad3adc..7e353a9 100644 --- a/pfund/models/model_backtest.py +++ b/pfund/models/model_backtest.py @@ -41,11 +41,6 @@ def add_consumer_datas_if_no_data(self) -> list[BaseData]: def _is_dummy_strategy(self): return isinstance(self._consumer, BaseStrategy) and self._consumer.name == '_dummy' - def stop(self): - super().stop() - if self.engine.mode == 'event_driven' and self._is_dummy_strategy(): - self._assert_consistent_signals() - def load(self): if self.engine.load_models: super().load() @@ -89,7 +84,7 @@ def next(self): return new_pred.to_numpy() # FIXME: pandas specific - def _assert_consistent_signals(self): + def assert_consistent_signals(self): '''Asserts consistent model signals from vectorized and event-driven backtesting, triggered in event-driven backtesting''' import pandas.testing as pdt event_driven_signal = self.signal diff --git a/pfund/models/model_base.py b/pfund/models/model_base.py index 86175dd..810c5a1 100644 --- a/pfund/models/model_base.py +++ b/pfund/models/model_base.py @@ -125,6 +125,9 @@ def __getattr__(self, attr): def df(self): return self._data_tool.df + def get_df(self, copy=True): + return self._data_tool.get_df(copy=copy) + def get_data_tool(self): return self._data_tool diff --git a/pfund/positions/position_ib.py b/pfund/positions/position_ib.py index 084de66..9a51fdb 100644 --- a/pfund/positions/position_ib.py +++ b/pfund/positions/position_ib.py @@ -4,7 +4,7 @@ from numpy import sign -from pfund.const.commons import PRODUCT_TYPES_AS_ASSETS +from pfund.const.common import PRODUCT_TYPES_AS_ASSETS from pfund.positions.position_base import BasePosition diff --git a/pfund/products/product_crypto.py b/pfund/products/product_crypto.py index ebd4cd6..8bc3fc1 100644 --- a/pfund/products/product_crypto.py +++ b/pfund/products/product_crypto.py @@ -1,9 +1,9 @@ from decimal import Decimal from pfund.products.product_base import BaseProduct -from pfund.const.commons import SUPPORTED_CRYPTO_PRODUCT_TYPES -from pfund.const.commons import SUPPORTED_CRYPTO_MONTH_CODES -from pfund.const.commons import CRYPTO_PRODUCT_TYPES_WITH_MATURITY +from pfund.const.common import SUPPORTED_CRYPTO_PRODUCT_TYPES +from pfund.const.common import SUPPORTED_CRYPTO_MONTH_CODES +from pfund.const.common import CRYPTO_PRODUCT_TYPES_WITH_MATURITY class CryptoProduct(BaseProduct): diff --git a/pfund/strategies/strategy_base.py b/pfund/strategies/strategy_base.py index 2fa09e1..7ea9855 100644 --- a/pfund/strategies/strategy_base.py +++ b/pfund/strategies/strategy_base.py @@ -24,7 +24,7 @@ from pfund.orders.order_base import BaseOrder from pfund.zeromq import ZeroMQ from pfund.risk_monitor import RiskMonitor -from pfund.const.commons import SUPPORTED_CRYPTO_EXCHANGES +from pfund.const.common import SUPPORTED_CRYPTO_EXCHANGES from pfund.strategies.strategy_meta import MetaStrategy from pfund.utils.utils import convert_to_uppercases, get_engine_class, load_yaml_file, convert_ts_to_dt from pfund.plogging import create_dynamic_logger @@ -106,6 +106,9 @@ def __init__(self, *args, **kwargs): @property def df(self): return self._data_tool.df + + def get_df(self, copy=True): + return self._data_tool.get_df(copy=copy) def get_data_tool(self): return self._data_tool diff --git a/pfund/types/common_literals.py b/pfund/types/common_literals.py index 52ab850..76a1258 100644 --- a/pfund/types/common_literals.py +++ b/pfund/types/common_literals.py @@ -1,6 +1,6 @@ from typing import Literal -# since Literal doesn't support variables as inputs, 
define variables in commons.py here with prefix 't' +# since Literal doesn't support variables as inputs, define variables in common.py here with prefix 't' tSUPPORTED_ENVIRONMENTS = Literal['BACKTEST', 'TRAIN', 'SANDBOX', 'PAPER', 'LIVE'] tSUPPORTED_BROKERS = Literal['CRYPTO', 'IB'] tSUPPORTED_CRYPTO_EXCHANGES = Literal['BYBIT'] diff --git a/pfund/utils/utils.py b/pfund/utils/utils.py index 02bd85c..3ab5d1f 100644 --- a/pfund/utils/utils.py +++ b/pfund/utils/utils.py @@ -119,7 +119,7 @@ def find_strategy_class(strat: str): def get_engine_class(): - from pfund.const.commons import SUPPORTED_ENVIRONMENTS + from pfund.const.common import SUPPORTED_ENVIRONMENTS env = os.getenv('env') assert env in SUPPORTED_ENVIRONMENTS, f'Unsupported {env=}' if env == 'BACKTEST': diff --git a/poetry.lock b/poetry.lock index 391483a..56d66ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -18,7 +18,7 @@ pygments = ">=1.5" name = "aiobotocore" version = "2.12.3" description = "Async client for aws services using botocore and aiohttp" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "aiobotocore-2.12.3-py3-none-any.whl", hash = "sha256:86737685f4625e8f05c4e7a608a07cc97607263279f66cf6b02b640c4eafd324"}, @@ -37,91 +37,92 @@ boto3 = ["boto3 (>=1.34.41,<1.34.70)"] [[package]] name = "aiohttp" -version = "3.9.0b0" +version = "3.9.5" description = "Async http client/server framework (asyncio)" -optional = true +optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.0b0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:50b550b5e317e40a017bab8b25995676af3aa66dd0ef562cd7dce7f1684cd376"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8f902ad26b9814852e0a17d48f98ba4c879d8136c4fa9b235b5c043dde0a0257"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2140de122ecf3eb7947105ceb91fb6632fb21cc1d17f6ff19c3973d2d12730d"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e014b343225d8d358ee91962b588e863fded12a6e2f9b446bb3be85c678e04ae"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c7016695087e616a2806ccdb1f83609e5fecb3958c270e3e5a42f69d225536f2"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40407d5ec81682225ad5538d9bd68b0f8242caa91e72a6a9a95197fd7d9aebb2"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd54502e6b4144785f2f14a5f1544ced0a77dbecb1fd422f21dfad95dcb7fcb8"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f911fd2073621eecfe77b17926460e72980b9b996d0ab7dad5e38805ce2988"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:638ba28af2c821b70574664a991dfdfaf1a7a7ae1a8068757f7d59cdf2d8361a"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:76329f7c1f5f3185d91d61d64615d88fa3dfddf389a83f6cd46a205c5b61e01b"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:597b5d44b613dea9c62779592eb0ecae87604628564ecaff8d516457def68184"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:cd5edd7ba2b3f95346e0fc8ba2364bdd93917a1bf8528e7d60ec80cf21dfba7e"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:72556e0cce47c6e558454316fc5c6a3fb0980344eee8af7aa52b495d82ef12a5"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-win32.whl", hash = "sha256:01a3b241288c4d8171fe5e2434a799d0b82700d2ed2156b43f1d7f4f521ba382"}, - {file = "aiohttp-3.9.0b0-cp310-cp310-win_amd64.whl", hash = "sha256:17962c404788c348ce5b58efaf4969249183c551890e30bfd9c035188d21e3d1"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:94197a77859ab1039b9ca6c3c393b8e7b5fc34a9abfbcb58daac38ab89684a99"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c78d2cfe1515cfb31ba67edf0518c6677a963ec2039b652b03a886733e72e65"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28b38a14f564c833e59c99f748b48803e4babeabc6a0307952b01e6c8d642cab"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e596cfc52380f71e197e7cf0e2d3c4714b4bf66d2d562cdbd5442284bac18909"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6190951b7933c834d9346e21c5a81642caa210d291cda4036daf85fc53162d35"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb0cb2cbf95cf4cc40307d0d0187f59c4b86b1d7d1a624922a7d0b046deffba7"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e27c283e21e94fa1582d31b57c514b87ab609882ade413ce43f585d73c8a33fc"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6826c59b4e99673728bcdaecacbd699b7521f17ca165c63a5e26e23d42aeea5"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aa4738f3b1b916b1cc69ed3d1dead9714919dc4d30ae0d5f6d55eadb2c511133"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b2abd7936f687de3a3ab199b145a9de01ed046eb5640cd66f47da07a9050a78"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:652cc00a97bc206c470db06276ce57ff2a53a625795bbce8435ef8b6a4cb0113"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d54529c1d95d5d200ecb7133a343785e5661a804f3dcee090a7bca3b48189d69"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:324fe990c97721ea8eb4d439f12b59d1a93cd7e0dd188c7b145bffdfbd327dc3"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-win32.whl", hash = "sha256:3a2ef8318435f40f5906af36fda20b5432e07e6a7e05de3a4d2934c25320b8ff"}, - {file = "aiohttp-3.9.0b0-cp311-cp311-win_amd64.whl", hash = "sha256:887d8757aafc7f6fbda76faaff21fc2aa31b9dca0911ecd6b60b0fe922a2abfc"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c430c706589a811b38e33e1492d194cbb0f6f2e027877bf038debced703446f"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b820981f1c5d6da382e4859318ba78c9b5c583f0920e44a18efb3387b18487e"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c64677a2df742bcd89b94c35689306663d8246a8534bea5835afc706416f8dd6"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:903155c179cda589d01936953158685747af43d98cdd3673a671c6e7f5c94178"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77cbb6e4a146449f805fa0e725b0b2a06411d21417d8eca699bbee55204201d0"}, - {file = 
"aiohttp-3.9.0b0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc3cc9f5e6e493a2b9c3d241fca870b5a64aa4c247f1192f9e34fae990667df8"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92071206e570b7da6380f8d376820e2a40230638b8fd8b45b28103b346704c5e"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:242e3cb0b2d441a2d20443114eebe3032078d1894ac1d97ab2dd101165ea50e1"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:044c5a8923bd44a4a0769a2886130c19f7f3a4a1a284f0ff68c2a751920ee39f"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b1b0d0f63ff48f80aa89be3ff61bc2b980c5b02895c81dbc1e44ce7b6cb5b7"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:f737a47b5df97b6da457a0b2739d6d819ffadea2f36336988b53dbdb1796ba89"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:e6d79f8b8347afbecd8047a1f6e74c810eb82497256cc906ee384635174dcaea"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f1b0a821564e315ec5cfa0abaf048355e229995a812380ec7a2200d87a6ed11"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-win32.whl", hash = "sha256:ab2702f281ca504529e82be78dae2b9ca31d51a92ab8b239bd326b74c79d7af4"}, - {file = "aiohttp-3.9.0b0-cp312-cp312-win_amd64.whl", hash = "sha256:b81722b88abd4aab656abfec122646b6171da64340ff92af3bcf1af5f0d1275e"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:49e2ca017f506d1a9c60f44301ceff2eb8bbfe24b9cd9b4c4a363d9e5f68e92b"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06cba5518d8e30b46fcec2a8ed22ec6027fc9864583e0b538da642507f66fe29"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e5201d3f8d0b2748eba5093820861639cac1ea1dfdff537f67152a1c082e1243"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c483d0a666f6cbec2e974f760f93499bbcfcb17a7c4035d4c4c653e6a3b21b1"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04f48476ce3e96843b44084fd15139b195781c10ed6eb5ffb706fb9d2ca95ce4"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09fdad08544a4479e5801c777697c155fa9d966c91b6dcf3e1a0d271ad3999f7"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:127aa57415005eb04fb1a3685c9d7b42aef6718be72b8a62b4b30ee00f7d23f4"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa8f29f0647f10f6bcd9f597f1319d13ce1d6efe2d55169226940093eeadf609"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8dc394dea47594825ac2a662c4fac6a8b294acd937396aaec8e41ed03728898b"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c332b343974c6fbfec53e3ac7afebd6ba6cc1777cda67c28fabb3562411a9b5a"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6dfad718b328de3fa30d663393d51feea625322ec723bdecdec3f5f52ba6347f"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6edaeb63a4657672b04afcc25c253e960125e805f5a8f8cfa7bf682d15115f49"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:20023087bce5f3adde4872042ea1193d31d98b29682c28a6309d72bce0d9725e"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-win32.whl", hash = "sha256:ad07ee4165a82e646310c152a74997c759d5782aef58bab9d77034b4cc87e153"}, - {file = "aiohttp-3.9.0b0-cp38-cp38-win_amd64.whl", hash = "sha256:494062a8447c6665f5237c47ca8bb5659cd3128ad9b4af5543566a11bb88df5c"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:aaff57bd1ab9eb1a205f3b7a00e2dc159d1e7e4373870be0d192358a656d9e60"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c212f5066ffe9490856b706a9d9bd457f14716f4db4b1b73939245a1acecc4e"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d80664b3b82fb9ee2c7b13072651cd68d65fbb3a69721040c08969bab4335628"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7cf539fc98297e312308405949ca2f04a347eb021e30d004388cdb5d155a0ec"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6be520717b895508c63df90e48135ba616c702a9229d4be71841dce2ea6a569f"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b25e926cd16b44aeef29fffbb9fc9f577f52a6230e46926e391545b85cd0ce3"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35f6cafe361c0323945c13122c282ea22fb0df96e845f34c4d8abd96e2a81995"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c9851e3d0396686d96a7e3559bf5912ed79c944ff1a6ae3cf7b1da320c3ad2b"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ab413eddeb1a03ba84d06acf7024a646b049d991ed0616bcc1ee40dc8fffa9e"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:89b271a8658472a9d400836ee8caee743246bae5c06405a63b6ba366f58df727"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dd941d473b86d0d5a413a1832499e5b80f648d66ca0c8246c26a4ccd66bcf7ec"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ce4f000279fb85527c017ef429615f2cb5a0cb614c088610849ddc6c2ac8d91b"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f50a4f6773a9eedefb24b42c611e31dcd13f6139419a8656f7e525cb8a00687e"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-win32.whl", hash = "sha256:b14dcfcc5ad161d007da71e1c1211909d527d9d7c2795ea9e17191ba25e5d89a"}, - {file = "aiohttp-3.9.0b0-cp39-cp39-win_amd64.whl", hash = "sha256:567245a91a57c41899f5d266814c9da8782d3d949dc1e66469429f08713a3ec6"}, - {file = "aiohttp-3.9.0b0.tar.gz", hash = "sha256:cecc64fd7bae6debdf43437e3c83183c40d4f4d86486946f412c113960598eee"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = 
"aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = 
"aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = 
"aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, ] [package.dependencies] aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" @@ -134,7 +135,7 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aioitertools" version = "0.11.0" description = "itertools and builtins for AsyncIO and mixed iterables" -optional = true +optional = false python-versions = ">=3.6" files = [ {file = "aioitertools-0.11.0-py3-none-any.whl", hash = "sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394"}, @@ -145,7 +146,7 @@ files = [ name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, @@ -210,6 +211,17 @@ files = [ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] +[[package]] +name = "ansicolors" +version = "1.1.8" +description = "ANSI colors for Python" +optional = true +python-versions = "*" +files = [ + {file = "ansicolors-1.1.8-py2.py3-none-any.whl", hash = "sha256:00d2dde5a675579325902536738dd27e4fac1fd68f773fe36c21044eb559e187"}, + {file = "ansicolors-1.1.8.zip", hash = "sha256:99f94f5e3348a0bcd43c82e5fc4414013ccc19d70bd939ad71e0133ce9c372e0"}, +] + [[package]] name = "anyio" version = "4.3.0" @@ -236,7 +248,7 @@ trio = ["trio (>=0.23)"] name = "appdirs" version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = true +optional = false python-versions = "*" files = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, @@ -376,6 +388,17 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + [[package]] name = "attrs" version = "23.2.0" @@ -474,20 +497,20 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "blinker" -version = "1.7.0" +version = "1.8.0" description = "Fast, simple object-to-object and broadcast signaling" optional = true python-versions = ">=3.8" files = [ - {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, - {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, + {file = "blinker-1.8.0-py3-none-any.whl", hash = "sha256:5f2c330c2586b5d4c5ece65e4dd9fa6512192d946f7c1aaac31ca0380d5d041f"}, + {file = "blinker-1.8.0.tar.gz", hash = "sha256:a07839c713d30141433647247db269dd896895b0bf56d2362f663496feae562b"}, ] [[package]] name = "botocore" version = "1.34.69" description = "Low-level, data-driven core of boto 3." -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "botocore-1.34.69-py3-none-any.whl", hash = "sha256:d3802d076d4d507bf506f9845a6970ce43adc3d819dd57c2791f5c19ed6e5950"}, @@ -840,7 +863,7 @@ testing = ["mock", "pytest", "pytest-cov"] name = "connectorx" version = "0.3.2" description = "" -optional = true +optional = false python-versions = "*" files = [ {file = "connectorx-0.3.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:98274242c64a2831a8b1c86e0fa2c46a557dd8cbcf00c3adcf5a602455fb02d7"}, @@ -1278,7 +1301,7 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "feast" -version = "0.37.1.dev23+ge873636b4" +version = "0.37.1.dev25+g93ddb11bf" description = "Python SDK for Feast" optional = true python-versions = ">=3.9.0" @@ -1317,10 +1340,10 @@ uvicorn = {version = ">=0.14.0,<1", extras = ["standard"]} aws = ["boto3 (>=1.17.0,<2)", "docker (>=5.0.2)", "fsspec (<=2024.1.0)"] azure = ["SQLAlchemy (>=1.4.19)", "azure-identity (>=1.6.1)", "azure-storage-blob (>=0.37.0)", "pymssql", "pyodbc (>=4.0.30)"] cassandra = ["cassandra-driver (>=3.24.0,<4)"] -ci = ["SQLAlchemy (>=1.4.19)", "Sphinx (>4.0.0,<7)", "assertpy (==1.1)", "azure-identity (>=1.6.1)", "azure-storage-blob (>=0.37.0)", "boto3 (>=1.17.0,<2)", "build", "cassandra-driver (>=3.24.0,<4)", "cryptography (>=35.0,<43)", "deltalake", "docker (>=5.0.2)", "firebase-admin (>=5.2.0,<6)", "fsspec (<=2024.1.0)", "google-api-core (>=1.23.0,<3)", "google-cloud-bigquery-storage (>=2.0.0,<3)", "google-cloud-bigquery[pandas] (>=2,<3.13.0)", "google-cloud-bigtable (>=2.11.0,<3)", "google-cloud-datastore (>=2.1.0,<3)", "google-cloud-storage (>=1.34.0,<3)", "googleapis-common-protos (>=1.52.0,<2)", "great-expectations (>=0.15.41)", "grpcio (>=1.56.2,<2)", "grpcio-health-checking (>=1.56.2,<2)", "grpcio-reflection (>=1.56.2,<2)", "grpcio-testing 
(>=1.56.2,<2)", "grpcio-tools (>=1.56.2,<2)", "happybase (>=1.2.0,<3)", "hazelcast-python-client (>=5.1)", "hiredis (>=2.0.0,<3)", "httpx (>=0.23.3)", "ibis-framework", "ibis-framework[duckdb]", "ibis-substrait", "kubernetes (<=20.13.0)", "minio (==7.1.0)", "mock (==2.0.0)", "moto (<5)", "mypy (>=1.4.1)", "pip-tools", "pre-commit (<3.3.2)", "psutil (==5.9.0)", "psycopg2-binary (>=2.8.3,<3)", "py (>=1.11.0)", "pybindgen", "pymssql", "pymysql", "pyodbc (>=4.0.30)", "pyspark (>=3.0.0,<4)", "pytest (>=6.0.0,<8)", "pytest-benchmark (>=3.4.1,<4)", "pytest-cov", "pytest-env", "pytest-lazy-fixture (==0.6.3)", "pytest-mock (==1.10.4)", "pytest-ordering (>=0.6.0,<0.7.0)", "pytest-timeout (==1.4.2)", "pytest-xdist", "redis (>=4.2.2,<5)", "regex", "rockset (>=1.0.3)", "ruff (>=0.3.3)", "snowflake-connector-python[pandas] (>=3.7,<4)", "testcontainers (==4.3.3)", "trino (>=0.305.0,<0.400.0)", "types-PyMySQL", "types-PyYAML", "types-protobuf (>=3.19.22,<3.20.0)", "types-python-dateutil", "types-pytz", "types-redis", "types-requests (<2.31.0)", "types-setuptools", "types-tabulate", "urllib3 (>=1.25.4,<3)", "virtualenv (<20.24.2)", "virtualenv (==20.23.0)"] +ci = ["SQLAlchemy (>=1.4.19)", "Sphinx (>4.0.0,<7)", "assertpy (==1.1)", "azure-identity (>=1.6.1)", "azure-storage-blob (>=0.37.0)", "boto3 (>=1.17.0,<2)", "build", "cassandra-driver (>=3.24.0,<4)", "cryptography (>=35.0,<43)", "deltalake", "docker (>=5.0.2)", "firebase-admin (>=5.2.0,<6)", "fsspec (<=2024.1.0)", "google-api-core (>=1.23.0,<3)", "google-cloud-bigquery-storage (>=2.0.0,<3)", "google-cloud-bigquery[pandas] (>=2,<3.13.0)", "google-cloud-bigtable (>=2.11.0,<3)", "google-cloud-datastore (>=2.1.0,<3)", "google-cloud-storage (>=1.34.0,<3)", "googleapis-common-protos (>=1.52.0,<2)", "great-expectations (>=0.15.41)", "grpcio (>=1.56.2,<2)", "grpcio-health-checking (>=1.56.2,<2)", "grpcio-reflection (>=1.56.2,<2)", "grpcio-testing (>=1.56.2,<2)", "grpcio-tools (>=1.56.2,<2)", "happybase (>=1.2.0,<3)", "hazelcast-python-client (>=5.1)", "hiredis (>=2.0.0,<3)", "httpx (>=0.23.3)", "ibis-framework", "ibis-framework[duckdb]", "ibis-substrait", "kubernetes (<=20.13.0)", "minio (==7.1.0)", "mock (==2.0.0)", "moto (<5)", "mypy (>=1.4.1)", "pip-tools", "pre-commit (<3.3.2)", "psutil (==5.9.0)", "psycopg2-binary (>=2.8.3,<3)", "py (>=1.11.0)", "pybindgen", "pymssql", "pymysql", "pyodbc (>=4.0.30)", "pyspark (>=3.0.0,<4)", "pytest (>=6.0.0,<8)", "pytest-benchmark (>=3.4.1,<4)", "pytest-cov", "pytest-env", "pytest-lazy-fixture (==0.6.3)", "pytest-mock (==1.10.4)", "pytest-ordering (>=0.6.0,<0.7.0)", "pytest-timeout (==1.4.2)", "pytest-xdist", "redis (>=4.2.2,<5)", "regex", "rockset (>=1.0.3)", "ruff (>=0.3.3)", "snowflake-connector-python[pandas] (>=3.7,<4)", "testcontainers (==4.4.0)", "trino (>=0.305.0,<0.400.0)", "types-PyMySQL", "types-PyYAML", "types-protobuf (>=3.19.22,<3.20.0)", "types-python-dateutil", "types-pytz", "types-redis", "types-requests (<2.31.0)", "types-setuptools", "types-tabulate", "urllib3 (>=1.25.4,<3)", "virtualenv (<20.24.2)", "virtualenv (==20.23.0)"] delta = ["deltalake"] -dev = ["SQLAlchemy (>=1.4.19)", "Sphinx (>4.0.0,<7)", "assertpy (==1.1)", "azure-identity (>=1.6.1)", "azure-storage-blob (>=0.37.0)", "boto3 (>=1.17.0,<2)", "build", "cassandra-driver (>=3.24.0,<4)", "cryptography (>=35.0,<43)", "deltalake", "docker (>=5.0.2)", "firebase-admin (>=5.2.0,<6)", "fsspec (<=2024.1.0)", "google-api-core (>=1.23.0,<3)", "google-cloud-bigquery-storage (>=2.0.0,<3)", "google-cloud-bigquery[pandas] (>=2,<3.13.0)", 
"google-cloud-bigtable (>=2.11.0,<3)", "google-cloud-datastore (>=2.1.0,<3)", "google-cloud-storage (>=1.34.0,<3)", "googleapis-common-protos (>=1.52.0,<2)", "great-expectations (>=0.15.41)", "grpcio (>=1.56.2,<2)", "grpcio-health-checking (>=1.56.2,<2)", "grpcio-reflection (>=1.56.2,<2)", "grpcio-testing (>=1.56.2,<2)", "grpcio-tools (>=1.56.2,<2)", "happybase (>=1.2.0,<3)", "hazelcast-python-client (>=5.1)", "hiredis (>=2.0.0,<3)", "httpx (>=0.23.3)", "ibis-framework", "ibis-framework[duckdb]", "ibis-substrait", "kubernetes (<=20.13.0)", "minio (==7.1.0)", "mock (==2.0.0)", "moto (<5)", "mypy (>=1.4.1)", "pip-tools", "pre-commit (<3.3.2)", "psutil (==5.9.0)", "psycopg2-binary (>=2.8.3,<3)", "py (>=1.11.0)", "pybindgen", "pymssql", "pymysql", "pyodbc (>=4.0.30)", "pyspark (>=3.0.0,<4)", "pytest (>=6.0.0,<8)", "pytest-benchmark (>=3.4.1,<4)", "pytest-cov", "pytest-env", "pytest-lazy-fixture (==0.6.3)", "pytest-mock (==1.10.4)", "pytest-ordering (>=0.6.0,<0.7.0)", "pytest-timeout (==1.4.2)", "pytest-xdist", "redis (>=4.2.2,<5)", "regex", "rockset (>=1.0.3)", "ruff (>=0.3.3)", "snowflake-connector-python[pandas] (>=3.7,<4)", "testcontainers (==4.3.3)", "trino (>=0.305.0,<0.400.0)", "types-PyMySQL", "types-PyYAML", "types-protobuf (>=3.19.22,<3.20.0)", "types-python-dateutil", "types-pytz", "types-redis", "types-requests (<2.31.0)", "types-setuptools", "types-tabulate", "urllib3 (>=1.25.4,<3)", "virtualenv (<20.24.2)", "virtualenv (==20.23.0)"] -docs = ["SQLAlchemy (>=1.4.19)", "Sphinx (>4.0.0,<7)", "assertpy (==1.1)", "azure-identity (>=1.6.1)", "azure-storage-blob (>=0.37.0)", "boto3 (>=1.17.0,<2)", "build", "cassandra-driver (>=3.24.0,<4)", "cryptography (>=35.0,<43)", "deltalake", "docker (>=5.0.2)", "firebase-admin (>=5.2.0,<6)", "fsspec (<=2024.1.0)", "google-api-core (>=1.23.0,<3)", "google-cloud-bigquery-storage (>=2.0.0,<3)", "google-cloud-bigquery[pandas] (>=2,<3.13.0)", "google-cloud-bigtable (>=2.11.0,<3)", "google-cloud-datastore (>=2.1.0,<3)", "google-cloud-storage (>=1.34.0,<3)", "googleapis-common-protos (>=1.52.0,<2)", "great-expectations (>=0.15.41)", "grpcio (>=1.56.2,<2)", "grpcio-health-checking (>=1.56.2,<2)", "grpcio-reflection (>=1.56.2,<2)", "grpcio-testing (>=1.56.2,<2)", "grpcio-tools (>=1.56.2,<2)", "happybase (>=1.2.0,<3)", "hazelcast-python-client (>=5.1)", "hiredis (>=2.0.0,<3)", "httpx (>=0.23.3)", "ibis-framework", "ibis-framework[duckdb]", "ibis-substrait", "kubernetes (<=20.13.0)", "minio (==7.1.0)", "mock (==2.0.0)", "moto (<5)", "mypy (>=1.4.1)", "pip-tools", "pre-commit (<3.3.2)", "psutil (==5.9.0)", "psycopg2-binary (>=2.8.3,<3)", "py (>=1.11.0)", "pybindgen", "pymssql", "pymysql", "pyodbc (>=4.0.30)", "pyspark (>=3.0.0,<4)", "pytest (>=6.0.0,<8)", "pytest-benchmark (>=3.4.1,<4)", "pytest-cov", "pytest-env", "pytest-lazy-fixture (==0.6.3)", "pytest-mock (==1.10.4)", "pytest-ordering (>=0.6.0,<0.7.0)", "pytest-timeout (==1.4.2)", "pytest-xdist", "redis (>=4.2.2,<5)", "regex", "rockset (>=1.0.3)", "ruff (>=0.3.3)", "snowflake-connector-python[pandas] (>=3.7,<4)", "testcontainers (==4.3.3)", "trino (>=0.305.0,<0.400.0)", "types-PyMySQL", "types-PyYAML", "types-protobuf (>=3.19.22,<3.20.0)", "types-python-dateutil", "types-pytz", "types-redis", "types-requests (<2.31.0)", "types-setuptools", "types-tabulate", "urllib3 (>=1.25.4,<3)", "virtualenv (<20.24.2)", "virtualenv (==20.23.0)"] +dev = ["SQLAlchemy (>=1.4.19)", "Sphinx (>4.0.0,<7)", "assertpy (==1.1)", "azure-identity (>=1.6.1)", "azure-storage-blob (>=0.37.0)", "boto3 (>=1.17.0,<2)", "build", 
"cassandra-driver (>=3.24.0,<4)", "cryptography (>=35.0,<43)", "deltalake", "docker (>=5.0.2)", "firebase-admin (>=5.2.0,<6)", "fsspec (<=2024.1.0)", "google-api-core (>=1.23.0,<3)", "google-cloud-bigquery-storage (>=2.0.0,<3)", "google-cloud-bigquery[pandas] (>=2,<3.13.0)", "google-cloud-bigtable (>=2.11.0,<3)", "google-cloud-datastore (>=2.1.0,<3)", "google-cloud-storage (>=1.34.0,<3)", "googleapis-common-protos (>=1.52.0,<2)", "great-expectations (>=0.15.41)", "grpcio (>=1.56.2,<2)", "grpcio-health-checking (>=1.56.2,<2)", "grpcio-reflection (>=1.56.2,<2)", "grpcio-testing (>=1.56.2,<2)", "grpcio-tools (>=1.56.2,<2)", "happybase (>=1.2.0,<3)", "hazelcast-python-client (>=5.1)", "hiredis (>=2.0.0,<3)", "httpx (>=0.23.3)", "ibis-framework", "ibis-framework[duckdb]", "ibis-substrait", "kubernetes (<=20.13.0)", "minio (==7.1.0)", "mock (==2.0.0)", "moto (<5)", "mypy (>=1.4.1)", "pip-tools", "pre-commit (<3.3.2)", "psutil (==5.9.0)", "psycopg2-binary (>=2.8.3,<3)", "py (>=1.11.0)", "pybindgen", "pymssql", "pymysql", "pyodbc (>=4.0.30)", "pyspark (>=3.0.0,<4)", "pytest (>=6.0.0,<8)", "pytest-benchmark (>=3.4.1,<4)", "pytest-cov", "pytest-env", "pytest-lazy-fixture (==0.6.3)", "pytest-mock (==1.10.4)", "pytest-ordering (>=0.6.0,<0.7.0)", "pytest-timeout (==1.4.2)", "pytest-xdist", "redis (>=4.2.2,<5)", "regex", "rockset (>=1.0.3)", "ruff (>=0.3.3)", "snowflake-connector-python[pandas] (>=3.7,<4)", "testcontainers (==4.4.0)", "trino (>=0.305.0,<0.400.0)", "types-PyMySQL", "types-PyYAML", "types-protobuf (>=3.19.22,<3.20.0)", "types-python-dateutil", "types-pytz", "types-redis", "types-requests (<2.31.0)", "types-setuptools", "types-tabulate", "urllib3 (>=1.25.4,<3)", "virtualenv (<20.24.2)", "virtualenv (==20.23.0)"] +docs = ["SQLAlchemy (>=1.4.19)", "Sphinx (>4.0.0,<7)", "assertpy (==1.1)", "azure-identity (>=1.6.1)", "azure-storage-blob (>=0.37.0)", "boto3 (>=1.17.0,<2)", "build", "cassandra-driver (>=3.24.0,<4)", "cryptography (>=35.0,<43)", "deltalake", "docker (>=5.0.2)", "firebase-admin (>=5.2.0,<6)", "fsspec (<=2024.1.0)", "google-api-core (>=1.23.0,<3)", "google-cloud-bigquery-storage (>=2.0.0,<3)", "google-cloud-bigquery[pandas] (>=2,<3.13.0)", "google-cloud-bigtable (>=2.11.0,<3)", "google-cloud-datastore (>=2.1.0,<3)", "google-cloud-storage (>=1.34.0,<3)", "googleapis-common-protos (>=1.52.0,<2)", "great-expectations (>=0.15.41)", "grpcio (>=1.56.2,<2)", "grpcio-health-checking (>=1.56.2,<2)", "grpcio-reflection (>=1.56.2,<2)", "grpcio-testing (>=1.56.2,<2)", "grpcio-tools (>=1.56.2,<2)", "happybase (>=1.2.0,<3)", "hazelcast-python-client (>=5.1)", "hiredis (>=2.0.0,<3)", "httpx (>=0.23.3)", "ibis-framework", "ibis-framework[duckdb]", "ibis-substrait", "kubernetes (<=20.13.0)", "minio (==7.1.0)", "mock (==2.0.0)", "moto (<5)", "mypy (>=1.4.1)", "pip-tools", "pre-commit (<3.3.2)", "psutil (==5.9.0)", "psycopg2-binary (>=2.8.3,<3)", "py (>=1.11.0)", "pybindgen", "pymssql", "pymysql", "pyodbc (>=4.0.30)", "pyspark (>=3.0.0,<4)", "pytest (>=6.0.0,<8)", "pytest-benchmark (>=3.4.1,<4)", "pytest-cov", "pytest-env", "pytest-lazy-fixture (==0.6.3)", "pytest-mock (==1.10.4)", "pytest-ordering (>=0.6.0,<0.7.0)", "pytest-timeout (==1.4.2)", "pytest-xdist", "redis (>=4.2.2,<5)", "regex", "rockset (>=1.0.3)", "ruff (>=0.3.3)", "snowflake-connector-python[pandas] (>=3.7,<4)", "testcontainers (==4.4.0)", "trino (>=0.305.0,<0.400.0)", "types-PyMySQL", "types-PyYAML", "types-protobuf (>=3.19.22,<3.20.0)", "types-python-dateutil", "types-pytz", "types-redis", "types-requests (<2.31.0)", 
"types-setuptools", "types-tabulate", "urllib3 (>=1.25.4,<3)", "virtualenv (<20.24.2)", "virtualenv (==20.23.0)"] duckdb = ["ibis-framework[duckdb]"] gcp = ["fsspec (<=2024.1.0)", "google-api-core (>=1.23.0,<3)", "google-cloud-bigquery-storage (>=2.0.0,<3)", "google-cloud-bigquery[pandas] (>=2,<3.13.0)", "google-cloud-bigtable (>=2.11.0,<3)", "google-cloud-datastore (>=2.1.0,<3)", "google-cloud-storage (>=1.34.0,<3)", "googleapis-common-protos (>=1.52.0,<2)"] ge = ["great-expectations (>=0.15.41)"] @@ -1342,7 +1365,7 @@ trino = ["regex", "trino (>=0.305.0,<0.400.0)"] type = "git" url = "https://github.com/feast-dev/feast.git" reference = "HEAD" -resolved_reference = "e873636b4a5f3a05666f9284c31e488f27257ed0" +resolved_reference = "93ddb11bf5a182cea44435147e39f40b30a69db7" [[package]] name = "filelock" @@ -1480,7 +1503,7 @@ files = [ name = "frozendict" version = "2.4.2" description = "A simple immutable dictionary" -optional = true +optional = false python-versions = ">=3.6" files = [ {file = "frozendict-2.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19743495b1e92a7e4db56fcd6a5d36ea1d1b0f550822d6fd780e44d58f0b8c18"}, @@ -1525,7 +1548,7 @@ files = [ name = "frozenlist" version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, @@ -1611,7 +1634,7 @@ files = [ name = "fsspec" version = "2024.3.1" description = "File-system specification" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, @@ -1885,7 +1908,7 @@ files = [ name = "html5lib" version = "1.1" description = "HTML parser based on the WHATWG HTML specification" -optional = true +optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, @@ -2243,7 +2266,7 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, @@ -2528,13 +2551,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.1.6" +version = "4.1.8" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.1.6-py3-none-any.whl", hash = "sha256:cf3e862bc10dbf4331e4eb37438634f813c238cfc62c71c640b3b3b2caa089a8"}, - {file = "jupyterlab-4.1.6.tar.gz", hash = "sha256:7935f36ba26eb615183a4f5c2bbca5791b5108ce2a00b5505f8cfd100d53648e"}, + {file = "jupyterlab-4.1.8-py3-none-any.whl", hash = "sha256:c3baf3a2f91f89d110ed5786cd18672b9a357129d4e389d2a0dead15e11a4d2c"}, + {file = "jupyterlab-4.1.8.tar.gz", hash = "sha256:3384aded8680e7ce504fd63b8bb89a39df21c9c7694d9e7dc4a68742cdb30f9b"}, ] [package.dependencies] @@ -2545,7 +2568,7 @@ jinja2 = ">=3.0.3" jupyter-core = "*" jupyter-lsp = ">=2.0.0" jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.19.0,<3" +jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2" 
packaging = "*" tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} @@ -2754,7 +2777,7 @@ files = [ name = "lxml" version = "5.2.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -optional = true +optional = false python-versions = ">=3.6" files = [ {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, @@ -3143,7 +3166,7 @@ files = [ name = "minio" version = "7.2.6" description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" -optional = true +optional = false python-versions = "*" files = [ {file = "minio-7.2.6-py3-none-any.whl", hash = "sha256:4972273a924f274e2d71f38f6d2afdf841a034801e60ba758e5c5aff4234b768"}, @@ -3365,7 +3388,7 @@ tests = ["pytest (>=4.6)"] name = "msgpack" version = "1.0.8" description = "MessagePack serializer" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, @@ -3430,7 +3453,7 @@ files = [ name = "multidict" version = "6.0.5" description = "multidict implementation" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, @@ -3529,7 +3552,7 @@ files = [ name = "multitasking" version = "0.0.11" description = "Non-blocking Python methods using decorators" -optional = true +optional = false python-versions = "*" files = [ {file = "multitasking-0.0.11-py3-none-any.whl", hash = "sha256:1e5b37a5f8fc1e6cfaafd1a82b6b1cc6d2ed20037d3b89c25a84f499bd7b3dd4"}, @@ -4178,21 +4201,22 @@ files = [ [[package]] name = "papermill" -version = "2.5.0" +version = "2.6.0" description = "Parameterize and run Jupyter and nteract Notebooks" optional = true python-versions = ">=3.8" files = [ - {file = "papermill-2.5.0-py3-none-any.whl", hash = "sha256:c42303afb92e482a60ae1df2577be59a5b7a64c5cd52d37c74c7f74e36085708"}, - {file = "papermill-2.5.0.tar.gz", hash = "sha256:ea7b70c0553f56fe91b0fa9cc5e17012cd699320a8b015373e7870c5e6086c72"}, + {file = "papermill-2.6.0-py3-none-any.whl", hash = "sha256:0f09da6ef709f3f14dde77cb1af052d05b14019189869affff374c9e612f2dd5"}, + {file = "papermill-2.6.0.tar.gz", hash = "sha256:9fe2a91912fd578f391b4cc8d6d105e73124dcd0cde2a43c3c4a1c77ac88ea24"}, ] [package.dependencies] -aiohttp = {version = "3.9.0b0", markers = "python_version == \"3.12\""} +aiohttp = {version = ">=3.9.0", markers = "python_version == \"3.12\""} +ansicolors = "*" click = "*" entrypoints = "*" nbclient = ">=0.2.0" -nbformat = ">=5.1.2" +nbformat = ">=5.2.0" pyyaml = "*" requests = "*" tenacity = ">=5.0.2" @@ -4202,13 +4226,13 @@ tqdm = ">=4.32.2" all = ["PyGithub (>=1.55)", "azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "black (>=19.3b0)", "boto3", "gcsfs (>=0.2.0)", "pyarrow (>=2.0)", "requests (>=2.21.0)"] azure = ["azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "requests (>=2.21.0)"] black = ["black (>=19.3b0)"] -dev = ["attrs (>=17.4.0)", "azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "black (>=19.3b0)", "boto3", "botocore", "bumpversion", "check-manifest", "codecov", "coverage", "flake8", "gcsfs (>=0.2.0)", "google-compute-engine", "ipython 
(>=5.0)", "ipywidgets", "moto", "notebook", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "pytest-env (>=0.6.2)", "pytest-mock (>=1.10)", "recommonmark", "requests (>=2.21.0)", "setuptools (>=38.6.0)", "tox", "twine (>=1.11.0)", "wheel (>=0.31.0)"] -docs = ["PyGithub (>=1.55)", "Sphinx (>=3.5.4)", "azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "black (>=19.3b0)", "boto3", "furo (>=2021.4.11b34)", "gcsfs (>=0.2.0)", "moto (>=2.0.5)", "myst-parser (>=0.13.7)", "pyarrow (>=2.0)", "requests (>=2.21.0)", "sphinx-copybutton (>=0.3.1)"] +dev = ["attrs (>=17.4.0)", "azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "black (>=19.3b0)", "boto3", "botocore", "bumpversion", "check-manifest", "codecov", "coverage", "gcsfs (>=0.2.0)", "google-compute-engine", "ipython (>=5.0)", "ipywidgets", "moto (>=5.0.0,<5.1.0)", "notebook", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "pytest-env (>=0.6.2)", "pytest-mock (>=1.10)", "recommonmark", "requests (>=2.21.0)", "setuptools (>=38.6.0)", "tox", "twine (>=1.11.0)", "wheel (>=0.31.0)"] +docs = ["PyGithub (>=1.55)", "Sphinx (>=7.2.6)", "azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "black (>=19.3b0)", "boto3", "entrypoints", "furo (>=2023.9.10)", "gcsfs (>=0.2.0)", "moto (>=4.2.8)", "myst-parser (>=2.0.0)", "nbformat", "pyarrow (>=2.0)", "requests (>=2.21.0)", "sphinx-copybutton (>=0.5.2)"] gcs = ["gcsfs (>=0.2.0)"] github = ["PyGithub (>=1.55)"] hdfs = ["pyarrow (>=2.0)"] s3 = ["boto3"] -test = ["attrs (>=17.4.0)", "azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "black (>=19.3b0)", "boto3", "botocore", "bumpversion", "check-manifest", "codecov", "coverage", "flake8", "gcsfs (>=0.2.0)", "google-compute-engine", "ipython (>=5.0)", "ipywidgets", "moto", "notebook", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "pytest-env (>=0.6.2)", "pytest-mock (>=1.10)", "recommonmark", "requests (>=2.21.0)", "setuptools (>=38.6.0)", "tox", "twine (>=1.11.0)", "wheel (>=0.31.0)"] +test = ["attrs (>=17.4.0)", "azure-datalake-store (>=0.0.30)", "azure-identity (>=1.3.1)", "azure-storage-blob (>=12.1.0)", "black (>=19.3b0)", "boto3", "botocore", "bumpversion", "check-manifest", "codecov", "coverage", "gcsfs (>=0.2.0)", "google-compute-engine", "ipython (>=5.0)", "ipywidgets", "moto (>=5.0.0,<5.1.0)", "notebook", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "pytest-env (>=0.6.2)", "pytest-mock (>=1.10)", "recommonmark", "requests (>=2.21.0)", "setuptools (>=38.6.0)", "tox", "twine (>=1.11.0)", "wheel (>=0.31.0)"] [[package]] name = "parso" @@ -4276,7 +4300,7 @@ files = [ name = "peewee" version = "3.17.3" description = "a little orm" -optional = true +optional = false python-versions = "*" files = [ {file = "peewee-3.17.3.tar.gz", hash = "sha256:ef15f90b628e41a584be8306cdc3243c51f73ce88b06154d9572f6d0284a0169"}, @@ -4300,30 +4324,32 @@ ptyprocess = ">=0.5" name = "pfeed" version = "0.0.1.dev12" description = "Data pipeline for algo-trading, getting and storing both real-time and historical data made easy." 
-optional = true -python-versions = "<3.13,>=3.10" -files = [ - {file = "pfeed-0.0.1.dev12-py3-none-any.whl", hash = "sha256:d8bf7e8d3f84be30e0cb389962014ff03567afbb20f8b4687972e9125d49184e"}, - {file = "pfeed-0.0.1.dev12.tar.gz", hash = "sha256:5328f48a1a9dc52ad0d1f4657ff1140840b7d46fc2d45633f9bb5f3bfc4668ff"}, -] +optional = false +python-versions = ">=3.10 <3.13" +files = [] +develop = true [package.dependencies] -beautifulsoup4 = ">=4.12.3,<5.0.0" -connectorx = {version = ">=0.3.2,<0.4.0", optional = true, markers = "extra == \"boost\""} -minio = {version = ">=7.2.5,<8.0.0", optional = true, markers = "extra == \"data\""} -pandas = {version = ">=2.2.0,<3.0.0", optional = true, markers = "extra == \"df\""} -pfund = ">=0.0.1.dev13,<0.0.2" -polars = {version = ">=0.20.16,<0.21.0", optional = true, markers = "extra == \"df\""} -psutil = {version = ">=5.9.8,<6.0.0", optional = true, markers = "extra == \"data\""} -pyarrow = {version = ">=15.0.0,<16.0.0", optional = true, markers = "extra == \"boost\""} -ray = {version = ">=2.10.0,<3.0.0", optional = true, markers = "extra == \"boost\""} -s3fs = {version = ">=2024.3.1,<2025.0.0", optional = true, markers = "extra == \"data\""} -yfinance = ">=0.2.37,<0.3.0" +beautifulsoup4 = "^4.12.3" +connectorx = {version = "^0.3.2", optional = true} +minio = {version = "^7.2.5", optional = true} +pandas = {version = "^2.2.0", optional = true} +pfund = "^0.0.1.dev13" +polars = {version = "^0.20.21", optional = true} +psutil = {version = "^5.9.8", optional = true} +pyarrow = {version = "^15.0.0", optional = true} +ray = {version = "^2.10.0", optional = true} +s3fs = {version = "^2024.3.1", optional = true} +yfinance = "^0.2.37" [package.extras] boost = ["connectorx (>=0.3.2,<0.4.0)", "pyarrow (>=15.0.0,<16.0.0)", "ray (>=2.10.0,<3.0.0)"] data = ["minio (>=7.2.5,<8.0.0)", "psutil (>=5.9.8,<6.0.0)", "s3fs (>=2024.3.1,<2025.0.0)"] -df = ["pandas (>=2.2.0,<3.0.0)", "polars (>=0.20.16,<0.21.0)"] +df = ["pandas (>=2.2.0,<3.0.0)", "polars (>=0.20.21,<0.21.0)"] + +[package.source] +type = "directory" +url = "../pfeed" [[package]] name = "pfolio" @@ -4513,7 +4539,7 @@ testing = ["pytest", "pytest-benchmark"] name = "polars" version = "0.20.22" description = "Blazingly fast DataFrame library" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "polars-0.20.22-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d211aed6ae34845e1a9766e3b487f73ee9d5044927cc748f7498a72a5a0c8805"}, @@ -4616,7 +4642,7 @@ wcwidth = "*" name = "protobuf" version = "4.25.3" description = "" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, @@ -4689,7 +4715,7 @@ tests = ["pytest"] name = "pyarrow" version = "15.0.2" description = "Python library for Apache Arrow" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, @@ -4797,7 +4823,7 @@ files = [ name = "pycryptodome" version = "3.20.0" description = "Cryptographic library for Python" -optional = true +optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, @@ -5021,13 +5047,13 @@ testing = ["covdefaults (>=2.3)", "pytest 
(>=7.4)", "pytest-cov (>=4.1)", "pytes [[package]] name = "pytest" -version = "8.1.1" +version = "8.2.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, + {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, ] [package.dependencies] @@ -5035,11 +5061,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.4,<2.0" +pluggy = ">=1.5,<2.0" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -5516,7 +5542,7 @@ full = ["numpy"] name = "ray" version = "2.12.0" description = "Ray provides a simple, universal API for building distributed applications." -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "ray-2.12.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:775c4dda4292e813c29dc5cb22e61de7f47a411cf5a7a8a8b5dbb61fe9cd83f8"}, @@ -5864,7 +5890,7 @@ files = [ name = "s3fs" version = "2024.3.1" description = "Convenient Filesystem interface over S3" -optional = true +optional = false python-versions = ">= 3.8" files = [ {file = "s3fs-2024.3.1-py3-none-any.whl", hash = "sha256:f4566a5446c473740d272ec08e0b4aae8db1aa05f662c42ff0aa2c89bb5060ea"}, @@ -6927,13 +6953,13 @@ files = [ [[package]] name = "tox" -version = "4.14.2" +version = "4.15.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.14.2-py3-none-any.whl", hash = "sha256:2900c4eb7b716af4a928a7fdc2ed248ad6575294ed7cfae2ea41203937422847"}, - {file = "tox-4.14.2.tar.gz", hash = "sha256:0defb44f6dafd911b61788325741cc6b2e12ea71f987ac025ad4d649f1f1a104"}, + {file = "tox-4.15.0-py3-none-any.whl", hash = "sha256:300055f335d855b2ab1b12c5802de7f62a36d4fd53f30bd2835f6a201dda46ea"}, + {file = "tox-4.15.0.tar.gz", hash = "sha256:7a0beeef166fbe566f54f795b4906c31b428eddafc0102ac00d20998dd1933f6"}, ] [package.dependencies] @@ -7514,7 +7540,7 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." 
-optional = true
+optional = false
 python-versions = ">=3.6"
 files = [
     {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
@@ -7593,7 +7619,7 @@ files = [
 name = "yarl"
 version = "1.9.4"
 description = "Yet another URL library"
-optional = true
+optional = false
 python-versions = ">=3.7"
 files = [
     {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
@@ -7696,7 +7722,7 @@ multidict = ">=4.0"
 name = "yfinance"
 version = "0.2.38"
 description = "Download market data from Yahoo! Finance API"
-optional = true
+optional = false
 python-versions = "*"
 files = [
     {file = "yfinance-0.2.38-py2.py3-none-any.whl", hash = "sha256:07525cf84414272723a3e2b9d4c0a2898ddb60cc0828aa190de26664fac6f676"},
@@ -7743,4 +7769,4 @@ ml = ["feast", "mlflow", "scikit-learn", "ta", "torch"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10 <3.13"
-content-hash = "298cbe69b8c8dd36ae92aeaca4dbf5c66ca253fb583238643ebe56ddf409d302"
+content-hash = "7f55cf69ec073efd9033f02fb2071d92ff3d07329a24583ca0733995380f15eb"
diff --git a/pyproject.toml b/pyproject.toml
index 55c9766..92045e3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -47,7 +47,7 @@ pfund = "pfund.main:run_cli"
 optional = true
 
 [tool.poetry.group.dev.dependencies]
-# pfeed = { path = "../pfeed", develop = true, extras = ["df", "data", "boost"] }
+pfeed = { path = "../pfeed", develop = true, extras = ["df", "data", "boost"] }
 # pfolio = { path = "../pfolio", develop = true, extras = ["bayesian", "data", "portfolio", "temporary"] }
 pfolio = { path = "../pfolio", develop = true }
 pybit = "^5.6.2"