Implement build cmd in rust #56

Merged · 1 commit · Mar 5, 2024
poetry.lock: 100 changes (49 additions, 51 deletions)

Large diffs are not rendered by default.

pyproject.toml: 2 changes (1 addition, 1 deletion)
@@ -14,7 +14,7 @@ packages = [{include = "meta_memcache", from="src"}]
python = "^3.8"
uhashring = "^2.1"
marisa-trie = "^1.0.0"
meta-memcache-socket = "^0.1.0"
meta-memcache-socket = "0.1.1"

[tool.poetry.group.extras.dependencies]
prometheus-client = "^0.17.1"
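The functional change above pins meta-memcache-socket to exactly 0.1.1 instead of the ^0.1.0 caret range. As a quick sanity check after `poetry install`, something like the sketch below would confirm the resolved version (it assumes the installed distribution name matches the dependency name in pyproject.toml):

```python
# Sketch: confirm the exact pin resolved as expected after `poetry install`.
# Assumes the installed distribution is named "meta-memcache-socket".
from importlib.metadata import version

assert version("meta-memcache-socket") == "0.1.1"
```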
src/meta_memcache/__init__.py: 4 changes (1 addition, 3 deletions)
@@ -33,17 +33,15 @@
)
from meta_memcache.protocol import (
Conflict,
Flag,
IntFlag,
Key,
MetaCommand,
Miss,
NotStored,
ServerVersion,
ResponseFlags,
RequestFlags,
SetMode,
Success,
TokenFlag,
Value,
)
from meta_memcache.routers.default import DefaultRouter
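For downstream code, the visible effect of this hunk is that RequestFlags and ResponseFlags are now part of the names re-exported from the package root, while (per the 1 addition / 3 deletions count) older flag-style names shown here, most likely Flag, IntFlag, and TokenFlag, drop out. A minimal sketch of what imports look like after the change; only names that appear in the diff above are used, and the describe() helper itself is purely illustrative:

```python
# Minimal sketch of downstream imports after this hunk. RequestFlags and
# ResponseFlags are the flag types now re-exported; Miss/Value/Success are
# result types kept by the diff. The describe() helper is illustrative only.
from meta_memcache import (
    Miss,
    RequestFlags,   # new-style request flags
    ResponseFlags,  # new-style response flags
    Success,
    Value,
)


def describe(result) -> str:
    # Pattern-match on the result types re-exported above.
    if isinstance(result, Miss):
        return "miss"
    if isinstance(result, Value):
        return "hit"
    if isinstance(result, Success):
        return "stored"
    return "other"
```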
src/meta_memcache/cache_client.py: 8 changes (4 additions, 4 deletions)
@@ -1,4 +1,4 @@
from typing import Callable, Iterable, Optional, Tuple
from typing import Callable, Iterable, Optional

from meta_memcache.base.base_cache_client import BaseCacheClient
from meta_memcache.commands.high_level_commands import HighLevelCommandsMixin
@@ -25,7 +25,7 @@ def cache_client_from_servers(
servers: Iterable[ServerAddress],
connection_pool_factory_fn: Callable[[ServerAddress], ConnectionPool],
serializer: Optional[BaseSerializer] = None,
key_encoder_fn: Callable[[Key], Tuple[bytes, bool]] = default_key_encoder,
key_encoder_fn: Callable[[Key], bytes] = default_key_encoder,
raise_on_server_error: bool = True,
) -> CacheApi:
executor = DefaultExecutor(
@@ -48,7 +48,7 @@ def cache_client_with_gutter_from_servers(
gutter_ttl: int,
connection_pool_factory_fn: Callable[[ServerAddress], ConnectionPool],
serializer: Optional[BaseSerializer] = None,
key_encoder_fn: Callable[[Key], Tuple[bytes, bool]] = default_key_encoder,
key_encoder_fn: Callable[[Key], bytes] = default_key_encoder,
raise_on_server_error: bool = True,
) -> CacheApi:
executor = DefaultExecutor(
@@ -76,7 +76,7 @@ def ephemeral_cache_client_from_servers(
max_ttl: int,
connection_pool_factory_fn: Callable[[ServerAddress], ConnectionPool],
serializer: Optional[BaseSerializer] = None,
key_encoder_fn: Callable[[Key], Tuple[bytes, bool]] = default_key_encoder,
key_encoder_fn: Callable[[Key], bytes] = default_key_encoder,
raise_on_server_error: bool = True,
) -> CacheApi:
executor = DefaultExecutor(
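The recurring change in this file narrows key_encoder_fn from Callable[[Key], Tuple[bytes, bool]] to Callable[[Key], bytes]: a custom encoder now returns only the encoded key bytes. The sketch below shows an encoder that satisfies the new signature; the Key.key attribute and the hashing fallback are assumptions for illustration, not the behavior of the library's default_key_encoder:

```python
# Sketch of a custom encoder matching the new Callable[[Key], bytes] contract.
# Assumes Key carries its raw key string in a `key` attribute; the hashing
# fallback is purely illustrative.
import hashlib

from meta_memcache.protocol import Key


def hashed_key_encoder(key: Key) -> bytes:
    raw = key.key.encode()
    if len(raw) <= 250:  # memcached's key-length limit
        return raw
    # Hash oversized keys into a fixed-length, ASCII-safe form.
    return hashlib.blake2b(raw, digest_size=24).hexdigest().encode()
```

Such an encoder would be passed as key_encoder_fn=hashed_key_encoder to any of the factory functions above in place of default_key_encoder.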