diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 321e9f81..6b153cb0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -6,6 +6,9 @@ name: "Build, lint, and test" on: + push: + branches: + - main pull_request: types: - opened @@ -26,6 +29,7 @@ jobs: package: - "hooks/openfeature-hooks-opentelemetry" - "providers/openfeature-provider-flagd" + - "providers/openfeature-provider-ofrep" steps: - uses: actions/checkout@v4 @@ -45,6 +49,11 @@ jobs: run: hatch build working-directory: ${{ matrix.package }} + - name: Type checking + if: matrix.python-version == '3.11' + working-directory: ${{ matrix.package }} + run: hatch run mypy:run + - name: Test with pytest run: hatch test -c working-directory: ${{ matrix.package }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d2ec28d6..9b08fdba 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,19 +14,3 @@ repos: - id: check-yaml - id: trailing-whitespace - id: check-merge-conflict - - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.11.2 - hooks: - - id: mypy - args: [--python-version=3.8] - additional_dependencies: - - openfeature-sdk>=0.4.0 - - opentelemetry-api - - types-protobuf - - types-PyYAML - - types-requests - - mmh3 - - semver - - panzi-json-logic - exclude: proto|tests diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 15269c97..583f262f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -38,6 +38,10 @@ To run the integration tests you need to have a container runtime, like docker, hatch run test ``` +### Type checking + +Run `mypy` by entering the package directory and running `hatch run mypy:run`. + ## Pull Request All contributions to the OpenFeature project are welcome via GitHub pull requests. 
diff --git a/hooks/openfeature-hooks-opentelemetry/pyproject.toml b/hooks/openfeature-hooks-opentelemetry/pyproject.toml index 0c585f3c..b826dfeb 100644 --- a/hooks/openfeature-hooks-opentelemetry/pyproject.toml +++ b/hooks/openfeature-hooks-opentelemetry/pyproject.toml @@ -50,6 +50,14 @@ cov = [ "cov-report", ] +[tool.hatch.envs.mypy] +dependencies = [ + "mypy[faster-cache]>=1.13.0", +] + +[tool.hatch.envs.mypy.scripts] +run = "mypy" + [tool.hatch.build.targets.sdist] exclude = [ ".gitignore", @@ -57,3 +65,16 @@ exclude = [ [tool.hatch.build.targets.wheel] packages = ["src/openfeature"] + +[tool.mypy] +mypy_path = "src" +files = "src" + +python_version = "3.8" # should be identical to the minimum supported version +namespace_packages = true +explicit_package_bases = true +local_partial_types = true +pretty = true + +strict = true +disallow_any_generics = false diff --git a/mypy.ini b/mypy.ini index 4c24b416..2c54f1ab 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,5 @@ [mypy] -files = hooks,providers +files = hooks,providers/openfeature-provider-ofrep exclude = proto|tests untyped_calls_exclude = flagd.proto diff --git a/providers/openfeature-provider-flagd/README.md b/providers/openfeature-provider-flagd/README.md index 5f807eef..2e1c8cd0 100644 --- a/providers/openfeature-provider-flagd/README.md +++ b/providers/openfeature-provider-flagd/README.md @@ -105,10 +105,8 @@ The default options can be defined in the FlagdProvider constructor. > [!NOTE] > Some configurations are only applicable for RPC resolver. - -### Caching (RPC only) - -> [!NOTE] -> The in-process resolver does not benefit from caching since all evaluations are done locally and do not involve I/O. - -The provider attempts to establish a connection to flagd's event stream (up to 5 times by default). 
-If the connection is successful and caching is enabled, each flag returned with the reason `STATIC` is cached until an event is received -concerning the cached flag (at which point it is removed from the cache). - -On invocation of a flag evaluation (if caching is available), an attempt is made to retrieve the entry from the cache, if -found the flag is returned with the reason `CACHED`. - -By default, the provider is configured to -use [least recently used (lru)](https://pypi.org/project/cachebox/) -caching with up to 1000 entries. - ## License Apache 2.0 - See [LICENSE](./LICENSE) for more information. diff --git a/providers/openfeature-provider-flagd/openfeature/test-harness b/providers/openfeature-provider-flagd/openfeature/test-harness index 6197b3d9..96d0744e 160000 --- a/providers/openfeature-provider-flagd/openfeature/test-harness +++ b/providers/openfeature-provider-flagd/openfeature/test-harness @@ -1 +1 @@ -Subproject commit 6197b3d956d358bf662e5b8e0aebdc4800480f6b +Subproject commit 96d0744e65ff81f748ab137ef37cdbf7b42ff882 diff --git a/providers/openfeature-provider-flagd/pyproject.toml b/providers/openfeature-provider-flagd/pyproject.toml index ba0ae310..738ba02a 100644 --- a/providers/openfeature-provider-flagd/pyproject.toml +++ b/providers/openfeature-provider-flagd/pyproject.toml @@ -17,7 +17,7 @@ classifiers = [ ] keywords = [] dependencies = [ - "openfeature-sdk>=0.4.0", + "openfeature-sdk>=0.6.0", "grpcio>=1.68.0", "protobuf>=4.25.2", "mmh3>=4.1.0", @@ -60,6 +60,20 @@ cov = [ "cov-report", ] + +[tool.hatch.envs.mypy] +dependencies = [ + "mypy[faster-cache]>=1.13.0", + "types-protobuf", + "types-pyyaml", +] +pre-install-commands = [ + "hatch build", +] + +[tool.hatch.envs.mypy.scripts] +run = "mypy" + [tool.hatch.build.hooks.protobuf] generate_pyi = false dependencies = [ @@ -83,8 +97,6 @@ outputs = ["{proto_path}/{proto_name}_pb2_grpc.pyi"] exclude = [ ".gitignore", "/openfeature", - "test-harness", - "spec" ] [tool.hatch.build.targets.wheel] 
@@ -93,6 +105,32 @@ packages = ["src/openfeature"] [tool.coverage.run] omit = [ # exclude generated files - "src/schemas/*", + "src/openfeature/schemas/*", "tests/**", ] + +[tool.mypy] +mypy_path = "src" +files = "src" + +python_version = "3.8" # should be identical to the minimum supported version +namespace_packages = true +explicit_package_bases = true +local_partial_types = true +pretty = true + +strict = true +disallow_any_generics = false + +[[tool.mypy.overrides]] +module = [ + "grpc.*", + "json_logic.*", +] +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = [ + "openfeature.schemas.*" +] +warn_unused_ignores = false diff --git a/providers/openfeature-provider-flagd/pytest.ini b/providers/openfeature-provider-flagd/pytest.ini new file mode 100644 index 00000000..66da895f --- /dev/null +++ b/providers/openfeature-provider-flagd/pytest.ini @@ -0,0 +1,10 @@ +[pytest] +markers = + rpc: tests for rpc mode. + in-process: tests for in-process mode. + customCert: Supports custom certs. + unixsocket: Supports unixsockets. + events: Supports events. + sync: Supports sync. + caching: Supports caching. + offline: Supports offline. 
diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py index b31a16aa..fb80f9db 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py @@ -2,22 +2,41 @@ import typing from enum import Enum -ENV_VAR_MAX_EVENT_STREAM_RETRIES = "FLAGD_MAX_EVENT_STREAM_RETRIES" -ENV_VAR_KEEP_ALIVE_TIME_MS = "FLAGD_KEEP_ALIVE_TIME_MS" +class ResolverType(Enum): + RPC = "rpc" + IN_PROCESS = "in-process" -ENV_VAR_DEADLINE_MS = "FLAGD_DEADLINE_MS" -ENV_VAR_STREAM_DEADLINE_MS = "FLAGD_STREAM_DEADLINE_MS" -ENV_VAR_CACHE_TYPE = "FLAGD_CACHE_TYPE" +class CacheType(Enum): + LRU = "lru" + DISABLED = "disabled" + + +DEFAULT_CACHE = CacheType.LRU +DEFAULT_CACHE_SIZE = 1000 +DEFAULT_DEADLINE = 500 +DEFAULT_HOST = "localhost" +DEFAULT_KEEP_ALIVE = 0 +DEFAULT_OFFLINE_SOURCE_PATH: typing.Optional[str] = None +DEFAULT_PORT_IN_PROCESS = 8015 +DEFAULT_PORT_RPC = 8013 +DEFAULT_RESOLVER_TYPE = ResolverType.RPC +DEFAULT_RETRY_BACKOFF = 1000 +DEFAULT_STREAM_DEADLINE = 600000 +DEFAULT_TLS = False + +ENV_VAR_CACHE_SIZE = "FLAGD_MAX_CACHE_SIZE" +ENV_VAR_CACHE_TYPE = "FLAGD_CACHE" +ENV_VAR_DEADLINE_MS = "FLAGD_DEADLINE_MS" ENV_VAR_HOST = "FLAGD_HOST" -ENV_VAR_MAX_CACHE_SIZE = "FLAGD_MAX_CACHE_SIZE" +ENV_VAR_KEEP_ALIVE_TIME_MS = "FLAGD_KEEP_ALIVE_TIME_MS" ENV_VAR_OFFLINE_FLAG_SOURCE_PATH = "FLAGD_OFFLINE_FLAG_SOURCE_PATH" -ENV_VAR_OFFLINE_POLL_INTERVAL_SECONDS = "FLAGD_OFFLINE_POLL_INTERVAL_SECONDS" ENV_VAR_PORT = "FLAGD_PORT" -ENV_VAR_RESOLVER_TYPE = "FLAGD_RESOLVER_TYPE" +ENV_VAR_RESOLVER_TYPE = "FLAGD_RESOLVER" ENV_VAR_RETRY_BACKOFF_MS = "FLAGD_RETRY_BACKOFF_MS" ENV_VAR_SELECTOR = "FLAGD_SELECTOR" +ENV_VAR_STREAM_DEADLINE_MS = "FLAGD_STREAM_DEADLINE_MS" ENV_VAR_TLS = "FLAGD_TLS" T = typing.TypeVar("T") @@ -27,6 +46,14 @@ def str_to_bool(val: str) 
-> bool: return val.lower() == "true" +def convert_resolver_type(val: typing.Union[str, ResolverType]) -> ResolverType: + if isinstance(val, str): + v = val.lower() + return ResolverType(v) + else: + return ResolverType(val) + + def env_or_default( env_var: str, default: T, cast: typing.Optional[typing.Callable[[str], T]] = None ) -> typing.Union[str, T]: @@ -36,16 +63,6 @@ def env_or_default( return val if cast is None else cast(val) -class ResolverType(Enum): - RPC = "rpc" - IN_PROCESS = "in-process" - - -class CacheType(Enum): - LRU = "lru" - DISABLED = "disabled" - - class Config: def __init__( # noqa: PLR0913 self, @@ -53,78 +70,97 @@ def __init__( # noqa: PLR0913 port: typing.Optional[int] = None, tls: typing.Optional[bool] = None, selector: typing.Optional[str] = None, - resolver_type: typing.Optional[ResolverType] = None, + resolver: typing.Optional[ResolverType] = None, offline_flag_source_path: typing.Optional[str] = None, retry_backoff_ms: typing.Optional[int] = None, - cache_type: typing.Optional[CacheType] = None, - max_cache_size: typing.Optional[int] = None, - deadline: typing.Optional[int] = None, + deadline_ms: typing.Optional[int] = None, stream_deadline_ms: typing.Optional[int] = None, keep_alive_time: typing.Optional[int] = None, - max_event_stream_retries: typing.Optional[int] = None, + cache: typing.Optional[CacheType] = None, + max_cache_size: typing.Optional[int] = None, ): - self.host = env_or_default(ENV_VAR_HOST, "localhost") if host is None else host + self.host = env_or_default(ENV_VAR_HOST, DEFAULT_HOST) if host is None else host + self.tls = ( - env_or_default(ENV_VAR_TLS, False, cast=str_to_bool) if tls is None else tls + env_or_default(ENV_VAR_TLS, DEFAULT_TLS, cast=str_to_bool) + if tls is None + else tls ) + self.retry_backoff_ms: int = ( - int(env_or_default(ENV_VAR_RETRY_BACKOFF_MS, 1000, cast=int)) + int( + env_or_default( + ENV_VAR_RETRY_BACKOFF_MS, DEFAULT_RETRY_BACKOFF, cast=int + ) + ) if retry_backoff_ms is None else 
retry_backoff_ms ) - self.selector = ( - env_or_default(ENV_VAR_SELECTOR, None) if selector is None else selector + + self.resolver = ( + env_or_default( + ENV_VAR_RESOLVER_TYPE, DEFAULT_RESOLVER_TYPE, cast=convert_resolver_type + ) + if resolver is None + else resolver ) - self.resolver_type = ( - ResolverType(env_or_default(ENV_VAR_RESOLVER_TYPE, "rpc")) - if resolver_type is None - else resolver_type + + default_port = ( + DEFAULT_PORT_RPC + if self.resolver is ResolverType.RPC + else DEFAULT_PORT_IN_PROCESS ) - default_port = 8013 if self.resolver_type is ResolverType.RPC else 8015 self.port: int = ( int(env_or_default(ENV_VAR_PORT, default_port, cast=int)) if port is None else port ) + self.offline_flag_source_path = ( - env_or_default(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, None) + env_or_default( + ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, DEFAULT_OFFLINE_SOURCE_PATH + ) if offline_flag_source_path is None else offline_flag_source_path ) - self.cache_type = ( - CacheType(env_or_default(ENV_VAR_CACHE_TYPE, CacheType.LRU)) - if cache_type is None - else cache_type - ) - - self.max_cache_size: int = ( - int(env_or_default(ENV_VAR_MAX_CACHE_SIZE, 1000, cast=int)) - if max_cache_size is None - else max_cache_size - ) - - self.deadline: int = ( - int(env_or_default(ENV_VAR_DEADLINE_MS, 500, cast=int)) - if deadline is None - else deadline + self.deadline_ms: int = ( + int(env_or_default(ENV_VAR_DEADLINE_MS, DEFAULT_DEADLINE, cast=int)) + if deadline_ms is None + else deadline_ms ) self.stream_deadline_ms: int = ( - int(env_or_default(ENV_VAR_STREAM_DEADLINE_MS, 600000, cast=int)) + int( + env_or_default( + ENV_VAR_STREAM_DEADLINE_MS, DEFAULT_STREAM_DEADLINE, cast=int + ) + ) if stream_deadline_ms is None else stream_deadline_ms ) self.keep_alive_time: int = ( - int(env_or_default(ENV_VAR_KEEP_ALIVE_TIME_MS, 0, cast=int)) + int( + env_or_default(ENV_VAR_KEEP_ALIVE_TIME_MS, DEFAULT_KEEP_ALIVE, cast=int) + ) if keep_alive_time is None else keep_alive_time ) - 
self.max_event_stream_retries: int = ( - int(env_or_default(ENV_VAR_MAX_EVENT_STREAM_RETRIES, 5, cast=int)) - if max_event_stream_retries is None - else max_event_stream_retries + self.cache = ( + CacheType(env_or_default(ENV_VAR_CACHE_TYPE, DEFAULT_CACHE)) + if cache is None + else cache + ) + + self.max_cache_size: int = ( + int(env_or_default(ENV_VAR_CACHE_SIZE, DEFAULT_CACHE_SIZE, cast=int)) + if max_cache_size is None + else max_cache_size + ) + + self.selector = ( + env_or_default(ENV_VAR_SELECTOR, None) if selector is None else selector ) diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py index 721245fd..6616457d 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py @@ -21,8 +21,8 @@ # provider.initialise(schema="https",endpoint="example.com",port=1234,timeout=10) """ -import logging import typing +import warnings from openfeature.evaluation_context import EvaluationContext from openfeature.event import ProviderEventDetails @@ -50,10 +50,10 @@ def __init__( # noqa: PLR0913 selector: typing.Optional[str] = None, resolver_type: typing.Optional[ResolverType] = None, offline_flag_source_path: typing.Optional[str] = None, - cache_type: typing.Optional[CacheType] = None, - max_cache_size: typing.Optional[int] = None, stream_deadline_ms: typing.Optional[int] = None, keep_alive_time: typing.Optional[int] = None, + cache_type: typing.Optional[CacheType] = None, + max_cache_size: typing.Optional[int] = None, ): """ Create an instance of the FlagdProvider @@ -61,23 +61,32 @@ def __init__( # noqa: PLR0913 :param host: the host to make requests to :param port: the port the flagd service is available on :param tls: enable/disable secure TLS connectivity - :param timeout: the maximum to wait 
before a request times out + :param deadline: the maximum time in milliseconds to wait before a request times out + :param timeout: the maximum time to wait before a request times out + :param retry_backoff_ms: the number of milliseconds to backoff + :param offline_flag_source_path: the path to the flag source file + :param stream_deadline_ms: the maximum time in milliseconds to wait on the event stream + :param keep_alive_time: the gRPC keep-alive time in milliseconds + :param resolver_type: the type of resolver to use """ if deadline is None and timeout is not None: deadline = timeout * 1000 - logging.info( - "'timeout' property is deprecated, please use 'deadline' instead, be aware that 'deadline' is in milliseconds" + warnings.warn( + "'timeout' property is deprecated, please use 'deadline' instead, be aware that 'deadline' is in milliseconds", + DeprecationWarning, + stacklevel=2, ) + self.config = Config( host=host, port=port, tls=tls, - deadline=deadline, + deadline_ms=deadline, retry_backoff_ms=retry_backoff_ms, selector=selector, - resolver_type=resolver_type, + resolver=resolver_type, offline_flag_source_path=offline_flag_source_path, - cache_type=cache_type, + cache=cache_type, max_cache_size=max_cache_size, stream_deadline_ms=stream_deadline_ms, keep_alive_time=keep_alive_time, @@ -86,14 +95,14 @@ def __init__( # noqa: PLR0913 self.resolver = self.setup_resolver() def setup_resolver(self) -> AbstractResolver: - if self.config.resolver_type == ResolverType.RPC: + if self.config.resolver == ResolverType.RPC: return GrpcResolver( self.config, self.emit_provider_ready, self.emit_provider_error, self.emit_provider_configuration_changed, ) - elif self.config.resolver_type == ResolverType.IN_PROCESS: + elif self.config.resolver == ResolverType.IN_PROCESS: return InProcessResolver( self.config, self.emit_provider_ready, @@ -102,7 +111,7 @@ def setup_resolver(self) -> AbstractResolver: ) else: raise ValueError( - f"`resolver_type` parameter invalid: {self.config.resolver_type}" + 
f"`resolver_type` parameter invalid: {self.config.resolver}" ) def initialize(self, evaluation_context: EvaluationContext) -> None: diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py index d57c404a..3c8e5550 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py @@ -4,7 +4,7 @@ import typing import grpc -from cachebox import LRUCache # type:ignore[import-not-found] +from cachebox import BaseCacheImpl, LRUCache from google.protobuf.json_format import MessageToDict from google.protobuf.struct_pb2 import Struct @@ -20,7 +20,7 @@ TypeMismatchError, ) from openfeature.flag_evaluation import FlagResolutionDetails, Reason -from openfeature.schemas.protobuf.flagd.evaluation.v1 import ( # type:ignore[import-not-found] +from openfeature.schemas.protobuf.flagd.evaluation.v1 import ( evaluation_pb2, evaluation_pb2_grpc, ) @@ -29,6 +29,9 @@ from ..flag_type import FlagType from .protocol import AbstractResolver +if typing.TYPE_CHECKING: + from google.protobuf.message import Message + T = typing.TypeVar("T") logger = logging.getLogger("openfeature.contrib") @@ -37,6 +40,8 @@ class GrpcResolver(AbstractResolver): MAX_BACK_OFF = 120 + MAX_BACK_OFF = 120 + def __init__( self, config: Config, @@ -50,20 +55,18 @@ def __init__( self.emit_provider_ready = emit_provider_ready self.emit_provider_error = emit_provider_error self.emit_provider_configuration_changed = emit_provider_configuration_changed - - self.stub, self.channel = self.create_stub() - self.retry_backoff_seconds = config.retry_backoff_ms * 0.001 - self.streamline_deadline_seconds = config.stream_deadline_ms * 0.001 - self.deadline = config.deadline * 0.001 - self.connected = False - - self._cache = ( + self.cache: 
typing.Optional[BaseCacheImpl] = ( LRUCache(maxsize=self.config.max_cache_size) - if self.config.cache_type == CacheType.LRU + if self.config.cache == CacheType.LRU else None ) + self.stub, self.channel = self._create_stub() + self.retry_backoff_seconds = config.retry_backoff_ms * 0.001 + self.streamline_deadline_seconds = config.stream_deadline_ms * 0.001 + self.deadline = config.deadline_ms * 0.001 + self.connected = False - def create_stub( + def _create_stub( self, ) -> typing.Tuple[evaluation_pb2_grpc.ServiceStub, grpc.Channel]: config = self.config @@ -73,6 +76,10 @@ def create_stub( options=(("grpc.keepalive_time_ms", config.keep_alive_time),), ) stub = evaluation_pb2_grpc.ServiceStub(channel) + + if self.cache: + self.cache.clear() + return stub, channel def initialize(self, evaluation_context: EvaluationContext) -> None: @@ -81,8 +88,8 @@ def initialize(self, evaluation_context: EvaluationContext) -> None: def shutdown(self) -> None: self.active = False self.channel.close() - if self._cache: - self._cache.clear() + if self.cache: + self.cache.clear() def connect(self) -> None: self.active = True @@ -104,7 +111,6 @@ def connect(self) -> None: def listen(self) -> None: retry_delay = self.retry_backoff_seconds - call_args = ( {"timeout": self.streamline_deadline_seconds} if self.streamline_deadline_seconds > 0 @@ -135,8 +141,8 @@ def listen(self) -> None: return except grpc.RpcError as e: logger.error(f"SyncFlags stream error, {e.code()=} {e.details()=}") - if e.code() == grpc.StatusCode.UNAVAILABLE: - self.stub, self.channel = self.create_stub() + # re-create the stub if there's a connection issue - otherwise reconnect does not work as expected + self.stub, self.channel = self._create_stub() except ParseError: logger.exception( f"Could not parse flag data using flagd syntax: {message=}" @@ -156,9 +162,9 @@ def listen(self) -> None: def handle_changed_flags(self, data: typing.Any) -> None: changed_flags = list(data["flags"].keys()) - if self._cache: + if 
self.cache: for flag in changed_flags: - self._cache.pop(flag) + self.cache.pop(flag) self.emit_provider_configuration_changed(ProviderEventDetails(changed_flags)) @@ -209,21 +215,15 @@ def _resolve( # noqa: PLR0915 C901 default_value: T, evaluation_context: typing.Optional[EvaluationContext], ) -> FlagResolutionDetails[T]: - if self._cache is not None and flag_key in self._cache: - cached_flag: FlagResolutionDetails[T] = self._cache[flag_key] + if self.cache is not None and flag_key in self.cache: + cached_flag: FlagResolutionDetails[T] = self.cache[flag_key] cached_flag.reason = Reason.CACHED return cached_flag context = self._convert_context(evaluation_context) call_args = {"timeout": self.deadline} try: - request: typing.Union[ - evaluation_pb2.ResolveBooleanRequest, - evaluation_pb2.ResolveIntRequest, - evaluation_pb2.ResolveStringRequest, - evaluation_pb2.ResolveObjectRequest, - evaluation_pb2.ResolveFloatRequest, - ] + request: Message if flag_type == FlagType.BOOLEAN: request = evaluation_pb2.ResolveBooleanRequest( flag_key=flag_key, context=context @@ -278,8 +278,8 @@ def _resolve( # noqa: PLR0915 C901 variant=response.variant, ) - if response.reason == Reason.STATIC and self._cache is not None: - self._cache.insert(flag_key, result) + if response.reason == Reason.STATIC and self.cache is not None: + self.cache.insert(flag_key, result) return result diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py index 899e11a1..5a02bc82 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py @@ -35,7 +35,7 @@ def __init__( self.emit_provider_ready = emit_provider_ready 
self.emit_provider_error = emit_provider_error - self.deadline_seconds = config.deadline * 0.001 + self.deadline_seconds = config.deadline_ms * 0.001 self.last_modified = 0.0 self.flag_store = flag_store diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py index 650fc95d..eeaf5218 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py @@ -37,7 +37,7 @@ def __init__( self.stub, self.channel = self.create_stub() self.retry_backoff_seconds = config.retry_backoff_ms * 0.001 self.streamline_deadline_seconds = config.stream_deadline_ms * 0.001 - self.deadline = config.deadline * 0.001 + self.deadline = config.deadline_ms * 0.001 self.selector = config.selector self.emit_provider_ready = emit_provider_ready self.emit_provider_error = emit_provider_error diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/targeting.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/targeting.py index bb73a8cd..bcc00a5a 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/targeting.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/targeting.py @@ -1,8 +1,8 @@ import time import typing -from json_logic import builtins, jsonLogic # type: ignore[import-untyped] -from json_logic.types import JsonValue # type: ignore[import-untyped] +from json_logic import builtins, jsonLogic +from json_logic.types import JsonValue from openfeature.evaluation_context import EvaluationContext diff --git 
a/providers/openfeature-provider-flagd/tests/e2e/conftest.py b/providers/openfeature-provider-flagd/tests/e2e/conftest.py index 142ec7f0..077e6926 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/conftest.py +++ b/providers/openfeature-provider-flagd/tests/e2e/conftest.py @@ -11,6 +11,15 @@ SPEC_PATH = "../../openfeature/spec" +# running all gherkin tests, except the ones, not implemented +def pytest_collection_modifyitems(config): + marker = "not customCert and not unixsocket and not sync and not targetURI" + + # this seems to not work with python 3.8 + if hasattr(config.option, "markexpr") and config.option.markexpr == "": + config.option.markexpr = marker + + @pytest.fixture(autouse=True, scope="module") def setup(request, port, image): container: DockerContainer = FlagdContainer( diff --git a/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py b/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py index a9514363..e80fb0f7 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py +++ b/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py @@ -11,7 +11,7 @@ class FlagdContainer(DockerContainer): def __init__( self, - image: str = "ghcr.io/open-feature/flagd-testbed:v0.5.13", + image: str = "ghcr.io/open-feature/flagd-testbed:v0.5.15", port: int = 8013, **kwargs, ) -> None: diff --git a/providers/openfeature-provider-flagd/tests/e2e/rpc_cache.feature b/providers/openfeature-provider-flagd/tests/e2e/rpc_cache.feature deleted file mode 100644 index 8d40710d..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/rpc_cache.feature +++ /dev/null @@ -1,44 +0,0 @@ -Feature: Flag evaluation with Caching - -# This test suite contains scenarios to test the flag evaluation API. 
- - Background: - Given a provider is registered with caching - - Scenario: Resolves boolean details with caching - When a boolean flag with key "boolean-flag" is evaluated with details and default value "false" - Then the resolved boolean details value should be "true", the variant should be "on", and the reason should be "STATIC" - Then the resolved boolean details value should be "true", the variant should be "on", and the reason should be "CACHED" - - Scenario: Resolves string details with caching - When a string flag with key "string-flag" is evaluated with details and default value "bye" - Then the resolved string details value should be "hi", the variant should be "greeting", and the reason should be "STATIC" - Then the resolved string details value should be "hi", the variant should be "greeting", and the reason should be "CACHED" - - Scenario: Resolves integer details with caching - When an integer flag with key "integer-flag" is evaluated with details and default value 1 - Then the resolved integer details value should be 10, the variant should be "ten", and the reason should be "STATIC" - Then the resolved integer details value should be 10, the variant should be "ten", and the reason should be "CACHED" - - Scenario: Resolves float details with caching - When a float flag with key "float-flag" is evaluated with details and default value 0.1 - Then the resolved float details value should be 0.5, the variant should be "half", and the reason should be "STATIC" - Then the resolved float details value should be 0.5, the variant should be "half", and the reason should be "CACHED" - - Scenario: Resolves object details with caching - When an object flag with key "object-flag" is evaluated with details and a null default value - Then the resolved object details value should be contain fields "showImages", "title", and "imagesPerPage", with values "true", "Check out these pics!" 
and 100, respectively - And the variant should be "template", and the reason should be "STATIC" - Then the resolved object details value should be contain fields "showImages", "title", and "imagesPerPage", with values "true", "Check out these pics!" and 100, respectively - And the variant should be "template", and the reason should be "CACHED" - - Scenario: Flag change event with caching - When a string flag with key "changing-flag" is evaluated with details - When a PROVIDER_CONFIGURATION_CHANGED handler is added - And a flag with key "changing-flag" is modified - Then the returned reason should be "STATIC" - Then the returned reason should be "CACHED" - Then the PROVIDER_CONFIGURATION_CHANGED handler must run - And the event details must indicate "changing-flag" was altered - Then the returned reason should be "STATIC" - Then the returned reason should be "CACHED" \ No newline at end of file diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_config.py b/providers/openfeature-provider-flagd/tests/e2e/test_config.py new file mode 100644 index 00000000..dc413aee --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/test_config.py @@ -0,0 +1,103 @@ +import re +import sys +import typing + +import pytest +from asserts import assert_equal +from pytest_bdd import given, parsers, scenarios, then, when +from tests.e2e.conftest import TEST_HARNESS_PATH + +from openfeature.contrib.provider.flagd.config import CacheType, Config, ResolverType + + +def camel_to_snake(name): + name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() + + +def str2bool(v): + return v.lower() in ("yes", "true", "t", "1") + + +def convert_resolver_type(val: typing.Union[str, ResolverType]) -> ResolverType: + if isinstance(val, str): + v = val.lower() + return ResolverType(v) + else: + return ResolverType(val) + + +type_cast = { + "Integer": int, + "Long": int, + "String": str, + "Boolean": str2bool, + "ResolverType": 
convert_resolver_type, + "CacheType": CacheType, +} + + +@pytest.fixture(autouse=True, scope="module") +def setup(request): + pass + + +@pytest.fixture() +def option_values() -> dict: + return {} + + +@given( + parsers.cfparse( + 'an option "{option}" of type "{type_info}" with value "{value}"', + ), +) +def option_with_value(option: str, value: str, type_info: str, option_values: dict): + value = type_cast[type_info](value) + option_values[camel_to_snake(option)] = value + + +@given( + parsers.cfparse( + 'an environment variable "{env}" with value "{value}"', + ), +) +def env_with_value(monkeypatch, env: str, value: str): + monkeypatch.setenv(env, value) + + +@when( + parsers.cfparse( + "a config was initialized", + ), + target_fixture="config", +) +def initialize_config(option_values): + return Config(**option_values) + + +@when( + parsers.cfparse( + 'a config was initialized for "{resolver_type}"', + ), + target_fixture="config", +) +def initialize_config_for(resolver_type: str, option_values: dict): + return Config(resolver=ResolverType(resolver_type), **option_values) + + +@then( + parsers.cfparse( + 'the option "{option}" of type "{type_info}" should have the value "{value}"', + ) +) +def check_option_value(option, value, type_info, config): + value = type_cast[type_info](value) + value = value if value != "null" else None + assert_equal(config.__getattribute__(camel_to_snake(option)), value) + + +if sys.version_info >= (3, 9): + scenarios( + f"{TEST_HARNESS_PATH}/gherkin/config.feature", + ) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_in_process_file.py b/providers/openfeature-provider-flagd/tests/e2e/test_in_process_file.py new file mode 100644 index 00000000..f73dc990 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/test_in_process_file.py @@ -0,0 +1,94 @@ +import json +import os +import tempfile +from os import listdir + +import pytest +import yaml +from pytest_bdd import given, scenario, scenarios +from 
tests.e2e.conftest import SPEC_PATH, TEST_HARNESS_PATH +from tests.e2e.steps import wait_for + +from openfeature import api +from openfeature.client import OpenFeatureClient +from openfeature.contrib.provider.flagd import FlagdProvider +from openfeature.contrib.provider.flagd.config import ResolverType +from openfeature.provider import ProviderStatus + +KEY_EVALUATORS = "$evaluators" + +KEY_FLAGS = "flags" + +MERGED_FILE = "merged_file" + + +@pytest.fixture(params=["json", "yaml"], scope="module") +def file_name(request): + extension = request.param + result = {KEY_FLAGS: {}, KEY_EVALUATORS: {}} + + path = os.path.abspath( + os.path.join(os.path.dirname(__file__), f"{TEST_HARNESS_PATH}/flags/") + ) + + for f in listdir(path): + with open(path + "/" + f, "rb") as infile: + loaded_json = json.load(infile) + result[KEY_FLAGS] = {**result[KEY_FLAGS], **loaded_json[KEY_FLAGS]} + if loaded_json.get(KEY_EVALUATORS): + result[KEY_EVALUATORS] = { + **result[KEY_EVALUATORS], + **loaded_json[KEY_EVALUATORS], + } + + with tempfile.NamedTemporaryFile( + "w", delete=False, suffix="." 
+ extension + ) as outfile: + if extension == "json": + json.dump(result, outfile) + else: + yaml.dump(result, outfile) + + return outfile + + +@pytest.fixture(autouse=True, scope="module") +def client_name() -> str: + return "in-process" + + +@pytest.fixture(autouse=True, scope="module") +def resolver_type() -> ResolverType: + return ResolverType.IN_PROCESS + + +@pytest.fixture(autouse=True, scope="module") +def setup(request, client_name, file_name, resolver_type): + """nothing to boot""" + api.set_provider( + FlagdProvider( + resolver_type=resolver_type, offline_flag_source_path=file_name.name + ), + client_name, + ) + + +@given("a flagd provider is set", target_fixture="client") +@given("a provider is registered", target_fixture="client") +def setup_provider(client_name) -> OpenFeatureClient: + client = api.get_client(client_name) + wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) + return client + + +@pytest.mark.skip(reason="Eventing not implemented") +@scenario(f"{TEST_HARNESS_PATH}/gherkin/flagd.feature", "Flag change event") +def test_flag_change_event(): + """not implemented""" + + +scenarios( + f"{TEST_HARNESS_PATH}/gherkin/flagd.feature", + f"{TEST_HARNESS_PATH}/gherkin/flagd-json-evaluator.feature", + f"{SPEC_PATH}/specification/assets/gherkin/evaluation.feature", +) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py b/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py index cbd9a8b4..3fefb300 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py +++ b/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py @@ -1,13 +1,8 @@ import pytest -from pytest_bdd import given, scenarios +from pytest_bdd import scenarios from tests.e2e.conftest import SPEC_PATH, TEST_HARNESS_PATH -from tests.e2e.steps import wait_for -from openfeature import api -from openfeature.client import OpenFeatureClient -from openfeature.contrib.provider.flagd import FlagdProvider -from 
openfeature.contrib.provider.flagd.config import CacheType, ResolverType -from openfeature.provider import ProviderStatus +from openfeature.contrib.provider.flagd.config import ResolverType @pytest.fixture(autouse=True, scope="module") @@ -30,22 +25,9 @@ def image(): return "ghcr.io/open-feature/flagd-testbed:v0.5.13" -@given("a provider is registered with caching", target_fixture="client") -def setup_caching_provider(setup, resolver_type, client_name) -> OpenFeatureClient: - api.set_provider( - FlagdProvider( - resolver_type=resolver_type, port=setup, cache_type=CacheType.LRU - ), - client_name, - ) - client = api.get_client(client_name) - wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) - return client - - scenarios( f"{TEST_HARNESS_PATH}/gherkin/flagd.feature", f"{TEST_HARNESS_PATH}/gherkin/flagd-json-evaluator.feature", f"{SPEC_PATH}/specification/assets/gherkin/evaluation.feature", - "./rpc_cache.feature", + f"{TEST_HARNESS_PATH}/gherkin/flagd-rpc-caching.feature", ) diff --git a/providers/openfeature-provider-flagd/tests/test_config.py b/providers/openfeature-provider-flagd/tests/test_config.py index 7b233c87..cc53a029 100644 --- a/providers/openfeature-provider-flagd/tests/test_config.py +++ b/providers/openfeature-provider-flagd/tests/test_config.py @@ -1,29 +1,153 @@ -from openfeature.contrib.provider.flagd.config import Config +import pytest +from openfeature.contrib.provider.flagd.config import ( + DEFAULT_CACHE, + DEFAULT_CACHE_SIZE, + DEFAULT_DEADLINE, + DEFAULT_HOST, + DEFAULT_KEEP_ALIVE, + DEFAULT_OFFLINE_SOURCE_PATH, + DEFAULT_PORT_IN_PROCESS, + DEFAULT_PORT_RPC, + DEFAULT_RESOLVER_TYPE, + DEFAULT_RETRY_BACKOFF, + DEFAULT_STREAM_DEADLINE, + DEFAULT_TLS, + ENV_VAR_CACHE_SIZE, + ENV_VAR_CACHE_TYPE, + ENV_VAR_DEADLINE_MS, + ENV_VAR_HOST, + ENV_VAR_KEEP_ALIVE_TIME_MS, + ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, + ENV_VAR_PORT, + ENV_VAR_RESOLVER_TYPE, + ENV_VAR_RETRY_BACKOFF_MS, + ENV_VAR_STREAM_DEADLINE_MS, + ENV_VAR_TLS, + CacheType, + 
Config, + ResolverType, +) -def test_return_default_values(): + +def test_return_default_values_rpc(): config = Config() - assert config.host == "localhost" - assert config.port == 8013 - assert config.tls is False - assert config.deadline == 500 + assert config.cache == DEFAULT_CACHE + assert config.max_cache_size == DEFAULT_CACHE_SIZE + assert config.deadline_ms == DEFAULT_DEADLINE + assert config.host == DEFAULT_HOST + assert config.keep_alive_time == DEFAULT_KEEP_ALIVE + assert config.offline_flag_source_path == DEFAULT_OFFLINE_SOURCE_PATH + assert config.port == DEFAULT_PORT_RPC + assert config.resolver == DEFAULT_RESOLVER_TYPE + assert config.retry_backoff_ms == DEFAULT_RETRY_BACKOFF + assert config.stream_deadline_ms == DEFAULT_STREAM_DEADLINE + assert config.tls is DEFAULT_TLS + + +def test_return_default_values_in_process(): + config = Config(resolver=ResolverType.IN_PROCESS) + assert config.cache == DEFAULT_CACHE + assert config.max_cache_size == DEFAULT_CACHE_SIZE + assert config.deadline_ms == DEFAULT_DEADLINE + assert config.host == DEFAULT_HOST + assert config.keep_alive_time == DEFAULT_KEEP_ALIVE + assert config.offline_flag_source_path == DEFAULT_OFFLINE_SOURCE_PATH + assert config.port == DEFAULT_PORT_IN_PROCESS + assert config.resolver == ResolverType.IN_PROCESS + assert config.retry_backoff_ms == DEFAULT_RETRY_BACKOFF + assert config.stream_deadline_ms == DEFAULT_STREAM_DEADLINE + assert config.tls is DEFAULT_TLS + +@pytest.fixture(params=ResolverType, scope="module") +def resolver_type(request): + return request.param -def test_overrides_defaults_with_environment(monkeypatch): - monkeypatch.setenv("FLAGD_HOST", "flagd") - monkeypatch.setenv("FLAGD_PORT", "1234") - monkeypatch.setenv("FLAGD_TLS", "true") + +def test_overrides_defaults_with_environment(monkeypatch, resolver_type): # noqa: PLR0915 + cache = CacheType.DISABLED + cache_size = 456 + deadline = 1 + host = "flagd" + keep_alive = 2 + offline_path = "path" + port = 1234 + retry_backoff = 
3 + stream_deadline = 4 + tls = True + + monkeypatch.setenv(ENV_VAR_CACHE_TYPE, str(cache.value)) + monkeypatch.setenv(ENV_VAR_CACHE_SIZE, str(cache_size)) + monkeypatch.setenv(ENV_VAR_DEADLINE_MS, str(deadline)) + monkeypatch.setenv(ENV_VAR_HOST, host) + monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive)) + monkeypatch.setenv(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, offline_path) + monkeypatch.setenv(ENV_VAR_PORT, str(port)) + monkeypatch.setenv(ENV_VAR_RESOLVER_TYPE, str(resolver_type.value)) + monkeypatch.setenv(ENV_VAR_RETRY_BACKOFF_MS, str(retry_backoff)) + monkeypatch.setenv(ENV_VAR_STREAM_DEADLINE_MS, str(stream_deadline)) + monkeypatch.setenv(ENV_VAR_TLS, str(tls)) config = Config() - assert config.host == "flagd" - assert config.port == 1234 - assert config.tls is True + assert config.cache == cache + assert config.max_cache_size == cache_size + assert config.deadline_ms == deadline + assert config.host == host + assert config.keep_alive_time == keep_alive + assert config.offline_flag_source_path == offline_path + assert config.port == port + assert config.resolver == resolver_type + assert config.retry_backoff_ms == retry_backoff + assert config.stream_deadline_ms == stream_deadline + assert config.tls is tls + +def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_type): # noqa: PLR0915 + cache = CacheType.LRU + cache_size = 456 + deadline = 1 + host = "flagd" + keep_alive = 2 + offline_path = "path" + port = 1234 + retry_backoff = 3 + stream_deadline = 4 + tls = True -def test_uses_arguments_over_environments_and_defaults(monkeypatch): - monkeypatch.setenv("FLAGD_HOST", "flagd") + monkeypatch.setenv(ENV_VAR_CACHE_TYPE, str(cache.value) + "value") + monkeypatch.setenv(ENV_VAR_CACHE_SIZE, str(cache_size) + "value") + monkeypatch.setenv(ENV_VAR_DEADLINE_MS, str(deadline) + "value") + monkeypatch.setenv(ENV_VAR_HOST, host + "value") + monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive) + "value") + 
monkeypatch.setenv(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, offline_path + "value") + monkeypatch.setenv(ENV_VAR_PORT, str(port) + "value") + monkeypatch.setenv(ENV_VAR_RESOLVER_TYPE, str(resolver_type) + "value") + monkeypatch.setenv(ENV_VAR_RETRY_BACKOFF_MS, str(retry_backoff) + "value") + monkeypatch.setenv(ENV_VAR_STREAM_DEADLINE_MS, str(stream_deadline) + "value") + monkeypatch.setenv(ENV_VAR_TLS, str(tls) + "value") - config = Config(host="flagd2", port=12345, tls=True) - assert config.host == "flagd2" - assert config.port == 12345 - assert config.tls is True + config = Config( + cache=cache, + max_cache_size=cache_size, + deadline_ms=deadline, + host=host, + port=port, + resolver=resolver_type, + retry_backoff_ms=retry_backoff, + stream_deadline_ms=stream_deadline, + tls=tls, + keep_alive_time=keep_alive, + offline_flag_source_path=offline_path, + ) + assert config.cache == cache + assert config.max_cache_size == cache_size + assert config.deadline_ms == deadline + assert config.host == host + assert config.keep_alive_time == keep_alive + assert config.offline_flag_source_path == offline_path + assert config.port == port + assert config.resolver == resolver_type + assert config.retry_backoff_ms == retry_backoff + assert config.stream_deadline_ms == stream_deadline + assert config.tls is tls diff --git a/providers/openfeature-provider-flagd/tests/test_grpc_sync_connector.py b/providers/openfeature-provider-flagd/tests/test_grpc_sync_connector.py index a1e45fae..95a35be7 100644 --- a/providers/openfeature-provider-flagd/tests/test_grpc_sync_connector.py +++ b/providers/openfeature-provider-flagd/tests/test_grpc_sync_connector.py @@ -35,7 +35,7 @@ def test_invalid_payload(flag_configuration: str): emit_provider_error = MagicMock() flag_store = FlagStore(emit_provider_configuration_changed) watcher = GrpcWatcher( - Config(deadline=200), flag_store, emit_provider_ready, emit_provider_error + Config(deadline_ms=200), flag_store, emit_provider_ready, emit_provider_error ) 
     fake_sync_flags = fake_grpc_service(flag_configuration)
diff --git a/providers/openfeature-provider-ofrep/pyproject.toml b/providers/openfeature-provider-ofrep/pyproject.toml
index 693ec7bf..30a174da 100644
--- a/providers/openfeature-provider-ofrep/pyproject.toml
+++ b/providers/openfeature-provider-ofrep/pyproject.toml
@@ -27,26 +27,36 @@ Homepage = "https://github.com/open-feature/python-sdk-contrib"
 
 [tool.hatch]
 
-[tool.hatch.envs.default]
+[tool.hatch.envs.hatch-test]
 dependencies = [
   "coverage[toml]>=6.5",
   "pytest",
   "requests-mock",
-  "types-requests",
 ]
 
-[tool.hatch.envs.default.scripts]
-test = "pytest {args:tests}"
-test-cov = "coverage run -m pytest {args:tests}"
+[tool.hatch.envs.hatch-test.scripts]
+run = "pytest {args:tests}"
+run-cov = "coverage run -m pytest {args:tests}"
+cov-combine = "coverage combine"
 cov-report = [
   "coverage xml",
   "coverage html",
+  "coverage report",
 ]
 cov = [
-  "test-cov",
+  "run-cov",
   "cov-report",
 ]
 
+[tool.hatch.envs.mypy]
+dependencies = [
+  "mypy[faster-cache]>=1.13.0",
+  "types-requests",
+]
+
+[tool.hatch.envs.mypy.scripts]
+run = "mypy"
+
 [tool.hatch.build.targets.sdist]
 exclude = [
   ".gitignore",
@@ -60,3 +70,16 @@ packages = ["src/openfeature"]
 omit = [
   "tests/**",
 ]
+
+[tool.mypy]
+mypy_path = "src"
+files = "src"
+
+python_version = "3.8" # should be identical to the minimum supported version
+namespace_packages = true
+explicit_package_bases = true
+local_partial_types = true
+pretty = true
+
+strict = true
+disallow_any_generics = false