diff --git a/Dockerfile b/Dockerfile index 5368f213..2f0be4b7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ ENV PDM_USE_VENV=no ENV PYTHONPATH=/work/__pypackages__/3.7/lib RUN apt-get update && apt-get install -y libpq-dev && \ - pip install --upgrade pip==${PIP_VERSION} && pip install pdm==${PDM_VERSION} + pip install --upgrade pip==${PIP_VERSION} && pip install pdm==${PDM_VERSION} # for pyproject.toml to extract version COPY hiku/__init__.py ./hiku/__init__.py @@ -26,7 +26,7 @@ RUN pdm sync -G dev FROM base as docs -RUN pdm sync -G docs +RUN pdm sync -G dev -G docs FROM base as tests diff --git a/MANIFEST.in b/MANIFEST.in index 8eb7ad22..865ffa5c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,4 +2,3 @@ recursive-include hiku *.js recursive-include hiku *.jsx recursive-include hiku *.css recursive-include hiku *.html -recursive-include hiku *.proto diff --git a/README.rst b/README.rst index 6343ffb7..2ef30391 100644 --- a/README.rst +++ b/README.rst @@ -27,7 +27,6 @@ Optional dependencies ~~~~~~~~~~~~~~~~~~~~~ * `graphql-core` - for GraphQL support -* `protobuf` - for Protobuf support * `sqlalchemy` - for SQLAlchemy support as a data-source * `aiopg` - for async PostgreSQL support with `aiopg` * `asyncpg` - for async PostgreSQL support with `asyncpg` @@ -39,7 +38,7 @@ Highlights ~~~~~~~~~~ * Not coupled to a single specific query language -* Flexibility in result serialization, including binary formats +* Flexibility in result serialization * Natively uses normalized result representation, without data duplication * All concurrency models supported: coroutines, threads * Parallel query execution @@ -85,13 +84,13 @@ Query: .. code-block:: python - from hiku.engine import Engine + from hiku.schema import Schema from hiku.builder import Q, build from hiku.executors.sync import SyncExecutor - engine = Engine(SyncExecutor()) + schema = Schema(SyncExecutor(), GRAPH) - result = engine.execute_query(GRAPH, build([ + result = schema.execute_sync(build([ Q.characters[ Q.name, Q.species, @@ -99,8 +98,8 @@ Query: ])) # use result in your code - for character in result.characters: - print(character.name, '-', character.species) + for character in result.data["characters"]: + print(character["name"], '-', character["species"]) Output: diff --git a/docker-compose.yaml b/docker-compose.yaml index 19049c96..ee000ac4 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,4 +1,4 @@ -version: '3.7' +version: "3.7" services: base: &base diff --git a/docs/conf.py b/docs/conf.py index cc6f8d3c..77735337 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,33 +1,31 @@ extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", ] -autoclass_content = 'both' -autodoc_member_order = 'bysource' +autoclass_content = "both" +autodoc_member_order = "bysource" intersphinx_mapping = { - 'python': ('https://docs.python.org/3.6', None), - 'aiopg': ('http://aiopg.readthedocs.io/en/stable', None), - 'sqlalchemy': ('http://docs.sqlalchemy.org/en/rel_1_1', None), + "python": ("https://docs.python.org/3.6", None), + "aiopg": ("http://aiopg.readthedocs.io/en/stable", None), + "sqlalchemy": ("http://docs.sqlalchemy.org/en/rel_1_1", None), } -source_suffix = '.rst' -master_doc = 'index' +source_suffix = ".rst" +master_doc = "index" -project = 'hiku' -copyright = '2019, Vladimir Magamedov' -author = 'Vladimir Magamedov' +project = "hiku" +copyright = "2019, Vladimir Magamedov" +author = "Vladimir Magamedov" templates_path = [] -html_theme = 'furo' 
-html_static_path = ['_static'] -html_theme_options = { - 'display_version': False, -} +html_theme = "furo" +html_static_path = ["_static"] +html_theme_options = {} def setup(app): - app.add_css_file('style.css?r=1') - app.add_css_file('fixes.css?r=1') + app.add_css_file("style.css?r=1") + app.add_css_file("fixes.css?r=1") diff --git a/docs/example.proto b/docs/example.proto deleted file mode 100644 index 60fdee9b..00000000 --- a/docs/example.proto +++ /dev/null @@ -1,10 +0,0 @@ -syntax = "proto3"; - -message Character { - string name = 1; - string species = 2; -} - -message Root { - repeated Character characters = 1; -} diff --git a/docs/example_pb2.py b/docs/example_pb2.py deleted file mode 100644 index 89f6ebc8..00000000 --- a/docs/example_pb2.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: docs/example.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x12\x64ocs/example.proto\"*\n\tCharacter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07species\x18\x02 \x01(\t\"&\n\x04Root\x12\x1e\n\ncharacters\x18\x01 \x03(\x0b\x32\n.Characterb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'docs.example_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - _globals['_CHARACTER']._serialized_start=22 - _globals['_CHARACTER']._serialized_end=64 - _globals['_ROOT']._serialized_start=66 - _globals['_ROOT']._serialized_end=104 -# @@protoc_insertion_point(module_scope) diff --git a/docs/index.rst b/docs/index.rst index 98d1d644..049129c0 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -11,7 +11,6 @@ User's Guide subgraph asyncio graphql - protobuf scalars enums interfaces diff --git a/docs/protobuf.rst b/docs/protobuf.rst deleted file mode 100644 index d9f18ef5..00000000 --- a/docs/protobuf.rst +++ /dev/null @@ -1,107 +0,0 @@ -Using Protocol Buffers -====================== - -Query format -~~~~~~~~~~~~ - -It is possible to serialize queries using `Protocol Buffers`_ in binary form, -instead of sending it as a text in edn_ or GraphQL_ format. - -Hiku has a :doc:`hiku/protobuf/query.proto ` file, -which describes message types for query serialization. - -Here is how they can be used to build query: - -.. literalinclude:: test_protobuf.py - :lines: 20-28 - :dedent: 4 - -This query is equivalent to this query in edn_ format: - -.. code-block:: clojure - - [{:characters [:name]}] - -And is equivalent to this query in GraphQL_ format: - -.. code-block:: javascript - - { - characters { - name - } - } - -.. note:: Protocol Buffers has it's own ability to specify requirements for - `get` operations -- ``google.protobuf.FieldMask``, but this approach has - limitations for our use. For example, field options can't be expressed with - field masks. So it would be hard to utilize field masks for our use case - and that's why Hiku provides it's own message types for queries. 
- -Query export -~~~~~~~~~~~~ - -In Python it is not required to use example above in order to build query using -Protocol Buffers message classes. Hiku provides handy -:py:func:`~hiku.export.protobuf.export` function to transform query into -Protocol Buffers message: - -.. literalinclude:: test_protobuf.py - :lines: 30-42 - :dedent: 4 - -Query reading -~~~~~~~~~~~~~ - -In order to execute query, Hiku provides :py:func:`~hiku.readers.protobuf.read` -function, which can be used to deserialize query from Protocol Buffers message: - -.. literalinclude:: test_protobuf.py - :lines: 52-56 - :dedent: 4 - -Result serialization -~~~~~~~~~~~~~~~~~~~~ - -The main advantage of using Hiku with Protocol Buffers is to give efficient -binary format for result serialization, which can be safely read in any other -language with Protocol Buffers support. - -.. note:: Hiku is only suitable with latest 3rd version of Protocol Buffers - format. This is because only in 3rd version of Protocol Buffers all message - fields are *strictly optional*, and this opens possibility for clients to - express their requirements and server will return only what client cares - about. - -Here is our example of a graph, similar to the one from :doc:`basics`: - -.. literalinclude:: test_protobuf.py - :lines: 7-16 - -This graph can be expressed like this in ``example.proto`` file: - -.. literalinclude:: example.proto - :language: proto - -Now we can generate ``example_pb2.py`` file from ``example.proto`` by using -``protoc`` compiler. And this is how it can be used to serialize result: - -.. literalinclude:: test_protobuf.py - :lines: 62-75 - :dedent: 4 - -In this example server will compute only ``name`` field of the ``Character`` -type, because this is the only field, which was specified in the query. - -.. note:: Using Protocol Buffers "as is" still not the most efficient way to - send result back to the client. Result is sent in denormalized form, so it - contains duplicates of the same data. More efficient way would be more - complicated and probably will be implemented in the future. See - `Netflix/Falcor`_ and `Om Next`_ as examples of using normalized results. - See also how Hiku stores result internally: :py:mod:`hiku.result` - -.. _Protocol Buffers: https://github.com/google/protobuf -.. _GraphQL: http://facebook.github.io/graphql/ -.. _edn: https://github.com/edn-format/edn -.. _Netflix/Falcor: http://netflix.github.io/falcor/ -.. _Om Next: https://github.com/omcljs/om diff --git a/docs/reference/index.rst b/docs/reference/index.rst index 23be987f..a7d1d6be 100644 --- a/docs/reference/index.rst +++ b/docs/reference/index.rst @@ -9,4 +9,3 @@ Reference result expr readers - protobuf/query diff --git a/docs/reference/protobuf/query.rst b/docs/reference/protobuf/query.rst deleted file mode 100644 index a04afe8f..00000000 --- a/docs/reference/protobuf/query.rst +++ /dev/null @@ -1,4 +0,0 @@ -hiku/protobuf/query.proto -========================= - -.. literalinclude:: ../../../hiku/protobuf/query.proto diff --git a/docs/reference/readers.rst b/docs/reference/readers.rst index 66f479d8..805c1900 100644 --- a/docs/reference/readers.rst +++ b/docs/reference/readers.rst @@ -1,5 +1,2 @@ -.. automodule:: hiku.readers.simple - :members: read - -.. automodule:: hiku.readers.protobuf - :members: read +.. 
automodule:: hiku.readers.graphql + :members: read, read_operation, Operation, OperationType diff --git a/docs/test_asyncio.py b/docs/test_asyncio.py index 6b952f8a..f424942c 100644 --- a/docs/test_asyncio.py +++ b/docs/test_asyncio.py @@ -11,65 +11,77 @@ metadata = MetaData() character_table = Table( - 'character', + "character", metadata, - Column('id', Integer, primary_key=True), - Column('name', String), - Column('species', String), + Column("id", Integer, primary_key=True), + Column("name", String), + Column("species", String), ) actor_table = Table( - 'actor', + "actor", metadata, - Column('id', Integer, primary_key=True), - Column('name', String), - Column('character_id', ForeignKey('character.id'), nullable=False), + Column("id", Integer, primary_key=True), + Column("name", String), + Column("character_id", ForeignKey("character.id"), nullable=False), ) # setup test environment import aiopg.sa + async def init_db(pg_dsn, *, loop): - db_name = 'test_{}'.format(uuid.uuid4().hex) + db_name = "test_{}".format(uuid.uuid4().hex) async with aiopg.sa.create_engine(pg_dsn, loop=loop) as db_engine: async with db_engine.acquire() as conn: - await conn.execute('CREATE DATABASE {0}'.format(db_name)) + await conn.execute("CREATE DATABASE {0}".format(db_name)) return db_name + async def setup_db(db_dsn, *, loop): async with aiopg.sa.create_engine(db_dsn, loop=loop) as db_engine: async with db_engine.acquire() as conn: await conn.execute(CreateTable(character_table)) await conn.execute(CreateTable(actor_table)) - await conn.execute(character_table.insert().values([ - dict(id=1, name='James T. Kirk', species='Human'), - dict(id=2, name='Spock', species='Vulcan/Human'), - dict(id=3, name='Leonard McCoy', species='Human'), - ])) - await conn.execute(actor_table.insert().values([ - dict(id=1, character_id=1, name='William Shatner'), - dict(id=2, character_id=2, name='Leonard Nimoy'), - dict(id=3, character_id=3, name='DeForest Kelley'), - dict(id=4, character_id=1, name='Chris Pine'), - dict(id=5, character_id=2, name='Zachary Quinto'), - dict(id=6, character_id=3, name='Karl Urban'), - ])) + await conn.execute( + character_table.insert().values( + [ + dict(id=1, name="James T. 
Kirk", species="Human"), + dict(id=2, name="Spock", species="Vulcan/Human"), + dict(id=3, name="Leonard McCoy", species="Human"), + ] + ) + ) + await conn.execute( + actor_table.insert().values( + [ + dict(id=1, character_id=1, name="William Shatner"), + dict(id=2, character_id=2, name="Leonard Nimoy"), + dict(id=3, character_id=3, name="DeForest Kelley"), + dict(id=4, character_id=1, name="Chris Pine"), + dict(id=5, character_id=2, name="Zachary Quinto"), + dict(id=6, character_id=3, name="Karl Urban"), + ] + ) + ) + async def drop_db(pg_dsn, db_name, *, loop): async with aiopg.sa.create_engine(pg_dsn, loop=loop) as db_engine: async with db_engine.acquire() as conn: - await conn.execute('DROP DATABASE {0}'.format(db_name)) + await conn.execute("DROP DATABASE {0}".format(db_name)) + -@pytest.fixture(scope='session', name='db_dsn') +@pytest.fixture(scope="session", name="db_dsn") def db_dsn_fixture(request): loop = asyncio.get_event_loop() - pg_dsn = 'postgresql://postgres:postgres@postgres:5432/postgres' + pg_dsn = "postgresql://postgres:postgres@postgres:5432/postgres" db_name = loop.run_until_complete(init_db(pg_dsn, loop=loop)) - db_dsn = 'postgresql://postgres:postgres@postgres:5432/{}'.format(db_name) + db_dsn = "postgresql://postgres:postgres@postgres:5432/{}".format(db_name) loop.run_until_complete(setup_db(db_dsn, loop=loop)) def fin(): @@ -78,6 +90,7 @@ def fin(): request.addfinalizer(fin) return db_dsn + # define graph from hiku.graph import Graph, Root, Node, Link, Field @@ -85,7 +98,7 @@ def fin(): from hiku.engine import pass_context from hiku.sources.aiopg import FieldsQuery, LinkQuery -SA_ENGINE_KEY = 'sa-engine' +SA_ENGINE_KEY = "sa-engine" character_query = FieldsQuery(SA_ENGINE_KEY, character_table) @@ -97,9 +110,11 @@ def fin(): to_column=actor_table.c.id, ) + async def direct_link(ids): return ids + @pass_context async def to_characters_query(ctx): query = select([character_table.c.id]) @@ -107,6 +122,7 @@ async def to_characters_query(ctx): rows = await conn.execute(query) return [row.id async for row in rows] + @pass_context async def to_actors_query(ctx): query = select([actor_table.c.id]) @@ -114,28 +130,52 @@ async def to_actors_query(ctx): rows = await conn.execute(query) return [row.id async for row in rows] -GRAPH = Graph([ - Node('Character', [ - Field('id', None, character_query), - Field('name', None, character_query), - Field('species', None, character_query), - Link('actors', Sequence[TypeRef['Actor']], character_to_actors_query, - requires='id'), - ]), - Node('Actor', [ - Field('id', None, actor_query), - Field('name', None, actor_query), - Field('character_id', None, actor_query), - Link('character', TypeRef['Character'], - direct_link, requires='character_id'), - ]), - Root([ - Link('characters', Sequence[TypeRef['Character']], - to_characters_query, requires=None), - Link('actors', Sequence[TypeRef['Actor']], - to_actors_query, requires=None), - ]), -]) + +GRAPH = Graph( + [ + Node( + "Character", + [ + Field("id", None, character_query), + Field("name", None, character_query), + Field("species", None, character_query), + Link( + "actors", + Sequence[TypeRef["Actor"]], + character_to_actors_query, + requires="id", + ), + ], + ), + Node( + "Actor", + [ + Field("id", None, actor_query), + Field("name", None, actor_query), + Field("character_id", None, actor_query), + Link( + "character", + TypeRef["Character"], + direct_link, + requires="character_id", + ), + ], + ), + Root( + [ + Link( + "characters", + Sequence[TypeRef["Character"]], + 
to_characters_query, + requires=None, + ), + Link( + "actors", Sequence[TypeRef["Actor"]], to_actors_query, requires=None + ), + ] + ), + ] +) # test graph @@ -143,77 +183,85 @@ async def to_actors_query(ctx): from hiku.engine import Engine from hiku.result import denormalize -from hiku.readers.simple import read +from hiku.readers.graphql import read from hiku.executors.asyncio import AsyncIOExecutor + async def execute(hiku_engine, sa_engine, graph, query_string): query = read(query_string) result = await hiku_engine.execute(graph, query, {SA_ENGINE_KEY: sa_engine}) return denormalize(graph, result) + @pytest.mark.asyncio(forbid_global_loop=True) async def test_character_to_actors(db_dsn, event_loop): hiku_engine = Engine(AsyncIOExecutor(event_loop)) async with aiopg.sa.create_engine(db_dsn, loop=event_loop) as sa_engine: - result = await execute(hiku_engine, sa_engine, GRAPH, - '[{:characters [:name {:actors [:name]}]}]') + result = await execute( + hiku_engine, + sa_engine, + GRAPH, + "{ characters { name actors { name } } }", + ) assert result == { - 'characters': [ + "characters": [ { - 'name': 'James T. Kirk', - 'actors': [ - {'name': 'William Shatner'}, - {'name': 'Chris Pine'}, + "name": "James T. Kirk", + "actors": [ + {"name": "William Shatner"}, + {"name": "Chris Pine"}, ], }, { - 'name': 'Spock', - 'actors': [ - {'name': 'Leonard Nimoy'}, - {'name': 'Zachary Quinto'}, + "name": "Spock", + "actors": [ + {"name": "Leonard Nimoy"}, + {"name": "Zachary Quinto"}, ], }, { - 'name': 'Leonard McCoy', - 'actors': [ - {'name': 'DeForest Kelley'}, - {'name': 'Karl Urban'}, + "name": "Leonard McCoy", + "actors": [ + {"name": "DeForest Kelley"}, + {"name": "Karl Urban"}, ], }, ], } + @pytest.mark.asyncio(forbid_global_loop=True) async def test_actor_to_character(db_dsn, event_loop): hiku_engine = Engine(AsyncIOExecutor(event_loop)) async with aiopg.sa.create_engine(db_dsn, loop=event_loop) as sa_engine: - result = await execute(hiku_engine, sa_engine, GRAPH, - '[{:actors [:name {:character [:name]}]}]') + result = await execute( + hiku_engine, sa_engine, GRAPH, "{ actors { name character { name } } }" + ) assert result == { - 'actors': [ + "actors": [ { - 'name': 'William Shatner', - 'character': {'name': 'James T. Kirk'}, + "name": "William Shatner", + "character": {"name": "James T. Kirk"}, }, { - 'name': 'Leonard Nimoy', - 'character': {'name': 'Spock'}, + "name": "Leonard Nimoy", + "character": {"name": "Spock"}, }, { - 'name': 'DeForest Kelley', - 'character': {'name': 'Leonard McCoy'}, + "name": "DeForest Kelley", + "character": {"name": "Leonard McCoy"}, }, { - 'name': 'Chris Pine', - 'character': {'name': 'James T. 
Kirk"}, }, { - 'name': 'Zachary Quinto', - 'character': {'name': 'Spock'}, + "name": "Zachary Quinto", + "character": {"name": "Spock"}, }, { - 'name': 'Karl Urban', - 'character': {'name': 'Leonard McCoy'}, + "name": "Karl Urban", + "character": {"name": "Leonard McCoy"}, }, ], } diff --git a/docs/test_protobuf.py b/docs/test_protobuf.py deleted file mode 100644 index 95b2981c..00000000 --- a/docs/test_protobuf.py +++ /dev/null @@ -1,58 +0,0 @@ -from hiku.graph import Graph, Node, Root, Field, Link -from hiku.types import String, Sequence, TypeRef - -from basics.test_stage2 import hiku_engine, to_characters_link, character_data - - -GRAPH = Graph([ - Node('Character', [ - Field('name', String, character_data), - Field('species', String, character_data), - ]), - Root([ - Link('characters', Sequence[TypeRef['Character']], - to_characters_link, requires=None), - ]), -]) - - -def test_query_export(): - from hiku.protobuf import query_pb2 - - node = query_pb2.Node() - - link = node.items.add().link - link.name = 'characters' - - field = link.node.items.add().field - field.name = 'name' - - from hiku.builder import build, Q - from hiku.export.protobuf import export - - query = build([ - Q.characters[ - Q.name, - ], - ]) - - message = export(query) - assert message == node - - binary_message = message.SerializeToString() - assert binary_message - - -def test_query_reading(): - from hiku.builder import build, Q - from hiku.export.protobuf import export - - binary_message = export(build([Q.characters[Q.name]])).SerializeToString() - - from hiku.readers.protobuf import read - - query = read(binary_message) - - result = hiku_engine.execute(graph, query) - - assert all(c['name'] for c in result['characters']) diff --git a/hiku/edn.py b/hiku/edn.py deleted file mode 100644 index e100d93a..00000000 --- a/hiku/edn.py +++ /dev/null @@ -1,388 +0,0 @@ -""" -Based on the code from https://github.com/gns24/pydatomic project -""" -from uuid import UUID -from decimal import Decimal -from datetime import datetime -from itertools import chain -from json.encoder import encode_basestring, encode_basestring_ascii # type: ignore # noqa: E501 - -from hiku.utils import ImmutableDict - - -class Symbol(str): - def __repr__(self): - return self - - def __eq__(self, other): - return isinstance(other, type(self)) and super(Symbol, self).__eq__( - other - ) - - def __hash__(self): - return super(Symbol, self).__hash__() - - -class Keyword(str): - def __repr__(self): - return ":{}".format(self) - - def __eq__(self, other): - return isinstance(other, type(self)) and super(Keyword, self).__eq__( - other - ) - - def __hash__(self): - return super(Keyword, self).__hash__() - - -class List(tuple): - def __repr__(self): - return "[{}]".format(" ".join(map(repr, self))) - - -class Tuple(tuple): - def __repr__(self): - return "({})".format(" ".join(map(repr, self))) - - -class Dict(ImmutableDict): - def __repr__(self): - return "{{{}}}".format( - " ".join("{!r} {!r}".format(*i) for i in self.items()) - ) - - -class Set(frozenset): - def __repr__(self): - return "#{{{}}}".format(" ".join(map(repr, self))) - - -class TaggedElement: - def __init__(self, name, value): - self.name = name - self.value = value - - def __repr__(self): - return "#{} {!r}".format(self.name, self.value) - - def __eq__(self, other): - return ( - isinstance(other, type(self)) - and self.name == other.name - and self.value == other.value - ) - - -def coroutine(func): - def start(*args, **kwargs): - cr = func(*args, **kwargs) - next(cr) - return cr - - return 
start - - -@coroutine -def appender(lst): - while True: - v = yield - lst.append(v) - - -def inst_handler(time_string): - return datetime.strptime(time_string, "%Y-%m-%dT%H:%M:%S.%fZ") - - -TAG_HANDLERS = {"inst": inst_handler, "uuid": UUID} - -STOP_CHARS = " ,\n\r\t" - -_CHAR_HANDLERS = { - "newline": "\n", - "space": " ", - "tab": "\t", -} - -_CHAR_MAP = { - "a": "\a", - "b": "\b", - "f": "\f", - "n": "\n", - "r": "\r", - "t": "\t", - "v": "\v", -} - -_END_CHARS = { - "#": "}", - "{": "}", - "[": "]", - "(": ")", -} - -_NIL = object() - - -@coroutine -def tag_handler(tag_name, tag_handlers): - while True: - c = yield - if c in STOP_CHARS + '{"[(\\#': - break - tag_name += c - elements = [] - handler = parser(appender(elements), tag_handlers) - handler.send(c) - while not elements: - v = yield - handler.send(v) - if tag_name in tag_handlers: - yield tag_handlers[tag_name](elements[0]), True - else: - yield TaggedElement(tag_name, elements[0]), True - yield None, True - - -@coroutine -def character_handler(): - r = yield - while 1: - c = yield - if not c.isalpha(): - if len(r) == 1: - yield r, False - else: - yield _CHAR_HANDLERS[r], False - r += c - - -def parse_number(s): - s = s.rstrip("MN").upper() - if "E" not in s and "." not in s: - return int(s) - return float(s) - - -@coroutine -def number_handler(s): - while 1: - c = yield - if c in "0123456789+-eEMN.": - s += c - else: - yield parse_number(s), False - - -@coroutine -def symbol_handler(s): - while 1: - c = yield - if c in "}])" + STOP_CHARS: - if s[0] == ":": - yield Keyword(s[1:]), False - elif s == "true": - yield True, False - elif s == "false": - yield False, False - elif s == "nil": - yield _NIL, False - else: - yield Symbol(s), False - else: - s += c - - -@coroutine -def parser(target, tag_handlers, stop=None): - handler = None - while True: - c = yield - if handler: - v = handler.send(c) - if v is None: - continue - else: - handler = None - v, consumed = v - if v is not None: - if v is _NIL: - target.send(None) - else: - target.send(v) - if consumed: - continue - if c == stop: - return - if c in STOP_CHARS: - continue - if c == ";": - v = yield - while v != "\n": - pass - elif c == '"': - chars = [] - while 1: - char = yield - if char == "\\": - char = yield - char2 = _CHAR_MAP.get(char) - if char2 is not None: - chars.append(char2) - else: - chars.append(char) - elif char == '"': - target.send("".join(chars)) - break - else: - chars.append(char) - elif c == "\\": - handler = character_handler() - elif c in "0123456789": - handler = number_handler(c) - elif c in "-.": - c2 = yield - if c2.isdigit(): # .5 should be an error - handler = number_handler(c + c2) - else: - handler = symbol_handler(c + c2) - elif c.isalpha() or c == ":": - handler = symbol_handler(c) - elif c in "[({#": - if c == "#": - c2 = yield - if c2 != "{": - handler = tag_handler(c2, tag_handlers) - continue - end_char = _END_CHARS[c] - lst = [] - p = parser(appender(lst), tag_handlers, stop=end_char) - try: - while 1: - v = yield - p.send(v) - except StopIteration: - pass - if c == "[": - target.send(List(lst)) - elif c == "(": - target.send(Tuple(lst)) - elif c == "{": - if len(lst) % 2: - raise Exception( - "Map literal must contain an even " "number of elements" - ) - target.send(Dict(zip(lst[::2], lst[1::2]))) - else: - target.send(Set(lst)) - else: - raise ValueError("Unexpected character in edn", c) - - -def loads(s, tag_handlers=None): - if not isinstance(s, str): - raise TypeError( - 'The EDN value must be "str", not {!r}'.format(type(s).__name__) 
- ) - lst = [] - target = parser(appender(lst), dict(tag_handlers or (), **TAG_HANDLERS)) - for c in s: - target.send(c) - target.send(" ") - if len(lst) != 1: - raise ValueError( - "Expected exactly one top-level element " "in edn string", s - ) - return lst[0] - - -def _iterencode_items(items, default, encoder): - items_iter = iter(items) - try: - first = next(items_iter) - except StopIteration: - return - for chunk in _iterencode(first, default, encoder): - yield chunk - while True: - try: - next_item = next(items_iter) - except StopIteration: - return - yield " " - for chunk in _iterencode(next_item, default, encoder): - yield chunk - - -def _default(obj): - raise ValueError("{!r} is not EDN serializable".format(obj)) - - -def _iterencode(obj, default, encoder): - if obj is None: - yield "nil" - elif obj is True: - yield "true" - elif obj is False: - yield "false" - elif isinstance(obj, int): - yield str(int(obj)) - elif isinstance(obj, float): - # FIXME: proper float encoding - yield str(float(obj)) - elif isinstance(obj, Decimal): - yield "{}M".format(obj) - elif isinstance(obj, Keyword): - yield ":{}".format(obj) - elif isinstance(obj, Symbol): - yield obj - elif isinstance(obj, str): - yield encoder(obj) - elif isinstance(obj, (list, List)): - # NOTE: `(list, List)` check should be before `(tuple, Tuple)`, - # because `List` is implemented as tuple subclass - yield "[" - for chunk in _iterencode_items(obj, default, encoder): - yield chunk - yield "]" - elif isinstance(obj, (tuple, Tuple)): - yield "(" - for chunk in _iterencode_items(obj, default, encoder): - yield chunk - yield ")" - elif isinstance(obj, (dict, Dict)): - yield "{" - for chunk in _iterencode_items( - chain.from_iterable(obj.items()), default, encoder - ): - yield chunk - yield "}" - elif isinstance(obj, (set, Set)): - yield "#{" - for chunk in _iterencode_items(obj, default, encoder): - yield chunk - yield "}" - elif isinstance(obj, datetime): - # FIXME: proper RFC-3339 encoding - assert not obj.tzinfo - yield obj.strftime('#inst "%Y-%m-%dT%H:%M:%S.%fZ"') - elif isinstance(obj, UUID): - yield '#uuid "{}"'.format(obj) - elif isinstance(obj, TaggedElement): - yield "#{} ".format(obj.name) - for chunk in _iterencode(obj.value, _default, encoder): - yield chunk - else: - obj = default(obj) - for chunk in _iterencode(obj, default, encoder): - yield chunk - - -def dumps(obj, default=None, ensure_ascii=True): - if default is None: - default = _default - if ensure_ascii: - encoder = encode_basestring_ascii - else: - encoder = encode_basestring - return "".join(_iterencode(obj, default, encoder)) diff --git a/hiku/export/protobuf.py b/hiku/export/protobuf.py deleted file mode 100644 index aed14c23..00000000 --- a/hiku/export/protobuf.py +++ /dev/null @@ -1,36 +0,0 @@ -from google.protobuf.json_format import ParseDict - -from ..query import QueryVisitor -from ..protobuf import query_pb2 - - -class Exporter(QueryVisitor): - def __init__(self, node): - self.stack = [node] - - def visit_field(self, obj): - field = self.stack[-1].items.add().field - field.name = obj.name - if obj.options is not None: - ParseDict(obj.options, field.options) - - def visit_link(self, obj): - link = self.stack[-1].items.add().link - link.name = obj.name - if obj.options is not None: - ParseDict(obj.options, link.options) - self.stack.append(link.node) - try: - self.visit(obj.node) - finally: - self.stack.pop() - - def visit_node(self, obj): - for item in obj.fields: - self.visit(item) - - -def export(query): - node = query_pb2.Node() - 
Exporter(node).visit(query) - return node diff --git a/hiku/export/simple.py b/hiku/export/simple.py deleted file mode 100644 index 96b73269..00000000 --- a/hiku/export/simple.py +++ /dev/null @@ -1,38 +0,0 @@ -from ..edn import Keyword, Dict, Tuple, List, Set -from ..query import QueryVisitor - - -def _encode(value): - if value is None: - return value - elif isinstance(value, (str, bool, int, float)): - return value - elif isinstance(value, list): - return List(_encode(val) for val in value) - elif isinstance(value, dict): - return Dict((Keyword(key), _encode(val)) for key, val in value.items()) - elif isinstance(value, (set, frozenset)): - return Set(_encode(val) for val in value) - else: - raise TypeError("Unsupported type: {!r}".format(value)) - - -class Exporter(QueryVisitor): - def visit_field(self, obj): - f = Keyword(obj.name) - if obj.options is not None: - f = Tuple([f, _encode(obj.options)]) - return f - - def visit_link(self, obj): - lnk = Keyword(obj.name) - if obj.options is not None: - lnk = Tuple([lnk, _encode(obj.options)]) - return Dict([(lnk, self.visit(obj.node))]) - - def visit_node(self, obj): - return List(self.visit(f) for f in obj.fields) - - -def export(query): - return Exporter().visit(query) diff --git a/hiku/expr/core.py b/hiku/expr/core.py index f3286a6a..c7364130 100644 --- a/hiku/expr/core.py +++ b/hiku/expr/core.py @@ -1,10 +1,11 @@ """ - hiku.expr.core - ~~~~~~~~~~~~~~ +hiku.expr.core +~~~~~~~~~~~~~~ - Expression building blocks +Expression building blocks """ + import typing as t from functools import wraps @@ -12,7 +13,7 @@ from collections import namedtuple from ..compat import ParamSpec -from ..edn import loads + from ..query import Node as QueryNode, Link, Field, Base as QueryBase from ..types import ( Record, @@ -20,7 +21,6 @@ Any, GenericMeta, ) -from ..readers.simple import transform from .nodes import Symbol, Tuple, List, Keyword, Dict, Node @@ -142,17 +142,7 @@ def expr(*args: P.args, **kw: P.kwargs) -> _Func: expr.__def_name__ = name # type: ignore[attr-defined] expr.__def_body__ = fn # type: ignore[attr-defined] - - if len(types) == 1 and isinstance(types[0], str): - reqs_list = loads(str(types[0])) - expr.__def_type__ = Callable[ # type: ignore[attr-defined] - [ - (_query_to_types(transform(r)) if r is not None else Any) - for r in reqs_list - ] - ] - else: - expr.__def_type__ = Callable[types] # type: ignore[attr-defined] + expr.__def_type__ = Callable[types] # type: ignore[attr-defined] return expr return decorator diff --git a/hiku/protobuf/__init__.py b/hiku/protobuf/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/hiku/protobuf/query.proto b/hiku/protobuf/query.proto deleted file mode 100644 index 4fc2660a..00000000 --- a/hiku/protobuf/query.proto +++ /dev/null @@ -1,27 +0,0 @@ -syntax = "proto3"; - -package hiku.protobuf.query; - -import "google/protobuf/struct.proto"; - -message Field { - string name = 1; - google.protobuf.Struct options = 3; -} - -message Link { - string name = 1; - Node node = 2; - google.protobuf.Struct options = 4; -} - -message Item { - oneof value { - Field field = 1; - Link link = 2; - } -} - -message Node { - repeated Item items = 1; -} diff --git a/hiku/protobuf/query_pb2.py b/hiku/protobuf/query_pb2.py deleted file mode 100644 index ec9e6317..00000000 --- a/hiku/protobuf/query_pb2.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: hiku/protobuf/query.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x19hiku/protobuf/query.proto\x12\x13hiku.protobuf.query\x1a\x1cgoogle/protobuf/struct.proto"?\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07options\x18\x03 \x01(\x0b\x32\x17.google.protobuf.Struct"g\n\x04Link\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\'\n\x04node\x18\x02 \x01(\x0b\x32\x19.hiku.protobuf.query.Node\x12(\n\x07options\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct"g\n\x04Item\x12+\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1a.hiku.protobuf.query.FieldH\x00\x12)\n\x04link\x18\x02 \x01(\x0b\x32\x19.hiku.protobuf.query.LinkH\x00\x42\x07\n\x05value"0\n\x04Node\x12(\n\x05items\x18\x01 \x03(\x0b\x32\x19.hiku.protobuf.query.Itemb\x06proto3' -) - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, "hiku.protobuf.query_pb2", _globals -) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - _globals["_FIELD"]._serialized_start = 80 - _globals["_FIELD"]._serialized_end = 143 - _globals["_LINK"]._serialized_start = 145 - _globals["_LINK"]._serialized_end = 248 - _globals["_ITEM"]._serialized_start = 250 - _globals["_ITEM"]._serialized_end = 353 - _globals["_NODE"]._serialized_start = 355 - _globals["_NODE"]._serialized_end = 403 -# @@protoc_insertion_point(module_scope) diff --git a/hiku/query.py b/hiku/query.py index b75e3b4c..3582d1a1 100644 --- a/hiku/query.py +++ b/hiku/query.py @@ -1,49 +1,39 @@ """ - hiku.query - ~~~~~~~~~~ +hiku.query +~~~~~~~~~~ - `Hiku` doesn't rely on any specific query language, internally it uses - generic query representation to describe result and it could be constructed - by parsing different suitable query languages. +`Hiku` doesn't rely on any specific query language; internally it uses a +generic query representation to describe results, and it can be constructed +by parsing different suitable query languages. - However, `Hiku` provides one built-in way to describe result -- `edn`_ data - structure -- `simple` queries, which are very similar to the `om.next`_ - queries, which are inspired by `Datomic Pull API`_. +However, `Hiku` provides a built-in way to parse a GraphQL +query into a `hiku.query.Node`: - - ``[:foo]`` - node fields definition (`edn` keywords in vector) - - ``{:bar [:baz]}`` - link definition (`edn` map with keyword and vector) - - ``(:foo {:key val})`` - field or link options definition (field name or - link name, wrapped with `edn` list with map of options as a second - value) +Example: - Example: +.. code-block:: graphql - .. code-block:: clojure + { foo bar { baz } } - [:foo {:bar [:baz]}] +This query will be read internally as: - This query will be read internally as: +.. code-block:: python - .. code-block:: python + Node([Field('foo'), + Link('bar', Node([Field('baz')]))]) - Node([Field('foo'), - Link('bar', Node([Field('baz')]))]) +And the query result will look like this: - And query result will look like this: +.. 
code-block:: python - - { - 'foo': 1, - 'bar': { - 'baz': 2, - }, - } - - .. _edn: https://github.com/edn-format/edn - .. _Datomic Pull API: http://docs.datomic.com/pull.html - .. _om.next: https://github.com/omcljs/om/wiki/Documentation-(om.next) + { + 'foo': 1, + 'bar': { + 'baz': 2, + }, + } """ + import typing as t import hashlib diff --git a/hiku/readers/graphql.py b/hiku/readers/graphql.py index 98d1ef18..8b8120dc 100644 --- a/hiku/readers/graphql.py +++ b/hiku/readers/graphql.py @@ -1,3 +1,10 @@ +""" +hiku.readers.graphql +~~~~~~~~~~~~~~~~~~~~ + +Support for queries encoded using GraphQL syntax. + +""" from typing import Any, cast, Dict, Iterator, List, Optional, Set, Union from graphql.language import ast diff --git a/hiku/readers/protobuf.py b/hiku/readers/protobuf.py deleted file mode 100644 index 283a02af..00000000 --- a/hiku/readers/protobuf.py +++ /dev/null @@ -1,75 +0,0 @@ -""" - hiku.readers.protobuf - ~~~~~~~~~~~~~~~~~~~~~ - - Support for queries encoded using Protocol Buffers - -""" -from google.protobuf.json_format import MessageToDict - -from ..query import Node, Link, Field, merge -from ..protobuf import query_pb2 - - -def _transform(pb_node): - fields = [] - for i in pb_node.items: - item_type = i.WhichOneof("value") - if item_type == "field": - if not i.field.name: - raise TypeError("Field name is empty: {!r}".format(i)) - options = None - if i.field.HasField("options"): - options = MessageToDict(i.field.options) - fields.append(Field(i.field.name, options)) - elif item_type == "link": - if not i.link.name: - raise TypeError("Link name is empty: {!r}".format(i)) - options = None - if i.link.HasField("options"): - options = MessageToDict(i.link.options) - fields.append(Link(i.link.name, _transform(i.link.node), options)) - else: - raise TypeError("Node item is empty: {!r}".format(i)) - return Node(fields) - - -def transform(pb_node): - return merge([_transform(pb_node)]) - - -def read(data): - """Reads a query, encoded into binary Protocol Buffers format, using - message types from the ``hiku.protobuf.query`` package. - - Proto-file location: ``hiku/protobuf/query.proto`` - - Generated message types: ``hiku.protobuf.query_pb2`` - - Example: - - .. 
code-block:: python - - from hiku.builder import Q, build - from hiku.export.protobuf import export - - bin_query = export(build([ - Q.characters(limit=100)[ - Q.name, - ], - ])).SerializeToString() - - assert bin_query == ( - b'\\n(\\x12&\\n\\ncharacters\\x12\\n\\n\\x08\\n\\x06\\n\\x04' - b'name\\x1a\\x0c\\n\\x05limit\\x12\\x03\\x10\\xc8\\x01' - ) - - query = read(bin_query) # reading binary message - - result = engine.execute(graph, query) - - :param bytes data: binary message representation - :return: :py:class:`hiku.query.Node`, ready to execute query object - """ - pb_value = query_pb2.Node.FromString(data) - return transform(pb_value) diff --git a/hiku/readers/simple.py b/hiku/readers/simple.py deleted file mode 100644 index a462748b..00000000 --- a/hiku/readers/simple.py +++ /dev/null @@ -1,97 +0,0 @@ -""" - hiku.readers.simple - ~~~~~~~~~~~~~~~~~~~ - - Support for queries encoded using EDN format - -""" -import typing as t - -from ..edn import loads, Dict, List, Keyword, Tuple -from ..query import Node, Link, Field, merge - - -def _get_options(value): - if len(value) < 2: - raise TypeError("Missing options argument") - elif len(value) > 2: - raise TypeError("More arguments than expected") - - keyword_value, options_value = value - if not isinstance(keyword_value, Keyword): - raise TypeError( - "Names should be specified as keywords, not as {!r}".format( - type(keyword_value) - ) - ) - - if not isinstance(options_value, Dict): - raise TypeError( - "Options should be specified as mapping, not as {!r}".format( - type(options_value) - ) - ) - - non_keyword = set( - (k, type(k)) for k in options_value.keys() if not isinstance(k, Keyword) - ) - if non_keyword: - keys_repr = " ".join("{} {!r}".format(k, t) for k, t in non_keyword) - raise TypeError( - "Option names should be specified as keywords: {}".format(keys_repr) - ) - - name = str(keyword_value) - options = {str(k): v for k, v in options_value.items()} - return name, options - - -def _extract(values): - for value in values: - if isinstance(value, Tuple): - name, options = _get_options(value) - yield Field(name, options) - elif isinstance(value, Keyword): - yield Field(str(value)) - elif isinstance(value, Dict): - for key, val in value.items(): - if isinstance(key, Tuple): - name, options = _get_options(key) - elif isinstance(key, Keyword): - name = str(key) - options = None - else: - raise TypeError( - "Link name defined not as keyword, " - "but as {!r}".format(key) - ) - yield Link(name, transform(val), options) - else: - raise TypeError("Invalid node member: {!r}".format(value)) - - -def transform(value: t.List) -> Node: - if isinstance(value, List): - return merge([Node(list(_extract(value)))]) - else: - raise TypeError( - "Node should be defined as vector, " - "{!r} provided instead".format(value) - ) - - -def read(src): - """Reads a query, encoded using EDN format - - Example: - - .. 
code-block:: python - - query = read('[{(:characters {:limit 100}) [:name]}]') - result = engine.execute(graph, query) - - :param str src: EDN-encoded data structure - :return: :py:class:`hiku.query.Node`, ready to execute query object - """ - edn_ast = loads(src) - return transform(edn_ast) diff --git a/hiku/result.py b/hiku/result.py index b566b43e..a52c80d0 100644 --- a/hiku/result.py +++ b/hiku/result.py @@ -1,23 +1,24 @@ """ - hiku.result - ~~~~~~~~~~~ +hiku.result +~~~~~~~~~~~ - In all examples query results are showed in **denormalized** form, suitable - for reading (by humans) and for serializing into simple formats, into `JSON` - for example. But this is not how `Hiku` stores result internally. +In all examples, query results are shown in **denormalized** form, suitable +for reading (by humans) and for serializing into simple formats such as +`JSON`. But this is not how `Hiku` stores results internally. - Internally `Hiku` stores result in a fully **normalized** form. So result in - `Hiku` is also a graph structure with references between objects. This - approach has lots of advantages: +Internally, `Hiku` stores results in a fully **normalized** form, so a result +in `Hiku` is also a graph structure with references between objects. This +approach has many advantages: - - normalization helps to heavily reduce size of serialized result when we - need to transfer it (this avoids data duplication) - - it reduces internal memory usage and simplifies work with data - internally - - gives ability to cache, precisely and effortlessly update local state - on the client + - normalization greatly reduces the size of the serialized result when it + needs to be transferred (it avoids data duplication) + - it reduces internal memory usage and simplifies working with the data + internally + - it gives the ability to cache, and to precisely and effortlessly update + local state on the client """ + import typing as t from collections import defaultdict @@ -235,7 +236,7 @@ def denormalize(graph: Graph, result: Proxy) -> t.Dict: .. 
code-block:: python - query = hiku.readers.simple.read('[:foo]') + query = hiku.readers.graphql.read('{ foo }') norm_result = hiku_engine.execute(graph, query) result = hiku.result.denormalize(graph, norm_result) assert result == {'foo': 'value'} diff --git a/hiku/typedef/__init__.py b/hiku/typedef/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/hiku/typedef/kinko.py b/hiku/typedef/kinko.py deleted file mode 100644 index 94703075..00000000 --- a/hiku/typedef/kinko.py +++ /dev/null @@ -1,163 +0,0 @@ -from contextlib import contextmanager - -from ..types import Record, RecordMeta, OptionalMeta, SequenceMeta, MappingMeta -from ..types import GenericMeta, Any -from ..graph import GraphTypes - -from .types import TypeDef - - -# TODO: revisit this -_CONTAINER_TYPES = ( - OptionalMeta, - SequenceMeta, - MappingMeta, - RecordMeta, -) - - -class TypeDoc: - def __init__(self, type_, description): - self.__type__ = type_ - self.__type_description__ = description - - def __getattr__(self, name): - return getattr(self.__type, name) - - -class GraphTypesEx(GraphTypes): - def visit(self, obj): - t = super(GraphTypesEx, self).visit(obj) - if isinstance(t, GenericMeta) and obj.description is not None: - t = TypeDoc(t, obj.description) - return t - - def visit_graph(self, obj): - types_map = super(GraphTypesEx, self).visit_graph(obj) - root_types_map = types_map["__root__"].__field_types__ - type_defs = [ - TypeDef[n, t] - for n, t in types_map.items() - if t is not Any - if n != "__root__" - ] - type_defs.extend( - TypeDef[n, t] for n, t in root_types_map.items() if t is not Any - ) - return type_defs - - def visit_node(self, obj): - record = super(GraphTypesEx, self).visit_node(obj) - return Record[ - [(n, t) for n, t in record.__field_types__.items() if t is not Any] - ] - - def visit_root(self, obj): - record = super(GraphTypesEx, self).visit_root(obj) - return Record[ - [(n, t) for n, t in record.__field_types__.items() if t is not Any] - ] - - -class _LinePrinter: - def visit(self, type_): - return type_.accept(self) - - def visit_boolean(self, type_): - return "Boolean" - - def visit_string(self, type_): - return "String" - - def visit_integer(self, type_): - return "Integer" - - def visit_float(self, type_): - return "Unknown" - - def visit_typeref(self, type_): - return type_.__type_name__ - - def visit_any(self, type_): - return "Unknown" - - -class _IndentedPrinter: - _indent_size = 2 - - def __init__(self): - self._indent = 0 - self._buffer = [] - self._descriptions = {} - - @contextmanager - def _add_indent(self): - self._indent += 1 - yield - self._indent -= 1 - - def _newline(self): - self._buffer.append("") - - def _print_call(self, line): - self._buffer.append((" " * self._indent_size * self._indent) + line) - - def _print_arg(self, type_): - if isinstance(type_, TypeDoc): - self._descriptions[ - len(self._buffer) - 1 - ] = type_.__type_description__ - type_ = type_.__type__ - if isinstance(type_, _CONTAINER_TYPES): - with self._add_indent(): - self.visit(type_) - else: - self._buffer[-1] += " " + _LinePrinter().visit(type_) - - def _iter_lines(self): - for i, line in enumerate(self._buffer): - if i in self._descriptions: - yield line + " ; {}".format(self._descriptions[i]) - else: - yield line - - @classmethod - def dumps(cls, types): - printer = cls() - for i, type_ in enumerate(types): - if i > 0: - printer._newline() - printer.visit(type_) - return "\n".join(printer._iter_lines()) + "\n" - - def visit(self, type_): - type_.accept(self) - - def 
visit_typedef(self, type_): - self._print_call("type {}".format(type_.__type_name__)) - self._print_arg(type_.__type__) - - def visit_record(self, type_): - self._print_call("Record") - with self._add_indent(): - for name, field_type in type_.__field_types__.items(): - self._print_call(":{}".format(name)) - self._print_arg(field_type) - - def visit_sequence(self, type_): - self._print_call("List") - self._print_arg(type_.__item_type__) - - def visit_mapping(self, type_): - self._print_call("Dict") - self._print_arg(type_.__key_type__) - self._print_arg(type_.__value_type__) - - def visit_optional(self, type_): - self._print_call("Option") - self._print_arg(type_.__type__) - - -def dumps(graph): - types = GraphTypesEx().visit(graph) - return _IndentedPrinter.dumps(types) diff --git a/hiku/typedef/types.py b/hiku/typedef/types.py deleted file mode 100644 index 16b9398c..00000000 --- a/hiku/typedef/types.py +++ /dev/null @@ -1,13 +0,0 @@ -from ..types import TypingMeta - - -class TypeDefMeta(TypingMeta): - def __cls_init__(cls, params): - cls.__type_name__, cls.__type__ = params - - def accept(cls, visitor): - return visitor.visit_typedef(cls) - - -class TypeDef(metaclass=TypeDefMeta): - pass diff --git a/hiku/types.py b/hiku/types.py index 9e2786c9..86b4004e 100644 --- a/hiku/types.py +++ b/hiku/types.py @@ -4,11 +4,10 @@ from collections import OrderedDict from typing import TypeVar -from hiku.scalar import ScalarMeta - if t.TYPE_CHECKING: from hiku.graph import Union, Interface from hiku.enum import BaseEnum + from hiku.scalar import ScalarMeta class GenericMeta(type): @@ -455,7 +454,7 @@ def get_type( # type: ignore[misc] @t.overload -def get_type(types: Types, typ: ScalarMeta) -> "ScalarMeta": +def get_type(types: Types, typ: "ScalarMeta") -> "ScalarMeta": ... diff --git a/pyproject.toml b/pyproject.toml index 7b7c7670..6e08ddf5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,6 @@ test = [ dev = [ "graphql-core>=3.2.3", # for graphql support "sqlalchemy<2", - "protobuf>=4.24.4", # for protobuf support "aiopg>=1.4.0", # for async postgresql support "prometheus-client>=0.17.1", # for prometheus metrics "sentry-sdk>=1.31.0", # for sentry tracing @@ -68,7 +67,6 @@ dev = [ "mypy>=1.4.1", "black>=23.3.0", "flake8>=5.0.4", - "grpcio-tools>=1.59.0", ] docs = ["sphinx>=5.3.0", "furo>=2023.3.27"] examples = ["flask>=2.2.5", "aiohttp>=3.8.6"] @@ -89,10 +87,6 @@ docs = "sphinx-build -b html docs docs/build" docs-open = "open docs/build/index.html" check = { composite = ["fmt", "test", "mypy", "flake"] } example = "python examples/{args:graphql_flask}.py" -proto-query = "python -m grpc_tools.protoc -I. --python_out=. hiku/protobuf/query.proto" -proto-tests = "python -m grpc_tools.protoc -I. --python_out=. tests/protobuf/result.proto" -proto-example = "python -m grpc_tools.protoc -I. --python_out=. 
docs/example.proto" -proto = { composite = ["proto-query", "proto-tests", "proto-example"] } [tool.pytest.ini_options] addopts = "--tb=native --benchmark-disable" @@ -132,31 +126,6 @@ module = "hiku.telemetry.*" disallow_untyped_defs = false check_untyped_defs = false -[[tool.mypy.overrides]] -module = "hiku.edn.*" -disallow_untyped_defs = false -check_untyped_defs = false - -[[tool.mypy.overrides]] -module = "hiku.export.simple.*" -disallow_untyped_defs = false -check_untyped_defs = false - -[[tool.mypy.overrides]] -module = "hiku.export.protobuf.*" -disallow_untyped_defs = false -check_untyped_defs = false - -[[tool.mypy.overrides]] -module = "hiku.readers.simple.*" -disallow_untyped_defs = false -check_untyped_defs = false - -[[tool.mypy.overrides]] -module = "hiku.readers.protobuf.*" -disallow_untyped_defs = false -check_untyped_defs = false - [[tool.mypy.overrides]] module = "hiku.expr.refs.*" disallow_untyped_defs = false @@ -182,11 +151,6 @@ module = "hiku.sources.aiopg.*" disallow_untyped_defs = false check_untyped_defs = false -[[tool.mypy.overrides]] -module = "hiku.typedef.*" -disallow_untyped_defs = false -check_untyped_defs = false - [[tool.mypy.overrides]] module = "hiku.builder.*" disallow_untyped_defs = false diff --git a/tests/protobuf/__init__.py b/tests/protobuf/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/protobuf/result.proto b/tests/protobuf/result.proto deleted file mode 100644 index fdd49f38..00000000 --- a/tests/protobuf/result.proto +++ /dev/null @@ -1,60 +0,0 @@ -syntax = "proto3"; - -package tests.protobuf.result; - -message cosies { - string nerv = 1; - string doghead = 2; - kir mistic = 3; - repeated kir biopics = 4; -} - -message kir { - string panton = 1; - string tamsin = 2; - cosies bahut = 3; - repeated cosies paramo = 4; -} - -message saunas { - message WentType { string changer = 1; } - message AtelierType { string litas = 1; } - message MatworkType { string bashaw = 1; } - - WentType went = 1; // maybe - AtelierType atelier = 2; // one - repeated MatworkType matwork = 3; // many -} - -message Root { - message TatlerType { string orudis = 1; } - message CoomType { string yappers = 1; } - message BarbaryType { string betty = 1; } - message FlossyType { - message AnoxicType { string peeps = 1; } - message SeggenType { string pensive = 1; } - message NeckerType { string carney = 1; } - // simple - string demoing = 1; - // complex - AnoxicType anoxic = 2; // maybe - SeggenType seggen = 3; // one - repeated NeckerType necker = 4; // many - // links - cosies daur = 5; - repeated cosies peafowl = 6; - cosies carf = 7; - } - // simple - string slotted = 1; - // complex - TatlerType tatler = 2; - CoomType coom = 3; - repeated BarbaryType barbary = 4; - // nested - FlossyType flossy = 5; - // links - cosies zareeba = 6; - repeated cosies crowdie = 7; - saunas moujik = 8; -} diff --git a/tests/protobuf/result_pb2.py b/tests/protobuf/result_pb2.py deleted file mode 100644 index 92fccbe4..00000000 --- a/tests/protobuf/result_pb2.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: tests/protobuf/result.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1btests/protobuf/result.proto\x12\x15tests.protobuf.result\"\x80\x01\n\x06\x63osies\x12\x0c\n\x04nerv\x18\x01 \x01(\t\x12\x0f\n\x07\x64oghead\x18\x02 \x01(\t\x12*\n\x06mistic\x18\x03 \x01(\x0b\x32\x1a.tests.protobuf.result.kir\x12+\n\x07\x62iopics\x18\x04 \x03(\x0b\x32\x1a.tests.protobuf.result.kir\"\x82\x01\n\x03kir\x12\x0e\n\x06panton\x18\x01 \x01(\t\x12\x0e\n\x06tamsin\x18\x02 \x01(\t\x12,\n\x05\x62\x61hut\x18\x03 \x01(\x0b\x32\x1d.tests.protobuf.result.cosies\x12-\n\x06paramo\x18\x04 \x03(\x0b\x32\x1d.tests.protobuf.result.cosies\"\x90\x02\n\x06saunas\x12\x34\n\x04went\x18\x01 \x01(\x0b\x32&.tests.protobuf.result.saunas.WentType\x12:\n\x07\x61telier\x18\x02 \x01(\x0b\x32).tests.protobuf.result.saunas.AtelierType\x12:\n\x07matwork\x18\x03 \x03(\x0b\x32).tests.protobuf.result.saunas.MatworkType\x1a\x1b\n\x08WentType\x12\x0f\n\x07\x63hanger\x18\x01 \x01(\t\x1a\x1c\n\x0b\x41telierType\x12\r\n\x05litas\x18\x01 \x01(\t\x1a\x1d\n\x0bMatworkType\x12\x0e\n\x06\x62\x61shaw\x18\x01 \x01(\t\"\xaa\x07\n\x04Root\x12\x0f\n\x07slotted\x18\x01 \x01(\t\x12\x36\n\x06tatler\x18\x02 \x01(\x0b\x32&.tests.protobuf.result.Root.TatlerType\x12\x32\n\x04\x63oom\x18\x03 \x01(\x0b\x32$.tests.protobuf.result.Root.CoomType\x12\x38\n\x07\x62\x61rbary\x18\x04 \x03(\x0b\x32\'.tests.protobuf.result.Root.BarbaryType\x12\x36\n\x06\x66lossy\x18\x05 \x01(\x0b\x32&.tests.protobuf.result.Root.FlossyType\x12.\n\x07zareeba\x18\x06 \x01(\x0b\x32\x1d.tests.protobuf.result.cosies\x12.\n\x07\x63rowdie\x18\x07 \x03(\x0b\x32\x1d.tests.protobuf.result.cosies\x12-\n\x06moujik\x18\x08 \x01(\x0b\x32\x1d.tests.protobuf.result.saunas\x1a\x1c\n\nTatlerType\x12\x0e\n\x06orudis\x18\x01 \x01(\t\x1a\x1b\n\x08\x43oomType\x12\x0f\n\x07yappers\x18\x01 \x01(\t\x1a\x1c\n\x0b\x42\x61rbaryType\x12\r\n\x05\x62\x65tty\x18\x01 \x01(\t\x1a\xca\x03\n\nFlossyType\x12\x0f\n\x07\x64\x65moing\x18\x01 \x01(\t\x12\x41\n\x06\x61noxic\x18\x02 \x01(\x0b\x32\x31.tests.protobuf.result.Root.FlossyType.AnoxicType\x12\x41\n\x06seggen\x18\x03 \x01(\x0b\x32\x31.tests.protobuf.result.Root.FlossyType.SeggenType\x12\x41\n\x06necker\x18\x04 \x03(\x0b\x32\x31.tests.protobuf.result.Root.FlossyType.NeckerType\x12+\n\x04\x64\x61ur\x18\x05 \x01(\x0b\x32\x1d.tests.protobuf.result.cosies\x12.\n\x07peafowl\x18\x06 \x03(\x0b\x32\x1d.tests.protobuf.result.cosies\x12+\n\x04\x63\x61rf\x18\x07 \x01(\x0b\x32\x1d.tests.protobuf.result.cosies\x1a\x1b\n\nAnoxicType\x12\r\n\x05peeps\x18\x01 \x01(\t\x1a\x1d\n\nSeggenType\x12\x0f\n\x07pensive\x18\x01 \x01(\t\x1a\x1c\n\nNeckerType\x12\x0e\n\x06\x63\x61rney\x18\x01 \x01(\tb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tests.protobuf.result_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - _globals['_COSIES']._serialized_start=55 - _globals['_COSIES']._serialized_end=183 - _globals['_KIR']._serialized_start=186 - _globals['_KIR']._serialized_end=316 - _globals['_SAUNAS']._serialized_start=319 - _globals['_SAUNAS']._serialized_end=591 - 
_globals['_SAUNAS_WENTTYPE']._serialized_start=503 - _globals['_SAUNAS_WENTTYPE']._serialized_end=530 - _globals['_SAUNAS_ATELIERTYPE']._serialized_start=532 - _globals['_SAUNAS_ATELIERTYPE']._serialized_end=560 - _globals['_SAUNAS_MATWORKTYPE']._serialized_start=562 - _globals['_SAUNAS_MATWORKTYPE']._serialized_end=591 - _globals['_ROOT']._serialized_start=594 - _globals['_ROOT']._serialized_end=1532 - _globals['_ROOT_TATLERTYPE']._serialized_start=984 - _globals['_ROOT_TATLERTYPE']._serialized_end=1012 - _globals['_ROOT_COOMTYPE']._serialized_start=1014 - _globals['_ROOT_COOMTYPE']._serialized_end=1041 - _globals['_ROOT_BARBARYTYPE']._serialized_start=1043 - _globals['_ROOT_BARBARYTYPE']._serialized_end=1071 - _globals['_ROOT_FLOSSYTYPE']._serialized_start=1074 - _globals['_ROOT_FLOSSYTYPE']._serialized_end=1532 - _globals['_ROOT_FLOSSYTYPE_ANOXICTYPE']._serialized_start=1444 - _globals['_ROOT_FLOSSYTYPE_ANOXICTYPE']._serialized_end=1471 - _globals['_ROOT_FLOSSYTYPE_SEGGENTYPE']._serialized_start=1473 - _globals['_ROOT_FLOSSYTYPE_SEGGENTYPE']._serialized_end=1502 - _globals['_ROOT_FLOSSYTYPE_NECKERTYPE']._serialized_start=1504 - _globals['_ROOT_FLOSSYTYPE_NECKERTYPE']._serialized_end=1532 -# @@protoc_insertion_point(module_scope) diff --git a/tests/test_edn.py b/tests/test_edn.py deleted file mode 100644 index e200df1a..00000000 --- a/tests/test_edn.py +++ /dev/null @@ -1,29 +0,0 @@ -from hiku.edn import loads, dumps -from hiku.edn import List, Keyword, Dict, TaggedElement, Tuple, Symbol - - -def test_symbol(): - assert loads("foo") == Symbol("foo") - - -def test_nil(): - assert loads("[1 nil 2]") == List([1, None, 2]) - - -def test_loads(): - n = loads( - "[:foo {:bar [:baz]} (limit 10) " - '#foo/uuid "678d88b2-87b0-403b-b63d-5da7465aecc3"]' - ) - assert n == List( - [ - Keyword("foo"), - Dict({Keyword("bar"): List([Keyword("baz")])}), - Tuple([Symbol("limit"), 10]), - TaggedElement("foo/uuid", "678d88b2-87b0-403b-b63d-5da7465aecc3"), - ] - ) - - -def test_tagged_element(): - assert dumps(TaggedElement("foo/bar", "baz")) == '#foo/bar "baz"' diff --git a/tests/test_export_protobuf.py b/tests/test_export_protobuf.py deleted file mode 100644 index eaaf3ef2..00000000 --- a/tests/test_export_protobuf.py +++ /dev/null @@ -1,64 +0,0 @@ -import pytest - -from google.protobuf.json_format import ParseError - -from hiku.query import Field, Link, Node -from hiku.protobuf import query_pb2 -from hiku.export.protobuf import export - - -UNKNOWN = object() - - -def test_node(): - node = query_pb2.Node() - node.items.add().field.name = "aimer" - - query = Node([Field("aimer")]) - - assert export(query) == node - - -def test_field_options(): - node = query_pb2.Node() - field = node.items.add().field - field.name = "cody" - field.options["kink"] = 1234 - field.options["cithara"] = "slasher" - - query = Node([Field("cody", options={"kink": 1234, "cithara": "slasher"})]) - - assert export(query) == node - - -def test_link_options(): - node = query_pb2.Node() - link = node.items.add().link - link.name = "pommee" - link.options["takest"] = 3456 - link.options["decoy"] = "nyroca" - - field = link.node.items.add().field - field.name = "fugazi" - - query = Node( - [ - Link( - "pommee", - Node([Field("fugazi")]), - options={"takest": 3456, "decoy": "nyroca"}, - ) - ] - ) - - assert export(query) == node - - -def test_invalid_options(): - with pytest.raises(ParseError) as type_err: - export(Node([Field("kott", options={"clauber": UNKNOWN})])) - type_err.match("has unexpected type") - - with 
pytest.raises(ParseError) as item_type_err: - export(Node([Field("puerco", options={"bayat": [1, UNKNOWN, 3]})])) - item_type_err.match("has unexpected type") diff --git a/tests/test_export_simple.py b/tests/test_export_simple.py deleted file mode 100644 index 873bf9da..00000000 --- a/tests/test_export_simple.py +++ /dev/null @@ -1,31 +0,0 @@ -from hiku.edn import dumps -from hiku.query import Field, Link, Node -from hiku.export.simple import export - - -def check_export(query_obj, data): - assert dumps(export(query_obj)) == data - - -def test_field(): - check_export(Field("foo"), ":foo") - check_export(Field("foo", options={"bar": "baz"}), '(:foo {:bar "baz"})') - - -def test_link(): - check_export(Link("foo", Node([])), "{:foo []}") - check_export( - Link("foo", Node([]), options={"bar": "baz"}), - '{(:foo {:bar "baz"}) []}', - ) - - -def test_node(): - check_export(Node([Field("foo")]), "[:foo]") - - -def test_options(): - check_export( - Node([Field("foo", options={"bar": [1, {"baz": 2}, {3}]})]), - "[(:foo {:bar [1 {:baz 2} #{3}]})]", - ) diff --git a/tests/test_read_protobuf.py b/tests/test_read_protobuf.py deleted file mode 100644 index 9eac3706..00000000 --- a/tests/test_read_protobuf.py +++ /dev/null @@ -1,85 +0,0 @@ -import pytest - -from hiku.query import Node, Field, Link -from hiku.protobuf import query_pb2 as t -from hiku.readers.protobuf import read, transform - - -def check_read(pb_node, expected): - query = read(pb_node.SerializeToString()) - assert query == expected - - -def test_node_field(): - node = t.Node() - item = node.items.add() - item.field.name = "tratan" - check_read(node, Node([Field("tratan")])) - - -def test_node_field_options(): - node = t.Node() - item = node.items.add() - item.field.name = "sprayed" - item.field.options["treason"] = 123 - item.field.options["prizren"] = "stager" - check_read( - node, Node([Field("sprayed", {"treason": 123, "prizren": "stager"})]) - ) - - -def test_link(): - node = t.Node() - link_item = node.items.add() - link_item.link.name = "swaying" - field_item = link_item.link.node.items.add() - field_item.field.name = "pelew" - check_read(node, Node([Link("swaying", Node([Field("pelew")]))])) - - -def test_link_options(): - node = t.Node() - link_item = node.items.add() - link_item.link.name = "dubiety" - link_item.link.options["squat"] = 234 - link_item.link.options["liquid"] = "ravages" - field_item = link_item.link.node.items.add() - field_item.field.name = "gits" - check_read( - node, - Node( - [ - Link( - "dubiety", - Node([Field("gits")]), - {"squat": 234, "liquid": "ravages"}, - ) - ] - ), - ) - - -def test_no_field_name(): - node = t.Node() - item = node.items.add() - item.field.CopyFrom(t.Field()) - with pytest.raises(TypeError) as err: - transform(node) - err.match("Field name is empty") - - -def test_no_link_name(): - node = t.Node() - item = node.items.add() - item.link.CopyFrom(t.Link()) - with pytest.raises(TypeError) as err: - transform(node) - err.match("Link name is empty") - - -def test_no_node_item(): - node = t.Node() - node.items.add() - with pytest.raises(TypeError) as err: - transform(node) - err.match("Node item is empty") diff --git a/tests/test_read_simple.py b/tests/test_read_simple.py deleted file mode 100644 index cfaef531..00000000 --- a/tests/test_read_simple.py +++ /dev/null @@ -1,102 +0,0 @@ -import pytest - -from hiku.query import Node, Field, Link -from hiku.readers.simple import read - - -def check_read(source, query): - first = read(source) - assert first == query - - -def test_invalid_root(): - 
with pytest.raises(TypeError):
-        read("{:foo []}")
-    with pytest.raises(TypeError):
-        read(":foo")
-
-
-def test_field():
-    check_read(
-        """
-        [:foo :bar]
-        """,
-        Node([Field("foo"), Field("bar")]),
-    )
-
-
-def test_field_invalid():
-    with pytest.raises(TypeError):
-        read('["foo"]')
-    with pytest.raises(TypeError):
-        read("[1]")
-
-
-def test_field_options():
-    check_read(
-        """
-        [(:foo {:bar 1}) :baz]
-        """,
-        Node([Field("foo", options={"bar": 1}), Field("baz")]),
-    )
-
-
-def test_field_invalid_options():
-    # missing options
-    with pytest.raises(TypeError):
-        read("[(:foo)]")
-
-    # invalid options type
-    with pytest.raises(TypeError):
-        read("[(:foo :bar)]")
-
-    # more arguments than expected
-    with pytest.raises(TypeError):
-        read("[(:foo 1 2)]")
-
-    # invalid option key
-    with pytest.raises(TypeError):
-        read("[(:foo {1 2})]")
-
-
-def test_link():
-    check_read(
-        """
-        [{:foo [:bar :baz]}]
-        """,
-        Node([Link("foo", Node([Field("bar"), Field("baz")]))]),
-    )
-
-
-def test_link_options():
-    check_read(
-        """
-        [{(:foo {:bar 1}) [:baz]}]
-        """,
-        Node([Link("foo", Node([Field("baz")]), options={"bar": 1})]),
-    )
-
-
-def test_link_invalid():
-    with pytest.raises(TypeError):
-        read('[{"foo" [:baz]}]')
-    with pytest.raises(TypeError):
-        read("[{foo [:baz]}]")
-
-
-def test_link_invalid_options():
-    # missing options
-    with pytest.raises(TypeError):
-        read("[{(:foo) [:baz]}]")
-
-    # invalid options type
-    with pytest.raises(TypeError):
-        read("[{(:foo :bar) [:baz]}]")
-
-    # more arguments than expected
-    with pytest.raises(TypeError):
-        read("[{(:foo 1 2) [:bar]}]")
-
-    # invalid option key
-    with pytest.raises(TypeError):
-        read("[{(:foo {1 2}) [:bar]}]")
diff --git a/tests/test_result.py b/tests/test_result.py
index 9c4b9e58..2dc52f6a 100644
--- a/tests/test_result.py
+++ b/tests/test_result.py
@@ -7,7 +7,7 @@ from hiku.types import Record, String, Optional, Sequence, TypeRef, Integer
 from hiku.graph import Graph, Link, Node, Field, Root
 from hiku.result import denormalize, Index, Proxy, Reference, ROOT
-from hiku.readers.simple import read
+from hiku.readers.graphql import read
 
 
 def _(*args):
@@ -72,7 +72,18 @@ def _(*args):
         Root(
             [
                 Field("slotted", String, _),
-                Field("tatler", Optional[Record[{"orudis": String}]], _),
+                Field(
+                    "tatler",
+                    Optional[
+                        Record[
+                            {
+                                "orudis": String,
+                                "jigsaw": Optional[String],
+                            }
+                        ]
+                    ],
+                    _,
+                ),
                 Field("coom", Record[{"yappers": String}], _),
                 Field(
                     "lovecraft",
@@ -113,7 +124,10 @@ def _(*args):
 INDEX.root.update(
     {
         "slotted": "quoy_ushered",
-        "tatler": {"orudis": "fhp_musterd"},
+        "tatler": {
+            "orudis": "fhp_musterd",
+            "jigsaw": "dodges_ogham",
+        },
         "coom": {"yappers": "idaho_golok"},
         "lovecraft": {
             "characters": {
@@ -214,25 +228,33 @@ def check_result(query_string, result):
 
 
 def test_root_fields():
-    check_result("[:slotted]", {"slotted": "quoy_ushered"})
+    check_result("{ slotted }", {"slotted": "quoy_ushered"})
 
 
 def test_root_fields_complex():
-    check_result("[{:tatler []}]", {"tatler": {}})
-    check_result("[{:tatler [:orudis]}]", {"tatler": {"orudis": "fhp_musterd"}})
-
-    check_result("[{:coom []}]", {"coom": {}})
-    check_result("[{:coom [:yappers]}]", {"coom": {"yappers": "idaho_golok"}})
-
-    check_result("[{:barbary []}]", {"barbary": [{}]})
+    # complex field requested without selection must return all fields
     check_result(
-        "[{:barbary [:betty]}]", {"barbary": [{"betty": "japheth_ophir"}]}
+        "{ tatler }",
+        {
+            "tatler": {
+                "orudis": "fhp_musterd",
+                "jigsaw": "dodges_ogham",
+            }
+        },
     )
+    # complex field requested with selection must return selected fields
+    check_result("{ tatler { orudis } }", {"tatler": {"orudis": "fhp_musterd"}})
+
+    check_result("{ coom }", {"coom": {"yappers": "idaho_golok"}})
+    check_result("{ coom { yappers } }", {"coom": {"yappers": "idaho_golok"}})
+
+    check_result("{ barbary }", {"barbary": [{"betty": "japheth_ophir"}]})
+    check_result("{ barbary { betty } }", {"barbary": [{"betty": "japheth_ophir"}]})
 
 
 def test_node_fields():
     check_result(
-        "[{:zareeba [:nerv]} {:crowdie [:doghead]}]",
+        "{ zareeba { nerv } crowdie { doghead } }",
         {
             "zareeba": {"nerv": "calgary_badass"},
             "crowdie": [
@@ -244,23 +266,34 @@ def test_node_fields():
 
 
 def test_node_fields_complex():
-    check_result("[{:moujik [{:went []}]}]", {"moujik": {"went": {}}})
+    # complex field requested without selection must return all fields
     check_result(
-        "[{:moujik [{:went [:changer]}]}]",
+        "{ moujik { went } }", {"moujik": {"went": {"changer": "cheerly_jpg"}}}
+    )
+    # complex field requested with selection must return selected fields
+    check_result(
+        "{ moujik { went { changer } } }",
         {"moujik": {"went": {"changer": "cheerly_jpg"}}},
     )
-    check_result("[{:moujik [{:atelier []}]}]", {"moujik": {"atelier": {}}})
     check_result(
-        "[{:moujik [{:atelier [:litas]}]}]",
+        "{ moujik { atelier } }", {"moujik": {"atelier": {"litas": "facula_keck"}}}
+    )
+    check_result(
+        "{ moujik { atelier { litas } } }",
         {"moujik": {"atelier": {"litas": "facula_keck"}}},
     )
     check_result(
-        "[{:moujik [{:matwork []}]}]", {"moujik": {"matwork": [{}, {}]}}
+        "{ moujik { matwork } }",
+        {
+            "moujik": {
+                "matwork": [{"bashaw": "bukhoro_zins"}, {"bashaw": "worms_gemman"}]
+            }
+        },
     )
     check_result(
-        "[{:moujik [{:matwork [:bashaw]}]}]",
+        "{ moujik { matwork { bashaw } } }",
         {
             "moujik": {
                 "matwork": [
@@ -274,7 +307,7 @@ def test_node_fields_complex():
 
 def test_root_node_links():
     check_result(
-        "[{:flossy [{:daur [:doghead]} {:peafowl [:nerv]}]}]",
+        "{ flossy { daur { doghead } peafowl { nerv } } }",
         {
             "flossy": {
                 "daur": {"doghead": "satsuma_mks"},
@@ -290,8 +323,8 @@ def test_root_node_links():
 
 def test_deep_links():
     check_result(
         """
-        [{:zareeba [{:mistic [:panton]} {:biopics [:tamsin]}]}
-         {:crowdie [{:mistic [:tamsin]} {:biopics [:panton]}]}]
+        { zareeba { mistic { panton } biopics { tamsin } }
+          crowdie { mistic { tamsin } biopics { panton } } }
         """,
         {
             "zareeba": {
@@ -323,12 +356,18 @@ def test_deep_links():
 
 def test_circle_links():
     check_result(
-        """
-        [{:zareeba [{:mistic [{:bahut [:nerv]}]}]}
-         {:zareeba [{:mistic [{:paramo [:nerv]}]}]}
-         {:zareeba [{:biopics [{:bahut [:nerv]}]}]}
-         {:zareeba [{:biopics [{:paramo [:nerv]}]}]}]
-        """,
+        """{
+            zareeba {
+                mistic {
+                    bahut { nerv }
+                    paramo { nerv }
+                }
+                biopics {
+                    bahut { nerv }
+                    paramo { nerv }
+                }
+            }
+        }""",
         {
             "zareeba": {  # cosies 2
                 "mistic": {  # kir 5
@@ -361,22 +400,22 @@ def test_circle_links():
 
 def test_optional():
     check_result(
-        "[{:flossy [{:daur [:doghead]} {:carf [:nerv]}]}]",
+        "{ flossy { daur { doghead } carf { nerv } } }",
        {"flossy": {"daur": {"doghead": "satsuma_mks"}, "carf": None}},
     )
 
 
 def test_nested_records():
     check_result(
-        "[{:rlyeh [{:priest [:name]}]}]",
+        "{ rlyeh { priest { name } } }",
         {"rlyeh": {"priest": {"name": "Cthulhu"}}},
     )
     check_result(
-        "[{:lovecraft [{:characters [{:cthulhu [:name]}]}]}]",
+        "{ lovecraft { characters { cthulhu { name } } } }",
         {"lovecraft": {"characters": {"cthulhu": {"name": "Cthulhu"}}}},
     )
     check_result(
-        "[{:elemental [{:air [:name]} {:water [:name :stories]}]}]",
+        "{ elemental { air { name } water { name stories } } 
}", { "elemental": { "air": [], diff --git a/tests/test_source_graph.py b/tests/test_source_graph.py index 3216d5fe..627acd75 100644 --- a/tests/test_source_graph.py +++ b/tests/test_source_graph.py @@ -10,7 +10,7 @@ from hiku.builder import build, Q from hiku.expr.core import define, S, each from hiku.sources.graph import SubGraph -from hiku.readers.simple import read +from hiku.readers.graphql import read from hiku.executors.sync import SyncExecutor from hiku.executors.threads import ThreadsExecutor @@ -161,9 +161,7 @@ def buz(x, size): sg_x.c(buz(S.this, S.size)), options=[Option("size", None)], ), - Field( - "with_option", None, query_x1, options=[Option("opt", None)] - ), + Field("with_option", None, query_x1, options=[Option("opt", None)]), ], ), Node( @@ -172,9 +170,7 @@ def buz(x, size): Field("id", None, sg_y), Field("c", None, sg_y), Field("f", None, sg_y.c(S.f2)), - Field( - "foo", None, sg_y.c(each(S.x, S.this.xs, foo(S.x, S.this))) - ), + Field("foo", None, sg_y.c(each(S.x, S.this.xs, foo(S.x, S.this)))), Field("bar", None, sg_y.c(each(S.x, S.this.xs, bar(S.x)))), Field("baz", None, sg_y.c(baz(S.this))), ], @@ -204,7 +200,7 @@ def execute(engine, graph, query): def test_field(engine, graph): - result = execute(engine, graph, read("[{:x1s [:a :f]}]")) + result = execute(engine, graph, read("{ x1s { a f } }")) check_result( result, { @@ -218,7 +214,7 @@ def test_field(engine, graph): def test_field_options(engine, graph): - result = execute(engine, graph, read('[{:x1s [(:buz {:size "100"})]}]')) + result = execute(engine, graph, read('{ x1s { buz(size: "100") } }')) check_result( result, { @@ -232,7 +228,7 @@ def test_field_options(engine, graph): def test_field_without_options(engine, graph): - result = execute(engine, graph, read("[{:x1s [:buz]}]")) + result = execute(engine, graph, read("{ x1s { buz } }")) check_result( result, { @@ -247,12 +243,12 @@ def test_field_without_options(engine, graph): def test_field_without_required_option(engine, graph): with pytest.raises(TypeError) as err: - execute(engine, graph, read("[{:x1s [:buz3]}]")) + execute(engine, graph, read("{ x1s { buz3 } }")) err.match('^Required option "size" for (.*)buz3(.*) was not provided$') def test_field_option_defaults(engine, graph): - result = execute(engine, graph, read("[{:x1s [:buz2]}]")) + result = execute(engine, graph, read("{ x1s { buz2 } }")) check_result( result, { @@ -263,7 +259,7 @@ def test_field_option_defaults(engine, graph): ] }, ) - result = execute(engine, graph, read("[{:x1s [(:buz2 {:size 200})]}]")) + result = execute(engine, graph, read("{ x1s { buz2(size: 200) } }")) check_result( result, { @@ -277,7 +273,7 @@ def test_field_option_defaults(engine, graph): def test_sequence_in_arg_type(engine, graph): - result = execute(engine, graph, read("[{:x1s [:baz]}]")) + result = execute(engine, graph, read("{ x1s { baz } }")) check_result( result, { @@ -288,7 +284,7 @@ def test_sequence_in_arg_type(engine, graph): ] }, ) - result = execute(engine, graph, read("[{:y1s [:baz]}]")) + result = execute(engine, graph, read("{ y1s { baz } }")) check_result( result, { @@ -305,7 +301,7 @@ def test_mixed_query(engine, graph): result = execute( engine, graph, - read("[{:x1s [(:with_option {:opt 123}) :a]}]"), + read("{ x1s { with_option(opt: 123) a } }"), ) check_result( result, diff --git a/tests/test_typedef_kinko.py b/tests/test_typedef_kinko.py deleted file mode 100644 index 00245929..00000000 --- a/tests/test_typedef_kinko.py +++ /dev/null @@ -1,305 +0,0 @@ -import difflib -from textwrap import 
dedent - -from hiku.graph import Graph, Node, Field, Link, Root -from hiku.types import Sequence, Mapping, Integer, String, Optional, Record -from hiku.types import TypeRef -from hiku.typedef.kinko import dumps - - -def _(*args, **kwargs): - raise NotImplementedError - - -def assert_dumps(root, schema): - first = dumps(root) - second = dedent(schema).strip() + "\n" - if first != second: - msg = "Dumped schema mismatches:\n\n{}".format( - "\n".join(difflib.ndiff(first.splitlines(), second.splitlines())) - ) - raise AssertionError(msg) - - -def test_field(): - assert_dumps( - Graph( - [ - Root( - [ - Field("leones", String, _), - ] - ) - ] - ), - """ - type leones String - """, - ) - - -def test_field_complex(): - assert_dumps( - Graph( - [ - Root( - [ - Field( - "behave", Optional[Record[{"burieth": Integer}]], _ - ), - Field("gemara", Record[{"trevino": Integer}], _), - Field( - "riffage", Sequence[Record[{"shophar": Integer}]], _ - ), - ] - ) - ] - ), - """ - type behave - Option - Record - :burieth Integer - - type gemara - Record - :trevino Integer - - type riffage - List - Record - :shophar Integer - """, - ) - - -def test_node(): - assert_dumps( - Graph( - [ - Node( - "adder", - [ - Field("kott", String, _), - Field("aseptic", String, _), - ], - ), - Node( - "brayden", - [ - Field("unhot", String, _), - Field("linea", String, _), - ], - ), - ] - ), - """ - type adder - Record - :kott String - :aseptic String - - type brayden - Record - :unhot String - :linea String - """, - ) - - -def test_list_simple(): - assert_dumps( - Graph( - [ - Root( - [ - Field("askest", Sequence[Integer], _), - ] - ) - ] - ), - """ - type askest - List Integer - """, - ) - - -def test_list_complex(): - assert_dumps( - Graph( - [ - Root( - [ - Field("gladden", Sequence[Sequence[Integer]], _), - ] - ) - ] - ), - """ - type gladden - List - List Integer - """, - ) - - -def test_dict_simple(): - assert_dumps( - Graph( - [ - Root( - [ - Field("kasseri", Mapping[String, Integer], _), - ] - ) - ] - ), - """ - type kasseri - Dict String Integer - """, - ) - - -def test_dict_complex(): - assert_dumps( - Graph( - [ - Root( - [ - Field( - "trunks", - Mapping[String, Mapping[Integer, Integer]], - _, - ), - ] - ) - ] - ), - """ - type trunks - Dict String - Dict Integer Integer - """, - ) - - -def test_type_ref(): - assert_dumps( - Graph( - [ - Node( - "xeric", - [ - Field("derrida", String, _), - ], - ), - Node( - "amb", - [ - Field("loor", String, _), - Link("cressy", TypeRef["xeric"], _, requires=None), - ], - ), - Node( - "offeree", - [ - Field("abila", String, _), - Link( - "ferber", - Sequence[TypeRef["xeric"]], - _, - requires=None, - ), - ], - ), - ] - ), - """ - type xeric - Record - :derrida String - - type amb - Record - :loor String - :cressy xeric - - type offeree - Record - :abila String - :ferber - List xeric - """, - ) - - -def testDocs(): - assert_dumps( - Graph( - [ - Node( - "switzer", - [ - Field( - "beatch", String, _, description="attribute beatch" - ), - ], - description="switzer description", - ), - Node( - "trine", - [ - Field( - "propels", - Optional[String], - _, - description="attribute propels", - ), - Link( - "cardura", - TypeRef["switzer"], - _, - requires=None, - description="link cardura to switzer", - ), - ], - description="trine description", - ), - Node( - "packrat", - [ - Field( - "pikes", String, _, description="attribute pikes" - ), - Link( - "albus", - Sequence[TypeRef["switzer"]], - _, - requires=None, - description="link albus to switzer", - ), - ], - description="packrat 
description", - ), - ] - ), - """ - type switzer ; switzer description - Record - :beatch String ; attribute beatch - - type trine ; trine description - Record - :propels ; attribute propels - Option String - :cardura switzer ; link cardura to switzer - - type packrat ; packrat description - Record - :pikes String ; attribute pikes - :albus ; link albus to switzer - List switzer - """, - )