Skip to content

Commit

Permalink
[fix] [#149] do not merge/extract fragment fields into node fields if…
Browse files Browse the repository at this point in the history
… there is no such field in node

There was a part of the node-merging algorithm that extracts fields from a fragment when the same fields exist in the node — this prevents the same field from being resolved multiple times. The simplest example: `{ user { id ... on User { id } } }` would become `{ user { id } }`, so the `id` field is resolved only once.

But the previous algorithm (before this fix) merged eagerly, so even if no such field existed in the node, it would still try to eliminate the fragment entirely. For example, `{ user { ... on User { id } } }` would become `{ user { id } }` — the fragment is eliminated.

This behavior breaks union queries, so we must not touch union fragments. This fix makes the algorithm merge less eagerly — that is, if a fragment field does not exist in the node, we leave that field in the fragment, since there is nothing to optimize: no field duplication would occur anyway.

Note that although we do not merge fragment fields into the node's fields, we still merge
the fields *within* each fragment field (see `_merge_link`).
  • Loading branch information
m.kindritskiy committed Jun 5, 2024
1 parent 9906bcc commit 7fd0f95
Show file tree
Hide file tree
Showing 3 changed files with 172 additions and 53 deletions.
53 changes: 30 additions & 23 deletions hiku/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,6 @@ def _merge(
link_directives: t.DefaultDict[t.Tuple, t.List] = defaultdict(list)
to_merge = OrderedDict()
fields_iter = chain.from_iterable(e.fields for e in nodes)
fragments_iter = chain.from_iterable(e.fragments for e in nodes)

for field in fields_iter:
key = field_key(field)
Expand All @@ -309,31 +308,39 @@ def _merge(
yield field

if not visited_fields and not to_merge:
for fr in fragments_iter:
for fr in chain.from_iterable(e.fragments for e in nodes):
yield fr
else:
for fr in fragments_iter:
fr_fields = []
for field in fr.node.fields:
key = (field.name, field.options_hash, field.alias)

if field.__class__ is Link:
field = t.cast(Link, field)
if key not in to_merge:
to_merge[key] = [field.node]
links[key] = field
for node in nodes:
for fr in node.fragments:
fr_fields = []
for field in fr.node.fields:
key = (field.name, field.options_hash, field.alias)

if field.__class__ is Link:
# if fragment field not exists in node fields, we
# can skip merging it with node fields and just
# leave it in a fragment
if field.name not in node.fields_map:
fr_fields.append(field)
continue

field = t.cast(Link, field)
if key not in to_merge:
to_merge[key] = [field.node]
links[key] = field
else:
to_merge[key].append(field.node)
link_directives[key].extend(field.directives)
else:
to_merge[key].append(field.node)
link_directives[key].extend(field.directives)
else:
if key not in visited_fields:
fr_fields.append(field)

fr_key = (fr.type_name, tuple(field_key(f) for f in fr_fields))
if fr_key not in visited_fragments:
visited_fragments.add(fr_key)
if fr_fields:
yield Fragment(fr.type_name, fr_fields)
if key not in visited_fields:
fr_fields.append(field)

fr_key = (fr.type_name, tuple(field_key(f) for f in fr_fields))
if fr_key not in visited_fragments:
visited_fragments.add(fr_key)
if fr_fields:
yield Fragment(fr.type_name, fr_fields)

for key, values in to_merge.items():
link = links[key]
Expand Down
146 changes: 117 additions & 29 deletions tests/test_read_graphql.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def test_mutation_operation():
)


def test_named_fragments():
def test_named_fragments() -> None:
check_read(
"""
query Juger {
Expand Down Expand Up @@ -221,43 +221,55 @@ def test_named_fragments():
[
Field("flowers"),
Field("apres"),
Link(
"pins",
Node(
[
Field("gunya"),

Link(
"kilned",
Node(
[
Field("rusk"),
]
),
),
], [],
),
),

], [
Fragment('Makai', [
Field("doozie"),
Link(
"pins",
Node(
[
Field("gunya"),

], [
Fragment(
'Torsion',
[

Link(
"kilned",
Node(
[
Field("rusk"),
]
),
),
]
),
],
),
),
]),
]
),
options={"gire": "noatak"},
),

Link(
"movies",
Node(
[
Field("boree"),
]
),
),
],
[]
[
Fragment(
"Valium",
[
Link(
"movies",
Node(
[
Field("boree"),
]
),
),
],
),
]
),
),
]
Expand Down Expand Up @@ -719,7 +731,83 @@ def test_parse_interface_with_one_fragment():
)


def test_merge_node_with_fragment_on_node():
def test_merge_node_with_fragment_on_node() -> None:
    """When parsing a query, hiku must merge fields from a fragment with
    fields at the same level, leaving only the unique fields in the fragment.

    This is done to avoid fetching the same field twice (can we fix this by
    caching execution instead of query rewrites?).

    For example, in the test query we have the ``context.user`` selection set
    and the ``context.ContextFragment.user`` selection set. Since the
    ``user.id`` field exists in both parts of the query, we must "merge"
    ``user.id`` and leave only ``user.email`` in the fragment.

    From what I can see, we can recursively parse and merge query fragments:

    - first merge ContextFragment.UserFragment into ContextFragment,
      like ... on Context {
          user {
              id
              email
          }
      }
    - then merge user and UserFragment,
      like user {
          id
          name
          email
      }
    - then go one level up and merge ContextFragment.user into user,
      like user {
          id
          name
          email
      }

    We basically could eliminate fragments altogether.
    """
    check_read(
        """
        query GetContext {
          context {
            user {
              id
              name
              ... on User {
                id
                email
              }
            }
            ... on Context {
              user {
                ... on User {
                  id
                  email
                }
              }
            }
          }
        }
        """,
        # Expected: `user.id` is merged up out of the fragments (it already
        # exists in the node's own fields), while `user.email` — unique to the
        # fragment — stays inside a Fragment on User.
        Node(
            [
                Link(
                    "context",
                    Node([
                        Link("user", Node([
                            Field("id"),
                            Field("name"),
                        ], [
                            Fragment('User', [
                                Field("email"),
                            ]),
                        ])),
                    ], []),
                )
            ]
        ),
    )



def test_merge_fragment_for_union() -> None:
"""We do not know if fragments are for unions or not when we parsing query"""
check_read(
"""
query GetContext {
Expand Down
26 changes: 25 additions & 1 deletion tests/test_union.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ def maybe_get_media():
Node('Audio', [
Field('id', Integer, resolve_audio_fields),
Field('duration', String, resolve_audio_fields),
Link('user', TypeRef['User'], link_user, requires=None),
]),
Node('Video', [
Field('id', Integer, resolve_video_fields),
Expand Down Expand Up @@ -330,7 +331,7 @@ def test_query_only_typename():
}


def test_validate_query_can_not_contain_shared_fields():
def test_validate_query_can_not_contain_shared_fields_in_union():
query = """
query SearchMedia($text: String) {
searchMedia(text: $text) {
Expand Down Expand Up @@ -395,3 +396,26 @@ def test_validate_union_type_field_has_no_such_option():
assert errors == [
'Unknown options for "Audio.duration": size',
]


def test_validate_query_can_contain_shared_links():
    """A union member fragment may select a link without validation errors.

    Here the ``... on Audio`` fragment selects the ``user`` link; validation
    must accept it (presumably ``user`` is a link shared by union members —
    confirm against the GRAPH definition).
    """
    # TODO: the problem here is probably because of the fragment merging
    # algorithm
    query = """
    query SearchMedia($text: String) {
        searchMedia(text: $text) {
            __typename
            ... on Audio {
                duration
                user {
                    id
                }
            }
            ... on Video {
                thumbnailUrl(size: 100)
            }
        }
    }
    """

    errors = validate(GRAPH, read(query, {'text': 'foo'}))
    assert not errors

0 comments on commit 7fd0f95

Please sign in to comment.