From 09c044fdfbf10f916f66edc760340bfdfd9314dc Mon Sep 17 00:00:00 2001
From: Markus Alexander Kuppe
Date: Wed, 21 Feb 2024 13:36:07 -0800
Subject: [PATCH] Add gen and test grammar function.

---
 controllers/pyctrl/samples/apalache.py | 35 ++++++++++++++++++++++++--
 1 file changed, 33 insertions(+), 2 deletions(-)

diff --git a/controllers/pyctrl/samples/apalache.py b/controllers/pyctrl/samples/apalache.py
index 4c7e1556..818ea473 100644
--- a/controllers/pyctrl/samples/apalache.py
+++ b/controllers/pyctrl/samples/apalache.py
@@ -1,4 +1,3 @@
-import sys
 import pyaici.server as aici
 
 # note that VSCode syntax highlighting is not perfect r""" ... """ (it assumes regexp)
@@ -77,7 +76,39 @@
 """
 
 
-# async def gen_and_test_grammar():
+async def gen_and_test_grammar():
+    await aici.FixedTokens(
+        """
+    ---- MODULE Counter ----
+    VARIABLE
+        b
+
+    Init == b = TRUE
+    Next == b' = FALSE
+    ====
+
+    The type of the variable declaration of b in the following TLA+ spec is:
+    """
+    )
+    tokens = await aici.gen_tokens(yacc=apalache, max_tokens=10, store_var="b")
+    print(tokens)
+
+    ## No need to tokenize the input, as it is already tokenized?!
+    # tokens = aici.tokenize(input)
+    constr = aici.CfgConstraint(apalache)
+    outp = []
+    for t in tokens:
+        print(f"Testing token: {t}")
+        if not constr.token_allowed(t):
+            ## Abort/terminate if token is not allowed.
+            print(f"Token {t} not allowed")
+            print("OK: " + repr(aici.detokenize(outp).decode("utf-8", "ignore")))
+            print("fail: " + repr(aici.detokenize([t]).decode("utf-8", "ignore")))
+            break
+        outp.append(t)
+        constr.append_token(t)
+    print(f"EOS allowed: {constr.eos_allowed()}")
+
 
 async def test_grammar():
     await aici.FixedTokens("Start")