# app.py
import os

from langchain import PromptTemplate
from langchain import HuggingFaceHub, LLMChain

# copy the Hugging Face Hub token into the variable LangChain expects;
# fail fast with a KeyError if HUGGINGFACEHUB_TOKEN is not set
os.environ['HUGGINGFACEHUB_API_TOKEN'] = os.environ['HUGGINGFACEHUB_TOKEN']
template = """Question: {question}
Answer: """
prompt = PromptTemplate(
template=template,
input_variables=['question']
)
# user question
question = "Which NFL team won the Super Bowl in the 2010 season?"

# initialize the Hub LLM (flan-t5-xl); a near-zero temperature keeps the
# output effectively deterministic
hub_llm = HuggingFaceHub(
    repo_id='google/flan-t5-xl',
    model_kwargs={'temperature': 1e-10}
)
# chain the prompt template and the Hub LLM together
llm_chain = LLMChain(
    prompt=prompt,
    llm=hub_llm
)

# ask the single question about the 2010 NFL season
print(llm_chain.run(question))
qs = [
    {'question': "Which NFL team won the Super Bowl in the 2010 season?"},
    {'question': "If I am 6 ft 4 inches, how tall am I in centimeters?"},
    {'question': "Who was the 12th person on the moon?"},
    {'question': "How many eyes does a blade of grass have?"}
]

# generate answers for all of the questions in one batch
res = llm_chain.generate(qs)
print(res)
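
# `generate` returns an LLMResult object rather than plain strings; a minimal
# sketch (assuming the usual res.generations[i][0].text layout) to pair each
# question with its generated answer:
for q, gen in zip(qs, res.generations):
    print(q['question'], '->', gen[0].text)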
multi_template = """Answer the following questions one at a time.
Questions:
{questions}
Answers:
"""
long_prompt = PromptTemplate(template=multi_template, input_variables=["questions"])
llm_chain = LLMChain(
prompt=long_prompt,
llm=hub_llm
)
qs_str = (
    "Which NFL team won the Super Bowl in the 2010 season?\n" +
    "If I am 6 ft 4 inches, how tall am I in centimeters?\n" +
    "Who was the 12th person on the moon?\n" +
    "How many eyes does a blade of grass have?"
)

# run all four questions through the model in a single call
print(llm_chain.run(qs_str))
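
# As an alternative to one big prompt, the same questions can be answered one
# at a time with the single-question chain; a minimal sketch, assuming
# LLMChain.apply returns one {'text': ...} dict per input:
single_chain = LLMChain(prompt=prompt, llm=hub_llm)
for q, out in zip(qs, single_chain.apply(qs)):
    print(q['question'], '->', out['text'])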