app.py
import streamlit as st
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.llms import Ollama
import os
from dotenv import load_dotenv

load_dotenv()
## LangSmith tracking
os.environ["LANGCHAIN_API_KEY"] = os.getenv("LANGCHAIN_API_KEY", "")
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_PROJECT"] = "Simple Q&A Chatbot With Ollama"
## Prompt template
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant. Please respond to the user's queries."),
        ("user", "Question: {question}"),
    ]
)
def generate_response(question, model_name, temperature, max_tokens):
    # Build the Ollama LLM with the user-selected settings
    # (max_tokens maps to Ollama's num_predict parameter).
    llm = Ollama(model=model_name, temperature=temperature, num_predict=max_tokens)
    output_parser = StrOutputParser()
    chain = prompt | llm | output_parser
    answer = chain.invoke({"question": question})
    return answer
## Title of the app
st.title("Utkarsh's Q&A Chatbot With Ollama")

## Select the open-source Ollama model
llm = st.sidebar.selectbox("Select an open-source model", ["mistral"])

## Adjust response parameters
temperature = st.sidebar.slider("Temperature", min_value=0.0, max_value=1.0, value=0.7)
max_tokens = st.sidebar.slider("Max Tokens", min_value=50, max_value=300, value=150)

## Main interface for user input
st.write("Go ahead and ask any question")
user_input = st.text_input("You:")

if user_input:
    response = generate_response(user_input, llm, temperature, max_tokens)
    st.write(response)
else:
    st.write("Please provide a question above.")