# NOTE(review): "Spaces: / Runtime error" below was paste residue from a
# Hugging Face Spaces status banner, not source code — kept only as this
# comment so the file parses.
# References:
# https://medium.com/@tahreemrasul/building-a-chatbot-application-with-chainlit-and-langchain-3e86da0099a6
# https://github.com/ArjunAranetaCodes/Python-DataScience-AI/blob/main/Chain-LitChat-Mistral7b/app.py
from langchain_openai import ChatOpenAI
from langchain.chains import LLMChain
#from langchain_huggingface import HuggingFaceEndpoint
from prompts import maths_assistant_prompt_template
from langchain.memory.buffer import ConversationBufferMemory
from dotenv import load_dotenv
import os
import chainlit as cl

# BUG FIX: load_dotenv was imported but never called, so OPENAI_API_KEY from
# the .env file was never loaded into the environment and api_key was None.
load_dotenv()
api_key = os.getenv('OPENAI_API_KEY')
# SECURITY FIX: never print the raw secret to logs; only report its presence.
print(f"api key is {'set' if api_key else 'MISSING'}")
# BUG FIX: without @cl.on_chat_start this coroutine was never registered, so
# the chain was never built and no welcome message was sent.
@cl.on_chat_start
async def start_llm():
    """Initialize the LLM, conversation memory and chain for this chat session.

    Stores the assembled LLMChain in the Chainlit user session under
    "llm_chain" and sends the topic-selection buttons to the user.
    """
    print("Initializing llm...")
    llm = ChatOpenAI(model='gpt-4o-mini',
                     temperature=0.5, api_key=api_key)
    # model_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
    # llm = HuggingFaceEndpoint(
    #     repo_id=model_id, max_length=2000, temperature=0.5, token=os.getenv('HF_READ_TOKEN')
    # )
    print("llm initialized!")
    # BUG FIX: `max_len` is not a ConversationBufferMemory field — it was a
    # silent no-op (and is rejected by newer pydantic-validated versions), so
    # the kwarg is removed. NOTE(review): this memory is therefore unbounded.
    conversation_memory = ConversationBufferMemory(memory_key="chat_history",
                                                   return_messages=True)
    llm_chain = LLMChain(llm=llm,
                         prompt=maths_assistant_prompt_template,
                         memory=conversation_memory)
    # Per-user session storage so the message/action handlers can reuse it.
    cl.user_session.set("llm_chain", llm_chain)

    # Initial message with selectable topic buttons; the Action names must
    # match the @cl.action_callback registrations elsewhere in this file.
    actions = [
        cl.Action(name="Probability", value="Probability", description="Select Quiz Topic!"),
        cl.Action(name="Linear Algebra", value="Linear Algebra", description="Select Quiz Topic!"),
        cl.Action(name="Accounts", value="Accounts", description="Select Quiz Topic!"),
        cl.Action(name="Calculus", value="Calculus", description="Select Quiz Topic!")
    ]
    await cl.Message(content="**Pick a Topic and Let the Quiz Adventure Begin!** ππ", actions=actions).send()
# BUG FIX: without @cl.on_message this handler was never registered, so
# free-text user messages were never forwarded to the LLM.
@cl.on_message
async def query_llm(message: cl.Message):
    """Forward a user chat message to the LLM chain and send back its answer.

    Reads the chain stored in the user session by start_llm; the chain's own
    memory supplies the running chat history.
    """
    llm_chain = cl.user_session.get("llm_chain")
    print("Message being sent to the LLM is")
    print(message.content)
    response = await llm_chain.ainvoke({
        "chat_history": llm_chain.memory.load_memory_variables({})["chat_history"],
        "question": message.content
    }, callbacks=[
        cl.AsyncLangchainCallbackHandler()
    ])
    # LLMChain.ainvoke returns a dict; the generated answer is under "text".
    await cl.Message(response["text"]).send()
async def send_good_luck_message():
    """Send a short good-luck encouragement to the user."""
    farewell = cl.Message(content="Good luck! π", align="bottom")
    await farewell.send()
async def handle_topic_selection(action: cl.Action):
    """Start a quiz on the topic named by the clicked action button.

    Pulls the session's LLM chain, replays its stored chat history, and asks
    the model to quiz the user on ``action.value``.
    """
    chain = cl.user_session.get("llm_chain")
    history = chain.memory.load_memory_variables({})["chat_history"]
    reply = await chain.ainvoke(
        {
            "chat_history": history,
            "question": f"Quiz me on the topic {action.value}.",
        },
        callbacks=[cl.AsyncLangchainCallbackHandler()],
    )
    await cl.Message(reply["text"]).send()
# BUG FIX: the original defined `on_action` four times — each def silently
# shadowed the previous one and none was registered, so the topic buttons did
# nothing. Chainlit dispatches button clicks via @cl.action_callback(<Action
# name>), so restore one registered callback per quiz topic. The callback
# names below must match the cl.Action names sent at chat start —
# TODO(review): confirm against the button list in start_llm.
@cl.action_callback("Probability")
async def on_probability(action: cl.Action):
    """Handle a click on the Probability topic button."""
    await handle_topic_selection(action)


@cl.action_callback("Linear Algebra")
async def on_linear_algebra(action: cl.Action):
    """Handle a click on the Linear Algebra topic button."""
    await handle_topic_selection(action)


@cl.action_callback("Accounts")
async def on_accounts(action: cl.Action):
    """Handle a click on the Accounts topic button."""
    await handle_topic_selection(action)


@cl.action_callback("Calculus")
async def on_calculus(action: cl.Action):
    """Handle a click on the Calculus topic button."""
    await handle_topic_selection(action)