Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -5,9 +5,10 @@ from langchain_huggingface import HuggingFaceEmbeddings
|
|
| 5 |
from langchain_community.vectorstores import FAISS
|
| 6 |
from langchain.memory import ConversationBufferMemory
|
| 7 |
from langchain.chains import ConversationalRetrievalChain
|
| 8 |
-
from langchain_groq import ChatGroq
|
| 9 |
from langchain.prompts import ChatPromptTemplate
|
| 10 |
|
|
|
|
| 11 |
from htmlTemplates import css, user_template, get_bot_template, BOT_AVATARS_BASE64
|
| 12 |
|
| 13 |
|
|
@@ -71,7 +72,7 @@ def generate_friendly_response(question, label, llm, selected_source_english):
|
|
| 71 |
return result.content.strip()
|
| 72 |
|
| 73 |
|
| 74 |
-
# ------------------- Handle User Input -------------------
|
| 75 |
def handle_userinput(user_question):
|
| 76 |
if st.session_state.conversation is None:
|
| 77 |
st.warning("منابع هنوز آماده نشدهاند.")
|
|
@@ -104,10 +105,11 @@ def main():
|
|
| 104 |
if "messages" not in st.session_state:
|
| 105 |
st.session_state.messages = []
|
| 106 |
if "llm" not in st.session_state:
|
|
|
|
| 107 |
st.session_state.llm = ChatGroq(
|
| 108 |
model_name="llama-3.1-8b-instant",
|
| 109 |
temperature=0.2,
|
| 110 |
-
api_key=os.environ["GROQ_API_KEY"]
|
| 111 |
)
|
| 112 |
if "selected_source_english" not in st.session_state:
|
| 113 |
st.session_state.selected_source_english = "Khamenei"
|
|
@@ -156,13 +158,11 @@ def main():
|
|
| 156 |
placeholder = st.empty()
|
| 157 |
placeholder.info(f"⏳ در حال بارگذاری منابع {selected_source_persian}...")
|
| 158 |
try:
|
| 159 |
-
# ✅ مدل جایگزین مطمئن و در دسترس
|
| 160 |
embeddings = HuggingFaceEmbeddings(
|
| 161 |
-
model_name="  [line truncated in page extraction — original model name not recoverable here]
|
| 162 |
model_kwargs={'trust_remote_code': True},
|
| 163 |
cache_folder="/tmp/hf_cache"
|
| 164 |
)
|
| 165 |
-
|
| 166 |
vector_path = f"Resources/{st.session_state.selected_source_english}/faiss_index"
|
| 167 |
vectorstore = FAISS.load_local(
|
| 168 |
vector_path,
|
|
|
|
| 5 |
from langchain_community.vectorstores import FAISS
|
| 6 |
from langchain.memory import ConversationBufferMemory
|
| 7 |
from langchain.chains import ConversationalRetrievalChain
|
| 8 |
+
from langchain_groq import ChatGroq # Changed to import ChatGroq
|
| 9 |
from langchain.prompts import ChatPromptTemplate
|
| 10 |
|
| 11 |
+
# Import the new dynamic template components
|
| 12 |
from htmlTemplates import css, user_template, get_bot_template, BOT_AVATARS_BASE64
|
| 13 |
|
| 14 |
|
|
|
|
| 72 |
return result.content.strip()
|
| 73 |
|
| 74 |
|
| 75 |
+
# ------------------- Handle User Input (Logic Only) -------------------
|
| 76 |
def handle_userinput(user_question):
|
| 77 |
if st.session_state.conversation is None:
|
| 78 |
st.warning("منابع هنوز آماده نشدهاند.")
|
|
|
|
| 105 |
if "messages" not in st.session_state:
|
| 106 |
st.session_state.messages = []
|
| 107 |
if "llm" not in st.session_state:
|
| 108 |
+
# Changed LLM to use Groq API with ChatGroq
|
| 109 |
st.session_state.llm = ChatGroq(
|
| 110 |
model_name="llama-3.1-8b-instant",
|
| 111 |
temperature=0.2,
|
| 112 |
+
api_key=os.environ["GROQ_API_KEY"] # Changed to use GROQ_API_KEY
|
| 113 |
)
|
| 114 |
if "selected_source_english" not in st.session_state:
|
| 115 |
st.session_state.selected_source_english = "Khamenei"
|
|
|
|
| 158 |
placeholder = st.empty()
|
| 159 |
placeholder.info(f"⏳ در حال بارگذاری منابع {selected_source_persian}...")
|
| 160 |
try:
|
|
|
|
| 161 |
embeddings = HuggingFaceEmbeddings(
|
| 162 |
+
model_name="heydariAI/persian-embeddings",
|
| 163 |
model_kwargs={'trust_remote_code': True},
|
| 164 |
cache_folder="/tmp/hf_cache"
|
| 165 |
)
|
|
|
|
| 166 |
vector_path = f"Resources/{st.session_state.selected_source_english}/faiss_index"
|
| 167 |
vectorstore = FAISS.load_local(
|
| 168 |
vector_path,
|