Spaces:
Sleeping
Sleeping
Commit
·
ed5a9ba
1
Parent(s):
f603a8a
Update app.py
Browse files
app.py
CHANGED
|
@@ -63,6 +63,13 @@ import pandas as pd
|
|
| 63 |
import random
|
| 64 |
import os
|
| 65 |
import csv
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 66 |
|
| 67 |
# france credentials
|
| 68 |
BASE_URL = "https://cnerg-gpt-france.openai.azure.com/"
|
|
@@ -178,10 +185,42 @@ def function3(input_text,one_shot_example,n):
|
|
| 178 |
|
| 179 |
|
| 180 |
def function4(input_text, one_shot_example, n):
    """Placeholder implementation for pipeline option 4.

    Parameters
    ----------
    input_text : str
        The user's question / query text.
    one_shot_example : str
        An optional example question-answer pair.
    n : int or str
        The numeric parameter selected in the UI dropdown.

    Returns
    -------
    str
        A diagnostic string echoing the inputs.
    """
    # Your logic for function 4
    # Bug fix: the f-string previously referenced an undefined name
    # `parameter`; the actual third argument is `n`, so every call
    # raised NameError.
    return f"Output of Function 4 with input: {input_text}, one shot example: {one_shot_example} and parameter: {n}"
|
| 185 |
|
| 186 |
# Define the dropdown options
|
| 187 |
dropdown_options = ["1", "2", "3"]
|
|
|
|
| 63 |
import random
|
| 64 |
import os
|
| 65 |
import csv
|
| 66 |
+
import numpy as np
|
| 67 |
+
import pickle
|
| 68 |
+
from rank_bm25 import BM25Okapi
|
| 69 |
+
from nltk.tokenize import word_tokenize
|
| 70 |
+
loaded_texts = np.load('texts.npy', allow_pickle=True)
|
| 71 |
+
with open('bm25_model.pkl', 'rb') as file:
|
| 72 |
+
bm25 = pickle.load(file)
|
| 73 |
|
| 74 |
# france credentials
|
| 75 |
BASE_URL = "https://cnerg-gpt-france.openai.azure.com/"
|
|
|
|
| 185 |
|
| 186 |
|
| 187 |
def function4(input_text, one_shot_example, n):
    """Answer *input_text* with BM25-retrieved context via an Azure OpenAI chat model.

    Retrieves the top-``n`` documents from the pre-loaded corpus
    (module-level ``bm25`` model and ``loaded_texts``), concatenates them
    into a context string, and sends a zero-shot or one-shot prompt to the
    deployed chat model.

    Parameters
    ----------
    input_text : str
        The user's question.
    one_shot_example : str
        Example question-answer pair; an empty string selects the
        zero-shot prompt.
    n : int or str
        Number of top-ranked documents to include as context (the UI
        passes it as a string, hence the int() coercion).

    Returns
    -------
    str
        The model's generated answer text.
    """
    # Score every corpus document against the lower-cased, tokenized query.
    tokenized_query = word_tokenize(input_text.lower())
    doc_scores = bm25.get_scores(tokenized_query)

    # Sort on the score ONLY. The original sorted (score, doc) tuples, so a
    # score tie fell through to comparing the documents themselves, which can
    # raise TypeError (or order arbitrarily) for numpy-loaded text objects.
    ranked_docs = [doc for _, doc in sorted(
        zip(doc_scores, loaded_texts), key=lambda pair: pair[0], reverse=True
    )]

    # Build the context with join instead of repeated += (avoids quadratic
    # string concatenation when n is large).
    n = int(n)
    k = "".join(ranked_docs[:n])

    model = AzureChatOpenAI(
        openai_api_base=BASE_URL,
        openai_api_version="2023-05-15",
        deployment_name=DEPLOYMENT_NAME,
        openai_api_key=API_KEY,
        openai_api_type="azure",
    )

    # Only the prompt text differs between the zero-shot and one-shot paths;
    # build it once and make a single model call instead of duplicating the
    # whole call in both branches.
    if not one_shot_example:
        combined_input = f"please provide comprehensive and well-researched responses to the following question. Ensure that the information is up-to-date and includes relevant scientific insights and data , context:{k} \n\n question : {input_text}"
    else:
        combined_input = f"please provide comprehensive and well-researched responses to the following question. Ensure that the information is up-to-date and includes relevant scientific insights and data \n\n context:{k} \n\n,Below is an example question-answer pair for reference\n\n {one_shot_example} \n\n Now answer this question \n\n question :{input_text}"

    generated_answer = model([HumanMessage(content=combined_input)])
    return generated_answer.content
|
| 223 |
|
|
|
|
|
|
|
| 224 |
|
| 225 |
# Define the dropdown options
|
| 226 |
dropdown_options = ["1", "2", "3"]
|