aaryan3781 committed on
Commit 866bf76 · 1 Parent(s): 9a51208

Update app.py

Files changed (1): app.py (+7 -6)
app.py CHANGED
@@ -106,7 +106,7 @@ def Bm25(raw_text_list,input_text,n) :
     top_5_results = bm25.get_top_n(tokenized_query, corpus, n=n)
     results = '\n'.join(top_5_results)
     combined_input = "query = " + input_text + "\n\n For the above query these are some results from a search engine: \n ".join(results) + "\n\n Give detailed and brief answer for the query write in 500-1000 words. Give detailed and well informative answer(include calculations if needed, using tables and other styles of structuring is optional for better answering ) "
-    return combined_input
+    return combined_input,results
 
 def llm_route(llm):
     openai_api_key = "EMPTY"
@@ -190,7 +190,7 @@ def function2(input_text,one_shot_example,llm):
 def function3(input_text,one_shot_example,n,llm):
     n=int(n)
     k=search_results(input_text)
-    k=Bm25(k,input_text,n)
+    k,results=Bm25(k,input_text,n)
     if llm=="GPT-4":
         model = AzureChatOpenAI(
             openai_api_base=BASE_URL,
@@ -243,7 +243,7 @@ def function3(input_text,one_shot_example,n,llm):
         stop=None,  # Optional: specify a stop sequence
         temperature=0.7  # Adjust the creativity of the response
     )
-    return completion.choices[0].text.strip()
+    return completion.choices[0].text.strip(),results
 
 
 
@@ -256,6 +256,7 @@ def function4(input_text, one_shot_example, n,llm):
     k=""
     for doc in sorted_docs[:n]:
         k+=doc
+    results=k
     if llm=="GPT-4":
         model = AzureChatOpenAI(
             openai_api_base=BASE_URL,
@@ -307,7 +308,7 @@ def function4(input_text, one_shot_example, n,llm):
         stop=None,  # Optional: specify a stop sequence
         temperature=0.7  # Adjust the creativity of the response
     )
-    return completion.choices[0].text.strip()
+    return completion.choices[0].text.strip(),results
 
 
 
@@ -339,7 +340,7 @@ iface3 = gr.Interface(
         gr.Dropdown(choices=dropdown_options, label="Number of top search results"),
         gr.Dropdown(choices=llm_dropdown, label="LLM")
     ],
-    outputs="text"
+    outputs={"LLM Answer": "text", "Google Search Result": "text"}
 )
 iface4 = gr.Interface(
     function4,
@@ -349,7 +350,7 @@ iface4 = gr.Interface(
         gr.Dropdown(choices=dropdown_options_4, label="Number of top k documents"),
         gr.Dropdown(choices=llm_dropdown, label="LLM")
     ],
-    outputs="text"
+    outputs={"LLM Answer": "text", "Abstract Search Result": "text"}
 )
 
 # Create a parallel interface that combines all individual interfaces
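The core change above is that Bm25 now returns two values, the assembled prompt and the raw retrieved passages, and function3 unpacks both so the passages can be surfaced in the UI. A minimal, self-contained sketch of that return-tuple pattern with rank_bm25 (the helper name bm25_prompt, the sample documents, and the shortened prompt text are illustrative, not taken from app.py):

from rank_bm25 import BM25Okapi

def bm25_prompt(raw_text_list, input_text, n):
    """Return both the assembled LLM prompt and the raw retrieved passages."""
    corpus = raw_text_list
    tokenized_corpus = [doc.split() for doc in corpus]
    bm25 = BM25Okapi(tokenized_corpus)
    top_docs = bm25.get_top_n(input_text.split(), corpus, n=n)
    results = "\n".join(top_docs)
    prompt = (
        "query = " + input_text
        + "\n\nFor the above query these are some results from a search engine:\n"
        + results
        + "\n\nGive a detailed answer for the query in 500-1000 words."
    )
    return prompt, results  # two values, as in the updated Bm25()

# Caller side, mirroring the updated function3: keep the raw results so they
# can be returned to the interface alongside the model's answer.
docs = [
    "solar panels convert sunlight into electricity using photovoltaic cells",
    "wind turbines generate electricity from moving air",
]
prompt, results = bm25_prompt(docs, "how do solar panels work", n=1)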
 
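On the interface side, the two returned values need two output fields. The committed code passes a dict to outputs; a sketch of the same idea using a list of labeled Textbox components instead (the function name, dropdown choices, and labels are placeholders, and the retrieval and LLM calls are stubbed out):

import gradio as gr

def answer_with_sources(query, n, llm):
    """Stand-in for function3/function4: return (llm_answer, retrieved_results)."""
    results = f"(top {n} search results for: {query})"  # placeholder for the retrieval step
    answer = f"(answer from {llm} would go here)"       # placeholder for the LLM call
    return answer, results                              # one value per output component

iface = gr.Interface(
    answer_with_sources,
    inputs=[
        gr.Textbox(label="Query"),
        gr.Dropdown(choices=["1", "3", "5"], value="3", label="Number of top search results"),
        gr.Dropdown(choices=["GPT-4"], value="GPT-4", label="LLM"),
    ],
    outputs=[gr.Textbox(label="LLM Answer"), gr.Textbox(label="Search Results")],
)

if __name__ == "__main__":
    iface.launch()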