aaryan3781 committed
Commit 6eff6f8 · Parent(s): e472af1

Update app.py

Files changed (1):
  app.py +6 -6
app.py CHANGED
@@ -208,7 +208,7 @@ def function3(input_text,one_shot_example,n,llm):
                 )
             ]
         )
-        return generated_answer.content
+        return generated_answer.content,results
     else:
         combined_input = k+f"\n\n Here is a sample question answer pair for reference :\n\n {one_shot_example} "
         generated_answer = model(
@@ -218,7 +218,7 @@ def function3(input_text,one_shot_example,n,llm):
                 )
             ]
         )
-        return generated_answer.content
+        return generated_answer.content,results
     else:
         model,client=llm_route(llm)
         if len(one_shot_example)==0:
@@ -232,7 +232,7 @@ def function3(input_text,one_shot_example,n,llm):
             stop=None, # Optional: specify a stop sequence
             temperature=0.7 # Adjust the creativity of the response
         )
-        return completion.choices[0].text.strip()
+        return completion.choices[0].text.strip(),results
     else:
         combined_input = k+f"\n\n Here is a sample question answer pair for reference :\n\n {one_shot_example} "
         completion = client.completions.create(
@@ -274,7 +274,7 @@ def function4(input_text, one_shot_example, n,llm):
                 )
             ]
         )
-        return generated_answer.content
+        return generated_answer.content,results
     else:

         combined_input = f"please provide comprehensive and well-researched responses to the following question. Ensure that the information is up-to-date and includes relevant scientific insights and data \n\n context:{k} \n\n,Below is an example question-answer pair for reference\n\n {one_shot_example} \n\n Now answer this question \n\n question :{input_text}"
@@ -285,7 +285,7 @@ def function4(input_text, one_shot_example, n,llm):
                 )
             ]
         )
-        return generated_answer.content
+        return generated_answer.content,results
     else:
         model,client=llm_route(llm)
         if len(one_shot_example)==0:
@@ -298,7 +298,7 @@ def function4(input_text, one_shot_example, n,llm):
             stop=None, # Optional: specify a stop sequence
             temperature=0.7 # Adjust the creativity of the response
         )
-        return completion.choices[0].text.strip()
+        return completion.choices[0].text.strip(),results
     else:
         completion = client.completions.create(
             model=model,
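
Note on the change: every return statement touched by this commit now yields a tuple of the generated text and results instead of a bare string, so callers of function3 and function4 have to unpack two values. A minimal sketch of an adjusted call site, assuming hypothetical variable names (answer, sources) and that results holds the retrieval output referenced in app.py:

    # Sketch of a caller updated for the new tuple return value.
    # `answer` and `sources` are illustrative names, not identifiers from app.py;
    # the actual consumer of function3/function4 may unpack them differently.
    answer, sources = function3(input_text, one_shot_example, n, llm)
    print(answer)   # generated answer text (previously the sole return value)
    print(sources)  # the `results` object now returned alongside the answer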