Spaces: Sadmank / reflection
Runtime error

Add system prompt, add correct stop tokens
#1 by Sadmank - opened

app.py CHANGED
@@ -1,3 +1,49 @@
+Hugging Face's logo
+Hugging Face
+Search models, datasets, users...
+Models
+Datasets
+Spaces
+Posts
+Docs
+Solutions
+Pricing
+
+
+
+Spaces:
+
+Sadmank
+/
+reflection
+
+private
+
+Logs
+App
+Files
+Community
+Settings
+reflection
+/
+app.py
+
+Sadmank's picture
+Sadmank
+Update app.py
+6c53f07
+verified
+3 minutes ago
+raw
+
+Copy download link
+history
+blame
+edit
+delete
+No virus
+
+5.92 kB
 import gradio as gr
 import requests
 import os
@@ -86,9 +132,12 @@ def stream_response(message, history, max_tokens, temperature, top_p):
         "messages": messages,
         "max_tokens": max_tokens,
         "temperature": temperature,
+        "system" : "You are a world-class AI system, capable of complex reasoning and reflection. Reason through the query inside <thinking> tags, and then provide your final response inside <output> tags. If you detect that you made a mistake in your reasoning at any point, correct yourself inside <reflection> tags.",
         "top_p": top_p,
         "stream": True,
-        "stop": ["
+        "stop": [ "<|start_header_id|>",
+                  "<|end_header_id|>",
+                  "<|eot_id|>"]  # Add stop sequence
     }

     debug_print(f"Sending request to API: {API_URL}")
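For context, the payload above is built inside stream_response and posted to the inference endpoint. The sketch below is a minimal illustration of how such a function could assemble the request and stream tokens back to Gradio; it is not the Space's actual code. Only the payload fields and the debug_print call come from the diff. API_URL, API_KEY, the Bearer-auth header, the OpenAI-style SSE parsing of the response, and the (user, assistant) history layout are assumptions for illustration.

import json
import os

import requests

API_URL = os.environ.get("API_URL", "https://example.invalid/v1/chat/completions")  # hypothetical placeholder
API_KEY = os.environ.get("API_KEY", "")  # hypothetical placeholder


def debug_print(msg):
    # Stand-in for the Space's own debug helper seen in the diff.
    print(msg)


def stream_response(message, history, max_tokens, temperature, top_p):
    # Assumed Gradio-style history: a list of (user, assistant) pairs.
    messages = []
    for user_turn, assistant_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    payload = {
        "messages": messages,
        "max_tokens": max_tokens,
        "temperature": temperature,
        # System prompt added by this change: ask for <thinking>/<reflection>/<output> tags.
        "system": "You are a world-class AI system, capable of complex reasoning and reflection. "
                  "Reason through the query inside <thinking> tags, and then provide your final "
                  "response inside <output> tags. If you detect that you made a mistake in your "
                  "reasoning at any point, correct yourself inside <reflection> tags.",
        "top_p": top_p,
        "stream": True,
        # Llama-3-style chat-template special tokens added as stop sequences.
        "stop": ["<|start_header_id|>", "<|end_header_id|>", "<|eot_id|>"],
    }

    debug_print(f"Sending request to API: {API_URL}")
    headers = {"Authorization": f"Bearer {API_KEY}", "Content-Type": "application/json"}

    partial = ""
    with requests.post(API_URL, json=payload, headers=headers, stream=True, timeout=120) as resp:
        resp.raise_for_status()
        for raw_line in resp.iter_lines():
            if not raw_line:
                continue
            chunk = raw_line.decode("utf-8")
            # Assuming an OpenAI-style SSE stream of "data: {...}" lines.
            if chunk.startswith("data: "):
                chunk = chunk[len("data: "):]
            if chunk.strip() == "[DONE]":
                break
            try:
                delta = json.loads(chunk)["choices"][0]["delta"].get("content", "")
            except (json.JSONDecodeError, KeyError, IndexError):
                continue
            partial += delta
            yield partial

The stop list mirrors the special tokens used by Llama-3-family chat templates, so generation halts as soon as the model emits an end-of-turn marker or starts a new message header instead of running on into the next turn.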