Update app.py
Browse files
app.py
CHANGED
@@ -161,7 +161,7 @@ def stream_llm_response(messages):
     stream = client.chat.completions.create(
         model=LLM_MODEL,
         messages=messages,
-        max_tokens=
+        max_tokens=24096,
         temperature=0.1,
         stream=True
     )