Update app.py
app.py (CHANGED)
@@ -76,6 +76,7 @@ class RAGSystem:
         self.tokenizer = None
         self.model = None
         self.is_initialized = False
+        self.processed_files = set()  # Keep track of processed files
 
     def initialize_model(self):
         """Initialize the base model and tokenizer."""
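The only functional change in this first hunk is the new processed_files registry. A plain set of path strings is enough here, because the rest of the diff only ever asks three things of it: a membership test, add, and clear. A quick standalone illustration of those operations (the paths below are made up, not from app.py):

# The three set operations the later hunks rely on, shown in isolation.
processed_files = set()

processed_files.add("/tmp/report.pdf")         # recorded once a file has been indexed
print("/tmp/report.pdf" in processed_files)    # True  -> would be skipped next time
print("/tmp/notes.txt" in processed_files)     # False -> would be loaded and indexed

processed_files.clear()                        # what clear_context() does in the last hunk
print(len(processed_files))                    # 0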
@@ -134,10 +135,20 @@ class RAGSystem:
         """Process uploaded documents and update the vector store."""
         try:
             documents = []
+            new_files = []
+
+            # Process only new files
             for file in files:
-
-
+                if file.name not in self.processed_files:
+                    docs = DocumentLoader.load_file(file.name)
+                    documents.extend(docs)
+                    new_files.append(file.name)
+                    self.processed_files.add(file.name)
 
+            if not new_files:
+                logger.info("No new documents to process")
+                return
+
             if not documents:
                 raise ValueError("No documents were successfully loaded.")
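Taken together, the first two hunks implement a skip-already-seen pass: only files whose name is not yet in processed_files are loaded and appended to documents, and the method returns early when nothing new was uploaded. Below is a standalone sketch of that pattern under stated assumptions: loader stands in for DocumentLoader.load_file, the class and method names are illustrative, and the vector-store update that follows in app.py is omitted.

import logging

logger = logging.getLogger(__name__)

class IncrementalIndexer:
    """Toy version of the pattern in this hunk: remember which paths have
    already been loaded so repeated uploads are not re-indexed."""

    def __init__(self, loader):
        self.loader = loader              # stand-in for DocumentLoader.load_file
        self.processed_files = set()

    def process(self, paths):
        documents, new_files = [], []
        for path in paths:
            if path in self.processed_files:
                continue                  # already indexed, skip it
            documents.extend(self.loader(path))
            new_files.append(path)
            self.processed_files.add(path)

        if not new_files:
            logger.info("No new documents to process")
            return []
        if not documents:
            raise ValueError("No documents were successfully loaded.")
        return documents

Note that identity is whatever file.name yields for the uploaded objects, so files are deduplicated by path rather than by content.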
@@ -225,9 +236,10 @@ def process_response(user_input: str, chat_history: List, files: List) -> tuple:
     if not rag_system.is_initialized:
         rag_system.initialize_model()
 
-
+    # Always process documents if there are new files
+    if files:
         rag_system.process_documents(files)
-
+
     response = rag_system.generate_response(user_input)
 
     # Clean and format response
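The third hunk guards the indexing call so that a chat turn with no attachments never touches the vector store; when files are present, process_documents() itself now ignores anything already indexed. A reduced sketch of the resulting control flow (rag_system and the function names come from the diff; error handling and the chat-history formatting of the real process_response, which returns a tuple, are left out):

def process_response(user_input, chat_history, files):
    if not rag_system.is_initialized:
        rag_system.initialize_model()

    # Only touch the index when the user actually attached files;
    # process_documents() skips paths already recorded in processed_files.
    if files:
        rag_system.process_documents(files)

    return rag_system.generate_response(user_input)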
@@ -345,8 +357,14 @@ try:
         def submit(user_input, chat_history, files):
             return process_response(user_input, chat_history, files)
 
+        def clear_context():
+            # Clear the history and reset the system
+            rag_system.vector_store = None
+            rag_system.processed_files.clear()
+            return None
+
         message.submit(submit, [message, chatbot, files], [chatbot])
-        clear.click(
+        clear.click(clear_context, None, chatbot)
 
     logger.info("Gradio interface created successfully")
     demo.launch()
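The final hunk adds a clear_context handler and wires the existing Clear button to it: the handler drops both the vector store and the file registry, and empties the chatbot by returning None as its new value. A minimal, self-contained sketch of that wiring, assuming a Blocks layout roughly like the one in app.py (the component definitions and the FakeRAG stand-in are illustrative, not the real code):

import gradio as gr

class FakeRAG:
    """Stand-in for RAGSystem with only the state clear_context touches."""
    def __init__(self):
        self.vector_store = None
        self.processed_files = set()

rag_system = FakeRAG()

def clear_context():
    # Reset retrieval state; returning None tells Gradio to empty the chatbot.
    rag_system.vector_store = None
    rag_system.processed_files.clear()
    return None

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    message = gr.Textbox(label="Message")
    clear = gr.Button("Clear")
    # Same call shape as in the diff: no inputs, the chatbot as the only output.
    clear.click(clear_context, None, chatbot)

demo.launch()

With this wiring, pressing Clear resets both the visible chat history and the retrieval state, so the next upload is treated as a fresh document set.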