From c5386c2d1300f99944d46370e11f20a73fe15571 Mon Sep 17 00:00:00 2001 From: imunnic Date: Thu, 20 Mar 2025 08:02:07 +0100 Subject: [PATCH] =?UTF-8?q?mejora=20de=20programaci=C3=B3n=20de=20indices?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/indices.csv | 2 ++ app/indices.txt | 2 -- app/main.py | 54 +++++++++++++++++++++++++++++++++++++++++----- app/webscrapper.py | 11 +++++----- 4 files changed, 56 insertions(+), 13 deletions(-) create mode 100644 app/indices.csv delete mode 100644 app/indices.txt diff --git a/app/indices.csv b/app/indices.csv new file mode 100644 index 0000000..abc25a6 --- /dev/null +++ b/app/indices.csv @@ -0,0 +1,2 @@ +us-spx-500;S&P 500;15:00;22:00 +spain-35;IBEX 35;09:00;18:00 \ No newline at end of file diff --git a/app/indices.txt b/app/indices.txt deleted file mode 100644 index e405189..0000000 --- a/app/indices.txt +++ /dev/null @@ -1,2 +0,0 @@ -us-spx-500 -spain-35 \ No newline at end of file diff --git a/app/main.py b/app/main.py index 7845a5c..4d2188d 100644 --- a/app/main.py +++ b/app/main.py @@ -3,16 +3,56 @@ from contextlib import asynccontextmanager from database import Base, engine from routes import router from apscheduler.schedulers.background import BackgroundScheduler -from webscrapper import search_from_keywords_file, search_from_indices_file +from apscheduler.triggers.cron import CronTrigger +from webscrapper import search_from_keywords_file, search_indice +import csv +from datetime import datetime, time # Crear las tablas en MySQL si no existen Base.metadata.create_all(bind=engine) -# Configurar el scheduler +# Configurar el scheduler (solo una instancia) scheduler = BackgroundScheduler() -scheduler.add_job(search_from_keywords_file, "cron", hour=11, minute=0) #Ejecutar a las 01:00 -scheduler.add_job(search_from_keywords_file, "cron", hour=18, minute=0) #Ejecutar a las 01:00 -scheduler.add_job(search_from_indices_file, "interval", minutes=60) + +# Agregar tareas fijas 
+scheduler.add_job(search_from_keywords_file, "cron", hour=11, minute=0) # Ejecutar a las 11:00 AM +scheduler.add_job(search_from_keywords_file, "cron", hour=18, minute=0) # Ejecutar a las 6:00 PM + + +def cargar_tareas_desde_csv(indices): + """Lee el CSV y agrega tareas al scheduler""" + try: + with open(indices, mode='r', encoding='utf-8') as file: + reader = csv.reader(file, delimiter=';') + + for row in reader: + if len(row) < 4: # Verifica que haya suficientes columnas + print(f"❌ Línea incorrecta en CSV: {row}") + continue + + url, nombre, horaInicio, horaFin = row # Extrae los valores + + try: + horaInicio = datetime.strptime(horaInicio.strip(), "%H:%M").time() + horaFin = datetime.strptime(horaFin.strip(), "%H:%M").time() + except ValueError: + print(f"❌ Formato de hora incorrecto en línea: {row}") + continue + + obj = {"url": url.strip(), "nombre": nombre.strip(), "horaInicio": horaInicio, "horaFin": horaFin} + + # Programar ejecución solo dentro del rango de horas permitido + for hora in range(horaInicio.hour, horaFin.hour + 1): + scheduler.add_job( + search_indice, + trigger=CronTrigger(hour=hora, minute=0), + args=[obj] + ) + + print("✅ Tareas programadas correctamente.") + + except Exception as e: + print(f"❌ Error al leer el archivo CSV: {e}") @@ -27,3 +67,7 @@ app = FastAPI(lifespan=lifespan) # Incluir rutas app.include_router(router) +# Cargar tareas desde el CSV al iniciar +cargar_tareas_desde_csv("indices.csv") + + diff --git a/app/webscrapper.py b/app/webscrapper.py index bf6246b..d3459ec 100644 --- a/app/webscrapper.py +++ b/app/webscrapper.py @@ -205,13 +205,13 @@ def search_from_indices_file(): logging.info(f"Error al leer el archivo 'indices.txt': {e}") def search_indice(indice): - base_url = f"https://www.investing.com/indices/{indice}" + base_url = f"https://www.investing.com/indices/{indice['url']}" try: response = requests.get(base_url, headers=HEADERS) if response.status_code != 200: - logging.info(f"Error al acceder a 
la página para la consulta '{indice}': {response.status_code}") + logging.info(f"Error al acceder a la página para la consulta '{indice['nombre']}': {response.status_code}") return soup = BeautifulSoup(response.content, 'html.parser') @@ -222,11 +222,10 @@ def search_indice(indice): price_change = soup.find("span", {"data-test": "instrument-price-change"}) price_change_percent = soup.find("span", {"data-test": "instrument-price-change-percent"}) porcentaje = price_change_percent.text.strip().replace("(", "").replace(")", "").replace("%", "") - indice_real = chart_inner_title.text.strip() if chart_inner_title else indice if price and price_change and price_change_percent: data = { - "indice": indice_real, + "indice": indice['nombre'], "valorActual": price.text.replace(",", "").strip(), # Convertir a número "cambio": price_change.text.replace(",", "").strip(), # Convertir a número "porcentaje": porcentaje # Eliminar paréntesis @@ -236,11 +235,11 @@ def search_indice(indice): response_telegram = requests.post(TELEGRAM_BOT_URL, json=data) if response_telegram.status_code == 200: - logging.info(f"Mensaje enviado a Telegram correctamente para '{indice}'") + logging.info(f"Mensaje enviado a Telegram correctamente para '{indice['nombre']}'") else: logging.error(f"Error enviando mensaje a Telegram: {response_telegram.status_code} - {response_telegram.text}") else: - logging.info(f"No se encontraron datos para el índice '{indice}'.") + logging.info(f"No se encontraron datos para el índice '{indice['nombre']}'.") except requests.RequestException as e: logging.error(f"Error en la solicitud: {e}")