Running and with "automation". API up

Your Name
2025-02-11 16:29:05 +01:00
parent 5a0833f3cf
commit 325276253b
8 changed files with 43 additions and 48 deletions


@@ -1,30 +1,9 @@
# Installation
To install the application, download the project. Once downloaded, main.py must be left commented out like this:
```python
from fastapi import FastAPI
from .database import Base, engine
from .routes import router
# from apscheduler.schedulers.background import BackgroundScheduler
# from .webscrapper import ejecutar_scrapper
# Create the tables in MySQL if they do not exist
Base.metadata.create_all(bind=engine)

# Initialize FastAPI
app = FastAPI()
# scheduler = BackgroundScheduler()

# Include routes
app.include_router(router)

# @app.on_event("startup")
# def startup_event():
#     scheduler.add_job(ejecutar_scrapper, "interval", hours=24)
#     scheduler.start()

# @app.on_event("shutdown")
# def shutdown_event():
#     scheduler.shutdown()
```

For the installation, bring the container up, open a shell inside it, and run:
```sh
chmod +x /app/auto.sh
cd /app
./auto.sh
```
Proceed with `docker-compose up -d --build`.
Once it finishes, uncomment the lines above and run `docker-compose up -d --build` again.
This starts the news-capture cycle, which runs every 24 hours.
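
Once the rebuilt container is running, a quick smoke test against the API confirms it is up. A minimal sketch, assuming port 8000 is published to the host as in the Dockerfile's uvicorn command, and relying on FastAPI's auto-generated /docs page:

```python
# smoke_test.py - minimal check that the API answers; assumes port 8000
# is published to the host as in the Dockerfile's uvicorn command.
import requests

resp = requests.get("http://localhost:8000/docs")  # FastAPI's auto-generated docs UI
print(resp.status_code)  # 200 means the API is up
```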


@@ -5,10 +5,15 @@ WORKDIR ./
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
RUN mkdir app
ENV PYTHONPATH=/app
COPY . ./app
# Ensure the auto.sh script is executable
RUN chmod +x /app/auto.sh
RUN ls
CMD ["uvicorn", "app.main:app","--reload", "--host", "0.0.0.0", "--port", "8000"]
# Launch auto.sh in the background at container start, then run uvicorn.
# Note: auto.sh loops forever, so it is backgrounded with "&"; chaining
# with "&&" would wait for it to exit and uvicorn would never start.
CMD ["/bin/sh", "-c", "/app/auto.sh & uvicorn app.main:app --reload --host 0.0.0.0 --port 8000"]

app/auto.sh (new file)

@@ -0,0 +1,11 @@
#!/bin/bash
# auto.sh - runs the scrapper every 24 hours in an infinite loop
while true
do
    # Run the Python scrapper
    python /app/webscrapper.py
    # Sleep for 24 hours (86400 seconds)
    sleep 86400
done
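
auto.sh runs webscrapper.py as a standalone script, while main.py (below) imports ejecutar_scrapper from the same module. A minimal sketch of the entry-point guard that keeps both uses compatible; the guard itself is an assumption, not shown in this diff:

```python
# Tail of webscrapper.py - hypothetical entry-point guard (not in the diff).
# Running "python /app/webscrapper.py" (as auto.sh does) executes one scrape;
# importing the module (as main.py does) defines ejecutar_scrapper without running it.
if __name__ == "__main__":
    ejecutar_scrapper()
```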


@@ -1,8 +1,8 @@
from fastapi import FastAPI
from .database import Base, engine
from .routes import router
from database import Base, engine
from routes import router
from apscheduler.schedulers.background import BackgroundScheduler
from .webscrapper import ejecutar_scrapper
# from webscrapper import ejecutar_scrapper
# Create the tables in MySQL if they do not exist
Base.metadata.create_all(bind=engine)
@@ -14,11 +14,11 @@ scheduler = BackgroundScheduler()
# Include routes
app.include_router(router)
@app.on_event("startup")
def startup_event():
scheduler.add_job(ejecutar_scrapper, "interval", hours=24)
scheduler.start()
# @app.on_event("startup")
# def startup_event():
# scheduler.add_job(ejecutar_scrapper, "interval", hours=24)
# scheduler.start()
@app.on_event("shutdown")
def shutdown_event():
scheduler.shutdown()
# @app.on_event("shutdown")
# def shutdown_event():
# scheduler.shutdown()
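
The @app.on_event hooks being commented out here are deprecated in current FastAPI in favor of a lifespan context manager. A sketch of equivalent scheduler wiring under that newer API, assuming the same BackgroundScheduler and ejecutar_scrapper job as above:

```python
from contextlib import asynccontextmanager

from apscheduler.schedulers.background import BackgroundScheduler
from fastapi import FastAPI

from webscrapper import ejecutar_scrapper

scheduler = BackgroundScheduler()

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Schedule the scrapper every 24 hours when the app starts...
    scheduler.add_job(ejecutar_scrapper, "interval", hours=24)
    scheduler.start()
    yield
    # ...and stop the scheduler cleanly on shutdown.
    scheduler.shutdown()

app = FastAPI(lifespan=lifespan)
```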


@@ -1,6 +1,6 @@
from sqlalchemy import Column, Integer, String, Text, DateTime, Boolean
from datetime import datetime
from .database import Base
from database import Base
class NewsItem(Base):
__tablename__ = "news"


@@ -1,8 +1,8 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from sqlalchemy.sql import func
from .database import get_db
from .models import NewsItem
from database import get_db
from models import NewsItem
from pydantic import BaseModel
from datetime import datetime
import logging
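
webscrapper.py (below) imports create_news_item and NewsItemCreate from this module and calls them directly with a database Session. A sketch of what that pair might look like; the field names are hypothetical, since the hunk does not show them:

```python
# Hypothetical shapes - the diff does not show these definitions.
from datetime import datetime

from pydantic import BaseModel
from sqlalchemy.orm import Session

from models import NewsItem

class NewsItemCreate(BaseModel):
    title: str               # hypothetical field
    content: str             # hypothetical field
    published_at: datetime   # hypothetical field

def create_news_item(item: NewsItemCreate, db: Session) -> NewsItem:
    # Persist a scraped article using the NewsItem model from models.py.
    db_item = NewsItem(**item.model_dump())  # .dict() on Pydantic v1
    db.add(db_item)
    db.commit()
    db.refresh(db_item)
    return db_item
```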


@@ -2,14 +2,14 @@ import requests
from bs4 import BeautifulSoup
import time
from googlenewsdecoder import gnewsdecoder
from .iacorrector import is_security_related, is_critico, is_favorable  # Import the functions from iacorrector.py
from iacorrector import is_security_related, is_critico, is_favorable  # Import the functions from iacorrector.py
from datetime import datetime
import pytz
import logging
from .database import get_db
from database import get_db
from sqlalchemy.orm import Session
from .routes import create_news_item, NewsItemCreate
from .autorsearcher import get_author_from_url
from routes import create_news_item, NewsItemCreate
from autorsearcher import get_author_from_url
# Logging configuration
LOG_FILE = "app.log"
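
The hunk cuts off right after LOG_FILE. A typical basicConfig wiring for it, offered as an assumption rather than the file's actual setup:

```python
import logging

LOG_FILE = "app.log"
# Assumed logging setup - the diff only shows the LOG_FILE constant.
logging.basicConfig(
    filename=LOG_FILE,
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(message)s",
)
```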
@@ -94,7 +94,7 @@ def search_news(query):
articles = soup.find_all("item")
news_list = []
for article in articles[:10]: # Limit to the first 10 articles
for article in articles[:1]: # Limit to the first article
try:
title = article.title.get_text(strip=True)
content = article.description.get_text(strip=True) if article.description else "Sin descripción"
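
Google News RSS items carry redirect URLs rather than publisher links, which is why gnewsdecoder is imported above. A minimal sketch of resolving one, assuming the return shape documented by the googlenewsdecoder package (a dict with status/decoded_url keys); the URL is a placeholder:

```python
from googlenewsdecoder import gnewsdecoder

# Example Google News RSS link (placeholder URL).
source_url = "https://news.google.com/rss/articles/..."

result = gnewsdecoder(source_url)
if result.get("status"):
    print("Decoded URL:", result["decoded_url"])
else:
    print("Decoding failed:", result.get("message"))
```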


@@ -36,7 +36,7 @@ services:
healthcheck:
test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
timeout: 5s
retries: 10
retries: 2
# metabase:
# image: metabase/metabase:latest