Commit: running with "automation". API up.
README.md
@@ -1,30 +1,9 @@
 # Installation
-To install the application, download the project. Once downloaded, the file main.py must be left commented out like this:
-```python
-from fastapi import FastAPI
-from .database import Base, engine
-from .routes import router
-# from apscheduler.schedulers.background import BackgroundScheduler
-# from .webscrapper import ejecutar_scrapper
-
-# Create the MySQL tables if they do not exist
-Base.metadata.create_all(bind=engine)
-
-# Initialize FastAPI
-app = FastAPI()
-# scheduler = BackgroundScheduler()
-
-# Include routes
-app.include_router(router)
-
-# @app.on_event("startup")
-# def startup_event():
-#     scheduler.add_job(ejecutar_scrapper, "interval", hours=24)
-#     scheduler.start()
-
-# @app.on_event("shutdown")
-# def shutdown_event():
-#     scheduler.shutdown()
-```
-Proceed with `docker-compose up -d --build`.
-Once it finishes, uncomment the lines and run `docker-compose up -d --build` again.
+To install, bring up the container, open a shell inside it, and run:
+```sh
+chmod +x /app/auto.sh
+cd /app
+./auto.sh
+```
+
+This starts the news-collection cycle, which runs every 24 hours.
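For reference, a minimal sketch of that installation flow from the host. The compose service name `api` is an assumption; it is not shown anywhere in this diff:

```sh
# Build and start the stack in the background (command taken from the old README)
docker-compose up -d --build

# Open a shell inside the API container (service name "api" is an assumption)
docker-compose exec api /bin/sh

# Inside the container, start the scrapper loop:
chmod +x /app/auto.sh
cd /app && ./auto.sh
```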
Dockerfile
@@ -5,10 +5,15 @@ WORKDIR ./
 COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 RUN mkdir app
+ENV PYTHONPATH=/app
 
 COPY . ./app
 
+# Make sure the auto.sh script is executable
+RUN chmod +x /app/auto.sh
+
 RUN ls
 
-CMD ["uvicorn", "app.main:app", "--reload", "--host", "0.0.0.0", "--port", "8000"]
+# Run the auto.sh script when the container starts, then launch uvicorn
+CMD ["/bin/sh", "-c", "/app/auto.sh && uvicorn app.main:app --reload --host 0.0.0.0 --port 8000"]
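One thing worth flagging about the new CMD: `auto.sh` (shown below) runs an infinite loop and never exits, so chaining it with `&&` means uvicorn only starts if the script ever returns. A possible variant, assuming the intent is to run the scrapper loop alongside the API, backgrounds the script instead:

```sh
# Sketch of an alternative container command: run the scrapper loop in the
# background, then exec uvicorn as the foreground process (PID 1).
/app/auto.sh &
exec uvicorn app.main:app --host 0.0.0.0 --port 8000
```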
app/auto.sh (new file)
@@ -0,0 +1,11 @@
+#!/bin/bash
+# auto.sh - Script that runs the scrapper every 24 hours in an infinite loop
+
+while true
+do
+    # Run the Python script
+    python /app/webscrapper.py
+
+    # Sleep for 24 hours (86400 seconds)
+    sleep 86400
+done
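If this loop is ever launched by hand rather than from the Dockerfile CMD, a detached invocation keeps it alive after the shell closes; a sketch, with the log path being an assumption:

```sh
# Run the loop detached from the terminal, capturing output (log path assumed)
nohup /app/auto.sh > /app/auto.log 2>&1 &
```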
app/main.py
@@ -1,8 +1,8 @@
 from fastapi import FastAPI
-from .database import Base, engine
-from .routes import router
+from database import Base, engine
+from routes import router
 from apscheduler.schedulers.background import BackgroundScheduler
-from .webscrapper import ejecutar_scrapper
+#from webscrapper import ejecutar_scrapper
 
 # Create the MySQL tables if they do not exist
 Base.metadata.create_all(bind=engine)
@@ -14,11 +14,11 @@ scheduler = BackgroundScheduler()
 # Include routes
 app.include_router(router)
 
-@app.on_event("startup")
-def startup_event():
-    scheduler.add_job(ejecutar_scrapper, "interval", hours=24)
-    scheduler.start()
+# @app.on_event("startup")
+# def startup_event():
+#     scheduler.add_job(ejecutar_scrapper, "interval", hours=24)
+#     scheduler.start()
 
-@app.on_event("shutdown")
-def shutdown_event():
-    scheduler.shutdown()
+# @app.on_event("shutdown")
+# def shutdown_event():
+#     scheduler.shutdown()
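The import changes running through these files (relative `from .database import ...` becoming absolute `from database import ...`) work because the Dockerfile now sets `ENV PYTHONPATH=/app`. A quick sanity check inside the container, as a sketch:

```sh
# With /app on PYTHONPATH, the absolute imports resolve from any directory
PYTHONPATH=/app python -c "from database import Base; from routes import router; print('imports ok')"
```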
app/models.py
@@ -1,6 +1,6 @@
 from sqlalchemy import Column, Integer, String, Text, DateTime, Boolean
 from datetime import datetime
-from .database import Base
+from database import Base
 
 class NewsItem(Base):
     __tablename__ = "news"
app/routes.py
@@ -1,8 +1,8 @@
 from fastapi import APIRouter, Depends, HTTPException
 from sqlalchemy.orm import Session
 from sqlalchemy.sql import func
-from .database import get_db
-from .models import NewsItem
+from database import get_db
+from models import NewsItem
 from pydantic import BaseModel
 from datetime import datetime
 import logging
app/webscrapper.py
@@ -2,14 +2,14 @@ import requests
 from bs4 import BeautifulSoup
 import time
 from googlenewsdecoder import gnewsdecoder
-from .iacorrector import is_security_related, is_critico, is_favorable  # Imports the functions from iacorrector.py
+from iacorrector import is_security_related, is_critico, is_favorable  # Imports the functions from iacorrector.py
 from datetime import datetime
 import pytz
 import logging
-from .database import get_db
+from database import get_db
 from sqlalchemy.orm import Session
-from .routes import create_news_item, NewsItemCreate
-from .autorsearcher import get_author_from_url
+from routes import create_news_item, NewsItemCreate
+from autorsearcher import get_author_from_url
 
 # Logging configuration
 LOG_FILE = "app.log"
@@ -94,7 +94,7 @@ def search_news(query):
     articles = soup.find_all("item")
     news_list = []
 
-    for article in articles[:10]:  # Limit to the first 10 articles
+    for article in articles[:1]:  # Limit to the first article
         try:
             title = article.title.get_text(strip=True)
             content = article.description.get_text(strip=True) if article.description else "Sin descripción"
docker-compose.yml
@@ -36,7 +36,7 @@ services:
     healthcheck:
       test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
       timeout: 5s
-      retries: 10
+      retries: 2
 
 #  metabase:
 #    image: metabase/metabase:latest
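With `timeout: 5s` and `retries: 2`, the MySQL service is marked unhealthy much sooner than with the previous 10 retries. To watch the live health state while tuning these values, something like the following works; the container name `mysql` is an assumption:

```sh
# Inspect the current health status of the MySQL container (name assumed)
docker inspect --format '{{json .State.Health.Status}}' mysql
```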