sql y fastapi
This commit is contained in:
22
scrapper/Dockerfile
Normal file
22
scrapper/Dockerfile
Normal file
@ -0,0 +1,22 @@
|
||||
# syntax=docker/dockerfile:1

# Pin the base image — ":latest" is not reproducible and can silently break builds.
FROM python:3.12-slim

# cron drives the scheduled scrape (see crontab.txt). Install and clean the
# apt lists in the same layer so the cache does not bloat the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
      cron \
    && rm -rf /var/lib/apt/lists/*

# WORKDIR must be absolute (a relative "./" is undefined behavior); /app also
# matches the path referenced by the job in crontab.txt.
WORKDIR /app

# Copy the dependency manifest alone first so the pip layer stays cached
# until requirements.txt actually changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# Install the crontab. Files in /etc/cron.d must be root-owned, mode 0644,
# and each job line must include a user field — verify crontab.txt has one.
COPY crontab.txt /etc/cron.d/my_cron_job
RUN chmod 0644 /etc/cron.d/my_cron_job

# Log file the cron job appends to (see the redirection in crontab.txt).
RUN touch /var/log/cron.log && chmod 0666 /var/log/cron.log

# Only the LAST CMD in a Dockerfile takes effect: the original file declared
# CMD ["sh","-c","cron -f"] followed by CMD ["python","./webscrapper.py"],
# so cron was never started. Start the cron daemon in the background, then
# exec the scraper so it runs as the container's main (PID 1) process.
CMD ["sh", "-c", "cron && exec python ./webscrapper.py"]
|
1
scrapper/app.log
Normal file
1
scrapper/app.log
Normal file
@ -0,0 +1 @@
|
||||
|
1
scrapper/crontab.txt
Normal file
1
scrapper/crontab.txt
Normal file
@ -0,0 +1 @@
|
||||
# Runs daily at 01:00. Files under /etc/cron.d REQUIRE a user field between
# the schedule and the command — without "root" this job never executes.
# NOTE(review): the repo's Dockerfile runs webscrapper.py; confirm whether
# /app/main.py is the intended script. Adjust schedule/path as needed.
0 1 * * * root python3 /app/main.py >> /var/log/cron.log 2>&1
|
@ -12,7 +12,7 @@ logging.basicConfig(
|
||||
)
|
||||
|
||||
# Obtener variables de entorno
|
||||
OLLAMA_URL = os.environ.get("OLLAMA_URL", "http://localhost:11434/api/generate")
|
||||
OLLAMA_URL = os.environ.get("OLLAMA_URL", "http://host.docker.internal:11434/api/generate")
|
||||
OLLAMA_MODEL = os.environ.get("OLLAMA_MODEL", "llama3")
|
||||
|
||||
def is_security_related(prompt):
|
||||
|
11
scrapper/requirements.txt
Normal file
11
scrapper/requirements.txt
Normal file
@ -0,0 +1,11 @@
|
||||
# NOTE(review): versions are unpinned — consider pinning (pkg==x.y.z) for
# reproducible builds.
fastapi
uvicorn
requests
beautifulsoup4
googlenewsdecoder
pytz
# "logging" removed: it is part of the Python standard library; the PyPI
# package named "logging" is an abandoned Python 2 backport that breaks
# pip installs on Python 3.
sqlalchemy
pydantic
python-dotenv
mysql-connector-python
|
@ -134,7 +134,7 @@ def search_news(query):
|
||||
return news_list
|
||||
|
||||
def insertar_datos(news_item):
|
||||
API_URL = "http://localhost:8000/news/"
|
||||
API_URL = "http://app:8000/news/"
|
||||
|
||||
response = requests.post(API_URL, json=news_item)
|
||||
|
||||
|
Reference in New Issue
Block a user