cosillas del dashboard

This commit is contained in:
jlimolina 2025-06-09 16:00:31 +02:00
parent e9264bc6ce
commit 2cd6cdfe3c
4 changed files with 218 additions and 27 deletions

169
app.py
View file

@ -5,10 +5,11 @@ import hashlib
import re
import csv
import math
from io import StringIO
from io import StringIO, BytesIO
from datetime import datetime, timedelta
import logging
import atexit
import zipfile
from flask import Flask, render_template, request, redirect, url_for, Response, flash
from apscheduler.schedulers.background import BackgroundScheduler
@ -22,7 +23,14 @@ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='[%(asctime)s]
app = Flask(__name__)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', os.urandom(24))
DB_CONFIG = {"host": "localhost", "port": 5432, "dbname": "rss", "user": "rss", "password": "x"}
# Configuración de la base de datos
DB_CONFIG = {
"host": os.environ.get("DB_HOST", "localhost"),
"port": int(os.environ.get("DB_PORT", 5432)),
"dbname": os.environ.get("DB_NAME", "rss"),
"user": os.environ.get("DB_USER", "rss"),
"password": os.environ.get("DB_PASS", "x")
}
MAX_FALLOS = 5
def get_conn():
@ -38,7 +46,11 @@ def safe_html(text):
@app.route("/")
def home():
noticias, categorias, continentes, paises = [], [], [], []
cat_id, cont_id, pais_id = request.args.get("categoria_id"), request.args.get("continente_id"), request.args.get("pais_id")
cat_id = request.args.get("categoria_id")
cont_id = request.args.get("continente_id")
pais_id = request.args.get("pais_id")
fecha_filtro = request.args.get("fecha")
try:
with get_conn() as conn:
with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
@ -51,23 +63,36 @@ def home():
else:
cursor.execute("SELECT id, nombre, continente_id FROM paises ORDER BY nombre")
paises = cursor.fetchall()
sql_params, conditions = [], []
sql_base = "SELECT n.fecha, n.titulo, n.resumen, n.url, n.imagen_url, c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente FROM noticias n LEFT JOIN categorias c ON n.categoria_id = c.id LEFT JOIN paises p ON n.pais_id = p.id LEFT JOIN continentes co ON p.continente_id = co.id"
if cat_id: conditions.append("n.categoria_id = %s"); sql_params.append(cat_id)
if pais_id: conditions.append("n.pais_id = %s"); sql_params.append(pais_id)
elif cont_id: conditions.append("p.continente_id = %s"); sql_params.append(cont_id)
if fecha_filtro:
try:
fecha_obj = datetime.strptime(fecha_filtro, '%Y-%m-%d')
fecha_inicio = fecha_obj
fecha_fin = fecha_obj + timedelta(days=1)
conditions.append("n.fecha >= %s AND n.fecha < %s")
sql_params.extend([fecha_inicio, fecha_fin])
except ValueError:
flash("Formato de fecha no válido. Use AAAA-MM-DD.", "error")
fecha_filtro = None
if conditions: sql_base += " WHERE " + " AND ".join(conditions)
sql_final = sql_base + " ORDER BY n.fecha DESC NULLS LAST LIMIT 50"
cursor.execute(sql_final, tuple(sql_params))
noticias = cursor.fetchall()
except psycopg2.Error as db_err:
app.logger.error(f"[DB ERROR] Al leer noticias: {db_err}", exc_info=True)
flash("Error de base de datos al cargar las noticias.", "error")
return render_template("noticias.html", noticias=noticias, categorias=categorias, continentes=continentes, paises=paises,
cat_id=int(cat_id) if cat_id else None, cont_id=int(cont_id) if cont_id else None, pais_id=int(pais_id) if pais_id else None)
cat_id=int(cat_id) if cat_id else None, cont_id=int(cont_id) if cont_id else None,
pais_id=int(pais_id) if pais_id else None, fecha_filtro=fecha_filtro)
@app.route("/feeds")
def dashboard():
@ -106,6 +131,13 @@ def manage_feeds():
total_pages = math.ceil(total_feeds / per_page)
return render_template("feeds_list.html", feeds=feeds_list, page=page, total_pages=total_pages, total_feeds=total_feeds)
def _get_form_dependencies(cursor):
cursor.execute("SELECT id, nombre FROM categorias ORDER BY nombre")
categorias = cursor.fetchall()
cursor.execute("SELECT id, nombre, continente_id FROM paises ORDER BY nombre")
paises = cursor.fetchall()
return categorias, paises
@app.route("/feeds/add", methods=['GET', 'POST'])
def add_feed():
if request.method == 'POST':
@ -122,15 +154,12 @@ def add_feed():
app.logger.error(f"[DB ERROR] Al agregar feed: {db_err}", exc_info=True)
flash(f"Error al añadir el feed: {db_err}", "error")
return redirect(url_for("dashboard"))
categorias, paises = [], []
try:
with get_conn() as conn:
with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
cursor.execute("SELECT id, nombre FROM categorias ORDER BY nombre")
categorias = cursor.fetchall()
cursor.execute("SELECT id, nombre FROM paises ORDER BY nombre")
paises = cursor.fetchall()
categorias, paises = _get_form_dependencies(cursor)
except psycopg2.Error as db_err:
app.logger.error(f"[DB ERROR] Al cargar formulario para añadir feed: {db_err}")
flash("No se pudieron cargar las categorías o países.", "error")
@ -151,19 +180,20 @@ def edit_feed(feed_id):
)
flash("Feed actualizado correctamente.", "success")
return redirect(url_for("manage_feeds"))
cursor.execute("SELECT * FROM feeds WHERE id = %s", (feed_id,))
feed = cursor.fetchone()
cursor.execute("SELECT id, nombre FROM categorias ORDER BY nombre")
categorias = cursor.fetchall()
cursor.execute("SELECT id, nombre, continente_id FROM paises ORDER BY nombre")
paises = cursor.fetchall()
categorias, paises = _get_form_dependencies(cursor)
except psycopg2.Error as db_err:
app.logger.error(f"[DB ERROR] Al editar feed: {db_err}", exc_info=True)
flash(f"Error al editar el feed: {db_err}", "error")
return redirect(url_for("manage_feeds"))
if not feed:
flash("No se encontró el feed solicitado.", "error")
return redirect(url_for("manage_feeds"))
return render_template("edit_feed.html", feed=feed, categorias=categorias, paises=paises)
@app.route("/delete/<int:feed_id>")
@ -207,10 +237,12 @@ def backup_feeds():
if not feeds_:
flash("No hay feeds para exportar.", "warning")
return redirect(url_for("dashboard"))
si = StringIO()
writer = csv.DictWriter(si, fieldnames=[desc[0] for desc in cursor.description])
writer.writeheader()
writer.writerows([dict(row) for row in feeds_])
output = si.getvalue()
si.close()
return Response(output, mimetype="text/csv", headers={"Content-Disposition": "attachment;filename=feeds_backup.csv"})
@ -219,6 +251,100 @@ def backup_feeds():
flash("Error al generar el backup.", "error")
return redirect(url_for("dashboard"))
@app.route("/backup_noticias")
def backup_noticias():
    """Export every stored news item as a downloadable CSV attachment.

    Pulls all rows from ``noticias`` joined with their category, country
    and continent names, serializes them to an in-memory CSV, and returns
    it as ``noticias_backup.csv``. Redirects back to the dashboard with a
    flash message when there is nothing to export or when anything fails.
    """
    try:
        with get_conn() as conn:
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
                cursor.execute("""
                    SELECT n.id, n.titulo, n.resumen, n.url, n.fecha, n.imagen_url,
                           c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente
                    FROM noticias n
                    LEFT JOIN categorias c ON n.categoria_id = c.id
                    LEFT JOIN paises p ON n.pais_id = p.id
                    LEFT JOIN continentes co ON p.continente_id = co.id
                    ORDER BY n.fecha DESC
                """)
                filas = cursor.fetchall()
                if not filas:
                    flash("No hay noticias para exportar.", "warning")
                    return redirect(url_for("dashboard"))
                # Header columns come from cursor metadata, so the CSV
                # header always mirrors the SELECT list above.
                columnas = [col[0] for col in cursor.description]
        buffer = StringIO()
        writer = csv.DictWriter(buffer, fieldnames=columnas)
        writer.writeheader()
        for fila in filas:
            writer.writerow(dict(fila))
        contenido = buffer.getvalue()
        buffer.close()
        return Response(
            contenido,
            mimetype="text/csv",
            headers={"Content-Disposition": "attachment;filename=noticias_backup.csv"}
        )
    except Exception as e:
        app.logger.error(f"[ERROR] Al hacer backup de noticias: {e}", exc_info=True)
        flash("Error al generar el backup de noticias.", "error")
        return redirect(url_for("dashboard"))
def _dictrows_to_csv(fieldnames, rows):
    """Serialize mapping-like DB rows to CSV text with a header line.

    fieldnames: ordered column names (typically taken from
    ``cursor.description``). rows: iterable of dict-like rows
    (e.g. psycopg2 DictRow). Returns the CSV content as one string.
    """
    buf = StringIO()
    try:
        writer = csv.DictWriter(buf, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows([dict(row) for row in rows])
        return buf.getvalue()
    finally:
        buf.close()


@app.route("/backup_completo")
def backup_completo():
    """Export feeds and news as two CSVs bundled in one ZIP download.

    Builds the archive entirely in memory: ``feeds.csv`` (feeds joined
    with category/country names) and ``noticias.csv`` (news joined with
    category/country/continent names). An empty table is simply omitted
    from the archive. On any failure, logs the error and redirects to
    the dashboard with a flash message.
    """
    try:
        memory_buffer = BytesIO()
        with zipfile.ZipFile(memory_buffer, 'w', zipfile.ZIP_DEFLATED) as zipf:
            with get_conn() as conn:
                with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
                    # Feeds
                    cursor.execute("""
                        SELECT f.id, f.nombre, f.descripcion, f.url, f.categoria_id, c.nombre AS categoria,
                               f.pais_id, p.nombre AS pais, f.idioma, f.activo, f.fallos
                        FROM feeds f
                        LEFT JOIN categorias c ON f.categoria_id = c.id
                        LEFT JOIN paises p ON f.pais_id = p.id
                        ORDER BY f.id
                    """)
                    feeds_data = cursor.fetchall()
                    if feeds_data:
                        zipf.writestr(
                            "feeds.csv",
                            _dictrows_to_csv([desc[0] for desc in cursor.description], feeds_data),
                        )
                    # Noticias
                    cursor.execute("""
                        SELECT n.id, n.titulo, n.resumen, n.url, n.fecha, n.imagen_url,
                               c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente
                        FROM noticias n
                        LEFT JOIN categorias c ON n.categoria_id = c.id
                        LEFT JOIN paises p ON n.pais_id = p.id
                        LEFT JOIN continentes co ON p.continente_id = co.id
                        ORDER BY n.fecha DESC
                    """)
                    noticias_data = cursor.fetchall()
                    if noticias_data:
                        zipf.writestr(
                            "noticias.csv",
                            _dictrows_to_csv([desc[0] for desc in cursor.description], noticias_data),
                        )
        # Rewind so Flask streams the archive from the beginning.
        memory_buffer.seek(0)
        return Response(
            memory_buffer,
            mimetype="application/zip",
            headers={"Content-Disposition": "attachment;filename=rss_backup_completo.zip"}
        )
    except Exception as e:
        app.logger.error(f"[ERROR] Al hacer backup completo: {e}", exc_info=True)
        flash("Error al generar el backup completo.", "error")
        return redirect(url_for("dashboard"))
@app.route("/restore_feeds", methods=["GET", "POST"])
def restore_feeds():
if request.method == "POST":
@ -226,11 +352,13 @@ def restore_feeds():
if not file or not file.filename.endswith(".csv"):
flash("Archivo no válido. Por favor, sube un archivo .csv.", "error")
return redirect(url_for("restore_feeds"))
try:
file_stream = StringIO(file.read().decode("utf-8"))
reader = csv.DictReader(file_stream)
rows = list(reader)
n_ok, n_err = 0, 0
with get_conn() as conn:
with conn.cursor() as cursor:
for row in rows:
@ -240,7 +368,7 @@ def restore_feeds():
cursor.execute(
"""
INSERT INTO feeds (id, nombre, descripcion, url, categoria_id, pais_id, idioma, activo, fallos)
VALUES (%(id)s, %(nombre)s, %(descripcion)s, %(url)s, %(categoria_id)s, %(pais_id)s, %(idioma)s, %(activo)s, %(fallos)s)
VALUES (%(id)s, %(nombre)s, %(descripcion)s, %(url)s, %(categoria_id)s, %(pais_id)s, %(idioma)s, %(activo)s, %(fallos)s)
ON CONFLICT (id) DO UPDATE SET
nombre = EXCLUDED.nombre, descripcion = EXCLUDED.descripcion, url = EXCLUDED.url,
categoria_id = EXCLUDED.categoria_id, pais_id = EXCLUDED.pais_id, idioma = EXCLUDED.idioma,
@ -262,6 +390,7 @@ def restore_feeds():
app.logger.error(f"Error al restaurar feeds desde CSV: {e}", exc_info=True)
flash(f"Ocurrió un error general al procesar el archivo: {e}", "error")
return redirect(url_for("dashboard"))
return render_template("restore_feeds.html")
def sumar_fallo_feed(cursor, feed_id):
@ -285,6 +414,7 @@ def fetch_and_store():
if not feeds_to_process:
app.logger.info("No hay feeds activos para procesar.")
return
for feed in feeds_to_process:
try:
app.logger.info(f"Procesando feed: {feed['url']}")
@ -293,7 +423,9 @@ def fetch_and_store():
app.logger.warning(f"[BOZO] Feed mal formado: {feed['url']} - Excepción: {parsed.bozo_exception}")
sumar_fallo_feed(cursor, feed['id'])
continue
resetear_fallos_feed(cursor, feed['id'])
for entry in parsed.entries:
try:
link = entry.get("link")
@ -301,15 +433,18 @@ def fetch_and_store():
noticia_id = hashlib.md5(link.encode()).hexdigest()
titulo = entry.get("title", "")
resumen = entry.get("summary", "")
imagen_url = ""
if "media_content" in entry and entry.media_content:
imagen_url = entry.media_content[0].get("url", "")
elif "<img" in resumen:
img_search = re.search(r'src="([^"]+)"', resumen)
if img_search: imagen_url = img_search.group(1)
fecha_publicacion = None
if "published_parsed" in entry and entry.published_parsed: fecha_publicacion = datetime(*entry.published_parsed[:6])
elif "updated_parsed" in entry and entry.updated_parsed: fecha_publicacion = datetime(*entry.updated_parsed[:6])
cursor.execute(
"INSERT INTO noticias (id, titulo, resumen, url, fecha, imagen_url, categoria_id, pais_id) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) ON CONFLICT (id) DO NOTHING",
(noticia_id, titulo, resumen, link, fecha_publicacion, imagen_url, feed['categoria_id'], feed['pais_id'])