From 2cd6cdfe3c591a9004cf69d7e35f808491bb6e4b Mon Sep 17 00:00:00 2001
From: jlimolina
Date: Mon, 9 Jun 2025 16:00:31 +0200
Subject: [PATCH] Dashboard tweaks

---
 actualizar_repo.sh       |  26 ++++++
 app.py                   | 169 +++++++++++++++++++++++++++++++++++----
 templates/base.html      |   3 +-
 templates/dashboard.html |  47 ++++++++---
 4 files changed, 218 insertions(+), 27 deletions(-)
 create mode 100755 actualizar_repo.sh

diff --git a/actualizar_repo.sh b/actualizar_repo.sh
new file mode 100755
index 0000000..447376f
--- /dev/null
+++ b/actualizar_repo.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+# --- Script to update the Git repository automatically ---
+
+echo "šŸš€ Starting repository update..."
+
+# 1. Check the status (optional, but useful to see what will be pushed)
+echo "----------------------------------------"
+git status
+echo "----------------------------------------"
+
+# 2. Stage all modified and new files
+echo "āž• Adding all files to the staging area (git add .)"
+git add .
+
+# 3. Build the commit message from the current date and time
+COMMIT_MSG="Update on $(date +'%Y-%m-%d at %H:%M:%S')"
+echo "šŸ’¬ Creating commit with message: '$COMMIT_MSG'"
+git commit -m "$COMMIT_MSG"
+
+# 4. Push the changes to GitHub
+echo "ā¬†ļø Pushing changes to the remote repository (git push)..."
+git push
+
+echo "āœ… Update complete!"
+
diff --git a/app.py b/app.py
index 0bbb2b5..1d69d8f 100644
--- a/app.py
+++ b/app.py
@@ -5,10 +5,11 @@ import hashlib
 import re
 import csv
 import math
-from io import StringIO
+from io import StringIO, BytesIO
 from datetime import datetime, timedelta
 import logging
 import atexit
+import zipfile
 
 from flask import Flask, render_template, request, redirect, url_for, Response, flash
 from apscheduler.schedulers.background import BackgroundScheduler
@@ -22,7 +23,14 @@ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='[%(asctime)s]
 app = Flask(__name__)
 app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', os.urandom(24))
 
-DB_CONFIG = {"host": "localhost", "port": 5432, "dbname": "rss", "user": "rss", "password": "x"}
+# Database configuration (read from the environment, with local defaults)
+DB_CONFIG = {
+    "host": os.environ.get("DB_HOST", "localhost"),
+    "port": int(os.environ.get("DB_PORT", 5432)),
+    "dbname": os.environ.get("DB_NAME", "rss"),
+    "user": os.environ.get("DB_USER", "rss"),
+    "password": os.environ.get("DB_PASS", "x")
+}
 MAX_FALLOS = 5
 
 def get_conn():
@@ -38,7 +46,11 @@ def safe_html(text):
 @app.route("/")
 def home():
     noticias, categorias, continentes, paises = [], [], [], []
-    cat_id, cont_id, pais_id = request.args.get("categoria_id"), request.args.get("continente_id"), request.args.get("pais_id")
+    cat_id = request.args.get("categoria_id")
+    cont_id = request.args.get("continente_id")
+    pais_id = request.args.get("pais_id")
+    fecha_filtro = request.args.get("fecha")
+
     try:
         with get_conn() as conn:
             with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
@@ -51,23 +63,36 @@ def home():
                 else:
                     cursor.execute("SELECT id, nombre, continente_id FROM paises ORDER BY nombre")
                     paises = cursor.fetchall()
-    
+
                 sql_params, conditions = [], []
                 sql_base = "SELECT n.fecha, n.titulo, n.resumen, n.url, n.imagen_url, c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente FROM noticias n LEFT JOIN categorias c ON n.categoria_id = c.id LEFT JOIN paises p ON n.pais_id = p.id LEFT JOIN continentes co ON p.continente_id = co.id"
+
                 if cat_id: conditions.append("n.categoria_id = %s"); sql_params.append(cat_id)
                 if pais_id: conditions.append("n.pais_id = %s"); sql_params.append(pais_id)
                 elif cont_id: conditions.append("p.continente_id = %s"); sql_params.append(cont_id)
+
+                if fecha_filtro:
+                    try:
+                        fecha_obj = datetime.strptime(fecha_filtro, '%Y-%m-%d')
+                        fecha_inicio = fecha_obj
+                        fecha_fin = fecha_obj + timedelta(days=1)
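+                        # Half-open range [fecha_inicio, fecha_fin): matches every
+                        # timestamp on the selected day, whatever its time component,
+                        # without casting n.fecha to a date in SQL.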
+                        conditions.append("n.fecha >= %s AND n.fecha < %s")
+                        sql_params.extend([fecha_inicio, fecha_fin])
+                    except ValueError:
+                        flash("Formato de fecha no vĆ”lido. Use AAAA-MM-DD.", "error")
+                        fecha_filtro = None
+
                 if conditions:
                     sql_base += " WHERE " + " AND ".join(conditions)
-
                 sql_final = sql_base + " ORDER BY n.fecha DESC NULLS LAST LIMIT 50"
                 cursor.execute(sql_final, tuple(sql_params))
                 noticias = cursor.fetchall()
     except psycopg2.Error as db_err:
         app.logger.error(f"[DB ERROR] Al leer noticias: {db_err}", exc_info=True)
         flash("Error de base de datos al cargar las noticias.", "error")
-    
+
     return render_template("noticias.html", noticias=noticias, categorias=categorias, continentes=continentes, paises=paises,
-                           cat_id=int(cat_id) if cat_id else None, cont_id=int(cont_id) if cont_id else None, pais_id=int(pais_id) if pais_id else None)
+                           cat_id=int(cat_id) if cat_id else None, cont_id=int(cont_id) if cont_id else None,
+                           pais_id=int(pais_id) if pais_id else None, fecha_filtro=fecha_filtro)
 
 @app.route("/feeds")
 def dashboard():
@@ -106,6 +131,13 @@ def manage_feeds():
     total_pages = math.ceil(total_feeds / per_page)
     return render_template("feeds_list.html", feeds=feeds_list, page=page, total_pages=total_pages, total_feeds=total_feeds)
 
+def _get_form_dependencies(cursor):
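+    """Return the (categorias, paises) option lists shared by the feed forms."""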
render_template("edit_feed.html", feed=feed, categorias=categorias, paises=paises) @app.route("/delete/") @@ -207,10 +237,12 @@ def backup_feeds(): if not feeds_: flash("No hay feeds para exportar.", "warning") return redirect(url_for("dashboard")) + si = StringIO() writer = csv.DictWriter(si, fieldnames=[desc[0] for desc in cursor.description]) writer.writeheader() writer.writerows([dict(row) for row in feeds_]) + output = si.getvalue() si.close() return Response(output, mimetype="text/csv", headers={"Content-Disposition": "attachment;filename=feeds_backup.csv"}) @@ -219,6 +251,100 @@ def backup_feeds(): flash("Error al generar el backup.", "error") return redirect(url_for("dashboard")) +@app.route("/backup_noticias") +def backup_noticias(): + try: + with get_conn() as conn: + with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor: + cursor.execute(""" + SELECT n.id, n.titulo, n.resumen, n.url, n.fecha, n.imagen_url, + c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente + FROM noticias n + LEFT JOIN categorias c ON n.categoria_id = c.id + LEFT JOIN paises p ON n.pais_id = p.id + LEFT JOIN continentes co ON p.continente_id = co.id + ORDER BY n.fecha DESC + """) + noticias = cursor.fetchall() + if not noticias: + flash("No hay noticias para exportar.", "warning") + return redirect(url_for("dashboard")) + + si = StringIO() + writer = csv.DictWriter(si, fieldnames=[desc[0] for desc in cursor.description]) + writer.writeheader() + writer.writerows([dict(row) for row in noticias]) + + output = si.getvalue() + si.close() + return Response( + output, + mimetype="text/csv", + headers={"Content-Disposition": "attachment;filename=noticias_backup.csv"} + ) + except Exception as e: + app.logger.error(f"[ERROR] Al hacer backup de noticias: {e}", exc_info=True) + flash("Error al generar el backup de noticias.", "error") + return redirect(url_for("dashboard")) + +@app.route("/backup_completo") +def backup_completo(): + try: + memory_buffer = BytesIO() + + with zipfile.ZipFile(memory_buffer, 'w', zipfile.ZIP_DEFLATED) as zipf: + with get_conn() as conn: + with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor: + # Feeds + cursor.execute(""" + SELECT f.id, f.nombre, f.descripcion, f.url, f.categoria_id, c.nombre AS categoria, + f.pais_id, p.nombre AS pais, f.idioma, f.activo, f.fallos + FROM feeds f + LEFT JOIN categorias c ON f.categoria_id = c.id + LEFT JOIN paises p ON f.pais_id = p.id + ORDER BY f.id + """) + feeds_data = cursor.fetchall() + + if feeds_data: + feeds_si = StringIO() + writer = csv.DictWriter(feeds_si, fieldnames=[desc[0] for desc in cursor.description]) + writer.writeheader() + writer.writerows([dict(row) for row in feeds_data]) + zipf.writestr("feeds.csv", feeds_si.getvalue()) + feeds_si.close() + + # Noticias + cursor.execute(""" + SELECT n.id, n.titulo, n.resumen, n.url, n.fecha, n.imagen_url, + c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente + FROM noticias n + LEFT JOIN categorias c ON n.categoria_id = c.id + LEFT JOIN paises p ON n.pais_id = p.id + LEFT JOIN continentes co ON p.continente_id = co.id + ORDER BY n.fecha DESC + """) + noticias_data = cursor.fetchall() + + if noticias_data: + noticias_si = StringIO() + writer = csv.DictWriter(noticias_si, fieldnames=[desc[0] for desc in cursor.description]) + writer.writeheader() + writer.writerows([dict(row) for row in noticias_data]) + zipf.writestr("noticias.csv", noticias_si.getvalue()) + noticias_si.close() + + memory_buffer.seek(0) + return Response( + memory_buffer, + 
mimetype="application/zip", + headers={"Content-Disposition": "attachment;filename=rss_backup_completo.zip"} + ) + except Exception as e: + app.logger.error(f"[ERROR] Al hacer backup completo: {e}", exc_info=True) + flash("Error al generar el backup completo.", "error") + return redirect(url_for("dashboard")) + @app.route("/restore_feeds", methods=["GET", "POST"]) def restore_feeds(): if request.method == "POST": @@ -226,11 +352,13 @@ def restore_feeds(): if not file or not file.filename.endswith(".csv"): flash("Archivo no vĆ”lido. Por favor, sube un archivo .csv.", "error") return redirect(url_for("restore_feeds")) + try: file_stream = StringIO(file.read().decode("utf-8")) reader = csv.DictReader(file_stream) rows = list(reader) n_ok, n_err = 0, 0 + with get_conn() as conn: with conn.cursor() as cursor: for row in rows: @@ -240,7 +368,7 @@ def restore_feeds(): cursor.execute( """ INSERT INTO feeds (id, nombre, descripcion, url, categoria_id, pais_id, idioma, activo, fallos) - VALUES (%(id)s, %(nombre)s, %(descripcion)s, %(url)s, %(categoria_id)s, %(pais_id)s, %(idioma)s, %(activo)s, %(fallos)s) + VALUES (%(id)s, %(nombre)s, %(descripcion)s, %(url)s, %(categoria_id)s, %(pais_id)s, %(idoma)s, %(activo)s, %(fallos)s) ON CONFLICT (id) DO UPDATE SET nombre = EXCLUDED.nombre, descripcion = EXCLUDED.descripcion, url = EXCLUDED.url, categoria_id = EXCLUDED.categoria_id, pais_id = EXCLUDED.pais_id, idioma = EXCLUDED.idioma, @@ -262,6 +390,7 @@ def restore_feeds(): app.logger.error(f"Error al restaurar feeds desde CSV: {e}", exc_info=True) flash(f"Ocurrió un error general al procesar el archivo: {e}", "error") return redirect(url_for("dashboard")) + return render_template("restore_feeds.html") def sumar_fallo_feed(cursor, feed_id): @@ -285,6 +414,7 @@ def fetch_and_store(): if not feeds_to_process: app.logger.info("No hay feeds activos para procesar.") return + for feed in feeds_to_process: try: app.logger.info(f"Procesando feed: {feed['url']}") @@ -293,7 +423,9 @@ def fetch_and_store(): app.logger.warning(f"[BOZO] Feed mal formado: {feed['url']} - Excepción: {parsed.bozo_exception}") sumar_fallo_feed(cursor, feed['id']) continue + resetear_fallos_feed(cursor, feed['id']) + for entry in parsed.entries: try: link = entry.get("link") @@ -301,15 +433,18 @@ def fetch_and_store(): noticia_id = hashlib.md5(link.encode()).hexdigest() titulo = entry.get("title", "") resumen = entry.get("summary", "") + imagen_url = "" if "media_content" in entry and entry.media_content: imagen_url = entry.media_content[0].get("url", "") elif " - + +