cosillas del dashboard

This commit is contained in:
jlimolina 2025-06-09 16:00:31 +02:00
parent e9264bc6ce
commit 2cd6cdfe3c
4 changed files with 218 additions and 27 deletions

26
actualizar_repo.sh Executable file
View file

@ -0,0 +1,26 @@
#!/bin/bash
# --- Script to update the Git repository automatically ---
# Shows the working-tree status, stages everything, commits with a
# timestamped message and pushes. Exits early when there is nothing to
# commit, and stops (non-zero exit) if the commit or the push fails,
# instead of blindly pushing after a failed commit.

echo "🚀 Iniciando actualización del repositorio..."

# 1. Show the current state so the user sees what is about to be uploaded.
echo "----------------------------------------"
git status
echo "----------------------------------------"

# Nothing to do when the working tree is clean — avoid an error-exit from
# `git commit` and a pointless push.
if [ -z "$(git status --porcelain)" ]; then
    echo "✅ No hay cambios que subir."
    exit 0
fi

# 2. Stage every modified and new file.
echo " Añadiendo todos los archivos al área de preparación (git add .)"
git add .

# 3. Build the commit message from the current date and time.
COMMIT_MSG="Actualización del $(date +'%Y-%m-%d a las %H:%M:%S')"
echo "💬 Creando commit con el mensaje: '$COMMIT_MSG'"
git commit -m "$COMMIT_MSG" || { echo "❌ Falló el commit."; exit 1; }

# 4. Push the changes to the remote; fail loudly if the push is rejected.
echo "⬆️ Subiendo cambios al repositorio remoto (git push)..."
git push || { echo "❌ Falló el push."; exit 1; }

echo "✅ ¡Actualización completada!"

163
app.py
View file

@ -5,10 +5,11 @@ import hashlib
import re import re
import csv import csv
import math import math
from io import StringIO from io import StringIO, BytesIO
from datetime import datetime, timedelta from datetime import datetime, timedelta
import logging import logging
import atexit import atexit
import zipfile
from flask import Flask, render_template, request, redirect, url_for, Response, flash from flask import Flask, render_template, request, redirect, url_for, Response, flash
from apscheduler.schedulers.background import BackgroundScheduler from apscheduler.schedulers.background import BackgroundScheduler
@ -22,7 +23,14 @@ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='[%(asctime)s]
app = Flask(__name__) app = Flask(__name__)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', os.urandom(24)) app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', os.urandom(24))
DB_CONFIG = {"host": "localhost", "port": 5432, "dbname": "rss", "user": "rss", "password": "x"} # Configuración de la base de datos
DB_CONFIG = {
"host": os.environ.get("DB_HOST", "localhost"),
"port": int(os.environ.get("DB_PORT", 5432)),
"dbname": os.environ.get("DB_NAME", "rss"),
"user": os.environ.get("DB_USER", "rss"),
"password": os.environ.get("DB_PASS", "x")
}
MAX_FALLOS = 5 MAX_FALLOS = 5
def get_conn(): def get_conn():
@ -38,7 +46,11 @@ def safe_html(text):
@app.route("/") @app.route("/")
def home(): def home():
noticias, categorias, continentes, paises = [], [], [], [] noticias, categorias, continentes, paises = [], [], [], []
cat_id, cont_id, pais_id = request.args.get("categoria_id"), request.args.get("continente_id"), request.args.get("pais_id") cat_id = request.args.get("categoria_id")
cont_id = request.args.get("continente_id")
pais_id = request.args.get("pais_id")
fecha_filtro = request.args.get("fecha")
try: try:
with get_conn() as conn: with get_conn() as conn:
with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor: with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
@ -54,11 +66,23 @@ def home():
sql_params, conditions = [], [] sql_params, conditions = [], []
sql_base = "SELECT n.fecha, n.titulo, n.resumen, n.url, n.imagen_url, c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente FROM noticias n LEFT JOIN categorias c ON n.categoria_id = c.id LEFT JOIN paises p ON n.pais_id = p.id LEFT JOIN continentes co ON p.continente_id = co.id" sql_base = "SELECT n.fecha, n.titulo, n.resumen, n.url, n.imagen_url, c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente FROM noticias n LEFT JOIN categorias c ON n.categoria_id = c.id LEFT JOIN paises p ON n.pais_id = p.id LEFT JOIN continentes co ON p.continente_id = co.id"
if cat_id: conditions.append("n.categoria_id = %s"); sql_params.append(cat_id) if cat_id: conditions.append("n.categoria_id = %s"); sql_params.append(cat_id)
if pais_id: conditions.append("n.pais_id = %s"); sql_params.append(pais_id) if pais_id: conditions.append("n.pais_id = %s"); sql_params.append(pais_id)
elif cont_id: conditions.append("p.continente_id = %s"); sql_params.append(cont_id) elif cont_id: conditions.append("p.continente_id = %s"); sql_params.append(cont_id)
if conditions: sql_base += " WHERE " + " AND ".join(conditions)
if fecha_filtro:
try:
fecha_obj = datetime.strptime(fecha_filtro, '%Y-%m-%d')
fecha_inicio = fecha_obj
fecha_fin = fecha_obj + timedelta(days=1)
conditions.append("n.fecha >= %s AND n.fecha < %s")
sql_params.extend([fecha_inicio, fecha_fin])
except ValueError:
flash("Formato de fecha no válido. Use AAAA-MM-DD.", "error")
fecha_filtro = None
if conditions: sql_base += " WHERE " + " AND ".join(conditions)
sql_final = sql_base + " ORDER BY n.fecha DESC NULLS LAST LIMIT 50" sql_final = sql_base + " ORDER BY n.fecha DESC NULLS LAST LIMIT 50"
cursor.execute(sql_final, tuple(sql_params)) cursor.execute(sql_final, tuple(sql_params))
noticias = cursor.fetchall() noticias = cursor.fetchall()
@ -67,7 +91,8 @@ def home():
flash("Error de base de datos al cargar las noticias.", "error") flash("Error de base de datos al cargar las noticias.", "error")
return render_template("noticias.html", noticias=noticias, categorias=categorias, continentes=continentes, paises=paises, return render_template("noticias.html", noticias=noticias, categorias=categorias, continentes=continentes, paises=paises,
cat_id=int(cat_id) if cat_id else None, cont_id=int(cont_id) if cont_id else None, pais_id=int(pais_id) if pais_id else None) cat_id=int(cat_id) if cat_id else None, cont_id=int(cont_id) if cont_id else None,
pais_id=int(pais_id) if pais_id else None, fecha_filtro=fecha_filtro)
@app.route("/feeds") @app.route("/feeds")
def dashboard(): def dashboard():
@ -106,6 +131,13 @@ def manage_feeds():
total_pages = math.ceil(total_feeds / per_page) total_pages = math.ceil(total_feeds / per_page)
return render_template("feeds_list.html", feeds=feeds_list, page=page, total_pages=total_pages, total_feeds=total_feeds) return render_template("feeds_list.html", feeds=feeds_list, page=page, total_pages=total_pages, total_feeds=total_feeds)
def _get_form_dependencies(cursor):
cursor.execute("SELECT id, nombre FROM categorias ORDER BY nombre")
categorias = cursor.fetchall()
cursor.execute("SELECT id, nombre, continente_id FROM paises ORDER BY nombre")
paises = cursor.fetchall()
return categorias, paises
@app.route("/feeds/add", methods=['GET', 'POST']) @app.route("/feeds/add", methods=['GET', 'POST'])
def add_feed(): def add_feed():
if request.method == 'POST': if request.method == 'POST':
@ -127,10 +159,7 @@ def add_feed():
try: try:
with get_conn() as conn: with get_conn() as conn:
with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor: with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
cursor.execute("SELECT id, nombre FROM categorias ORDER BY nombre") categorias, paises = _get_form_dependencies(cursor)
categorias = cursor.fetchall()
cursor.execute("SELECT id, nombre FROM paises ORDER BY nombre")
paises = cursor.fetchall()
except psycopg2.Error as db_err: except psycopg2.Error as db_err:
app.logger.error(f"[DB ERROR] Al cargar formulario para añadir feed: {db_err}") app.logger.error(f"[DB ERROR] Al cargar formulario para añadir feed: {db_err}")
flash("No se pudieron cargar las categorías o países.", "error") flash("No se pudieron cargar las categorías o países.", "error")
@ -151,19 +180,20 @@ def edit_feed(feed_id):
) )
flash("Feed actualizado correctamente.", "success") flash("Feed actualizado correctamente.", "success")
return redirect(url_for("manage_feeds")) return redirect(url_for("manage_feeds"))
cursor.execute("SELECT * FROM feeds WHERE id = %s", (feed_id,)) cursor.execute("SELECT * FROM feeds WHERE id = %s", (feed_id,))
feed = cursor.fetchone() feed = cursor.fetchone()
cursor.execute("SELECT id, nombre FROM categorias ORDER BY nombre") categorias, paises = _get_form_dependencies(cursor)
categorias = cursor.fetchall()
cursor.execute("SELECT id, nombre, continente_id FROM paises ORDER BY nombre")
paises = cursor.fetchall()
except psycopg2.Error as db_err: except psycopg2.Error as db_err:
app.logger.error(f"[DB ERROR] Al editar feed: {db_err}", exc_info=True) app.logger.error(f"[DB ERROR] Al editar feed: {db_err}", exc_info=True)
flash(f"Error al editar el feed: {db_err}", "error") flash(f"Error al editar el feed: {db_err}", "error")
return redirect(url_for("manage_feeds")) return redirect(url_for("manage_feeds"))
if not feed: if not feed:
flash("No se encontró el feed solicitado.", "error") flash("No se encontró el feed solicitado.", "error")
return redirect(url_for("manage_feeds")) return redirect(url_for("manage_feeds"))
return render_template("edit_feed.html", feed=feed, categorias=categorias, paises=paises) return render_template("edit_feed.html", feed=feed, categorias=categorias, paises=paises)
@app.route("/delete/<int:feed_id>") @app.route("/delete/<int:feed_id>")
@ -207,10 +237,12 @@ def backup_feeds():
if not feeds_: if not feeds_:
flash("No hay feeds para exportar.", "warning") flash("No hay feeds para exportar.", "warning")
return redirect(url_for("dashboard")) return redirect(url_for("dashboard"))
si = StringIO() si = StringIO()
writer = csv.DictWriter(si, fieldnames=[desc[0] for desc in cursor.description]) writer = csv.DictWriter(si, fieldnames=[desc[0] for desc in cursor.description])
writer.writeheader() writer.writeheader()
writer.writerows([dict(row) for row in feeds_]) writer.writerows([dict(row) for row in feeds_])
output = si.getvalue() output = si.getvalue()
si.close() si.close()
return Response(output, mimetype="text/csv", headers={"Content-Disposition": "attachment;filename=feeds_backup.csv"}) return Response(output, mimetype="text/csv", headers={"Content-Disposition": "attachment;filename=feeds_backup.csv"})
@ -219,6 +251,100 @@ def backup_feeds():
flash("Error al generar el backup.", "error") flash("Error al generar el backup.", "error")
return redirect(url_for("dashboard")) return redirect(url_for("dashboard"))
@app.route("/backup_noticias")
def backup_noticias():
    """Export every stored news item as a downloadable CSV.

    Joins the category, country and continent names onto each row and
    serves the result as ``noticias_backup.csv``. Flashes a warning and
    redirects to the dashboard when there is nothing to export, or an
    error message if the query/serialization fails.
    """
    try:
        with get_conn() as conn:
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
                cursor.execute("""
                    SELECT n.id, n.titulo, n.resumen, n.url, n.fecha, n.imagen_url,
                           c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente
                    FROM noticias n
                    LEFT JOIN categorias c ON n.categoria_id = c.id
                    LEFT JOIN paises p ON n.pais_id = p.id
                    LEFT JOIN continentes co ON p.continente_id = co.id
                    ORDER BY n.fecha DESC
                """)
                rows = cursor.fetchall()
                if not rows:
                    flash("No hay noticias para exportar.", "warning")
                    return redirect(url_for("dashboard"))
                # Capture column names while the cursor is still open.
                columns = [col[0] for col in cursor.description]
        buffer = StringIO()
        writer = csv.DictWriter(buffer, fieldnames=columns)
        writer.writeheader()
        writer.writerows(dict(row) for row in rows)
        payload = buffer.getvalue()
        buffer.close()
        return Response(
            payload,
            mimetype="text/csv",
            headers={"Content-Disposition": "attachment;filename=noticias_backup.csv"}
        )
    except Exception as e:
        app.logger.error(f"[ERROR] Al hacer backup de noticias: {e}", exc_info=True)
        flash("Error al generar el backup de noticias.", "error")
        return redirect(url_for("dashboard"))
@app.route("/backup_completo")
def backup_completo():
    """Export feeds and news together as one in-memory ZIP download.

    Builds ``rss_backup_completo.zip`` containing ``feeds.csv`` and
    ``noticias.csv`` (each only if its table has rows). Unlike the
    original version, this flashes a warning instead of serving an
    empty archive when both tables are empty — consistent with the
    other backup routes. Any failure flashes an error and redirects
    back to the dashboard.
    """
    def _rows_to_csv(rows, columns):
        # Serialize DictRow results to a CSV string with a header line.
        buf = StringIO()
        writer = csv.DictWriter(buf, fieldnames=columns)
        writer.writeheader()
        writer.writerows([dict(row) for row in rows])
        try:
            return buf.getvalue()
        finally:
            buf.close()

    try:
        memory_buffer = BytesIO()
        wrote_any = False
        with zipfile.ZipFile(memory_buffer, 'w', zipfile.ZIP_DEFLATED) as zipf:
            with get_conn() as conn:
                with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
                    # Feeds with their human-readable category/country names.
                    cursor.execute("""
                        SELECT f.id, f.nombre, f.descripcion, f.url, f.categoria_id, c.nombre AS categoria,
                               f.pais_id, p.nombre AS pais, f.idioma, f.activo, f.fallos
                        FROM feeds f
                        LEFT JOIN categorias c ON f.categoria_id = c.id
                        LEFT JOIN paises p ON f.pais_id = p.id
                        ORDER BY f.id
                    """)
                    feeds_data = cursor.fetchall()
                    if feeds_data:
                        columns = [col[0] for col in cursor.description]
                        zipf.writestr("feeds.csv", _rows_to_csv(feeds_data, columns))
                        wrote_any = True
                    # News joined to category/country/continent names.
                    cursor.execute("""
                        SELECT n.id, n.titulo, n.resumen, n.url, n.fecha, n.imagen_url,
                               c.nombre AS categoria, p.nombre AS pais, co.nombre AS continente
                        FROM noticias n
                        LEFT JOIN categorias c ON n.categoria_id = c.id
                        LEFT JOIN paises p ON n.pais_id = p.id
                        LEFT JOIN continentes co ON p.continente_id = co.id
                        ORDER BY n.fecha DESC
                    """)
                    noticias_data = cursor.fetchall()
                    if noticias_data:
                        columns = [col[0] for col in cursor.description]
                        zipf.writestr("noticias.csv", _rows_to_csv(noticias_data, columns))
                        wrote_any = True
        if not wrote_any:
            # Nothing in either table: warn rather than serve an empty ZIP.
            flash("No hay datos para exportar.", "warning")
            return redirect(url_for("dashboard"))
        memory_buffer.seek(0)
        return Response(
            memory_buffer,
            mimetype="application/zip",
            headers={"Content-Disposition": "attachment;filename=rss_backup_completo.zip"}
        )
    except Exception as e:
        app.logger.error(f"[ERROR] Al hacer backup completo: {e}", exc_info=True)
        flash("Error al generar el backup completo.", "error")
        return redirect(url_for("dashboard"))
@app.route("/restore_feeds", methods=["GET", "POST"]) @app.route("/restore_feeds", methods=["GET", "POST"])
def restore_feeds(): def restore_feeds():
if request.method == "POST": if request.method == "POST":
@ -226,11 +352,13 @@ def restore_feeds():
if not file or not file.filename.endswith(".csv"): if not file or not file.filename.endswith(".csv"):
flash("Archivo no válido. Por favor, sube un archivo .csv.", "error") flash("Archivo no válido. Por favor, sube un archivo .csv.", "error")
return redirect(url_for("restore_feeds")) return redirect(url_for("restore_feeds"))
try: try:
file_stream = StringIO(file.read().decode("utf-8")) file_stream = StringIO(file.read().decode("utf-8"))
reader = csv.DictReader(file_stream) reader = csv.DictReader(file_stream)
rows = list(reader) rows = list(reader)
n_ok, n_err = 0, 0 n_ok, n_err = 0, 0
with get_conn() as conn: with get_conn() as conn:
with conn.cursor() as cursor: with conn.cursor() as cursor:
for row in rows: for row in rows:
@ -240,7 +368,7 @@ def restore_feeds():
cursor.execute( cursor.execute(
""" """
INSERT INTO feeds (id, nombre, descripcion, url, categoria_id, pais_id, idioma, activo, fallos) INSERT INTO feeds (id, nombre, descripcion, url, categoria_id, pais_id, idioma, activo, fallos)
VALUES (%(id)s, %(nombre)s, %(descripcion)s, %(url)s, %(categoria_id)s, %(pais_id)s, %(idioma)s, %(activo)s, %(fallos)s) VALUES (%(id)s, %(nombre)s, %(descripcion)s, %(url)s, %(categoria_id)s, %(pais_id)s, %(idioma)s, %(activo)s, %(fallos)s)
ON CONFLICT (id) DO UPDATE SET ON CONFLICT (id) DO UPDATE SET
nombre = EXCLUDED.nombre, descripcion = EXCLUDED.descripcion, url = EXCLUDED.url, nombre = EXCLUDED.nombre, descripcion = EXCLUDED.descripcion, url = EXCLUDED.url,
categoria_id = EXCLUDED.categoria_id, pais_id = EXCLUDED.pais_id, idioma = EXCLUDED.idioma, categoria_id = EXCLUDED.categoria_id, pais_id = EXCLUDED.pais_id, idioma = EXCLUDED.idioma,
@ -262,6 +390,7 @@ def restore_feeds():
app.logger.error(f"Error al restaurar feeds desde CSV: {e}", exc_info=True) app.logger.error(f"Error al restaurar feeds desde CSV: {e}", exc_info=True)
flash(f"Ocurrió un error general al procesar el archivo: {e}", "error") flash(f"Ocurrió un error general al procesar el archivo: {e}", "error")
return redirect(url_for("dashboard")) return redirect(url_for("dashboard"))
return render_template("restore_feeds.html") return render_template("restore_feeds.html")
def sumar_fallo_feed(cursor, feed_id): def sumar_fallo_feed(cursor, feed_id):
@ -285,6 +414,7 @@ def fetch_and_store():
if not feeds_to_process: if not feeds_to_process:
app.logger.info("No hay feeds activos para procesar.") app.logger.info("No hay feeds activos para procesar.")
return return
for feed in feeds_to_process: for feed in feeds_to_process:
try: try:
app.logger.info(f"Procesando feed: {feed['url']}") app.logger.info(f"Procesando feed: {feed['url']}")
@ -293,7 +423,9 @@ def fetch_and_store():
app.logger.warning(f"[BOZO] Feed mal formado: {feed['url']} - Excepción: {parsed.bozo_exception}") app.logger.warning(f"[BOZO] Feed mal formado: {feed['url']} - Excepción: {parsed.bozo_exception}")
sumar_fallo_feed(cursor, feed['id']) sumar_fallo_feed(cursor, feed['id'])
continue continue
resetear_fallos_feed(cursor, feed['id']) resetear_fallos_feed(cursor, feed['id'])
for entry in parsed.entries: for entry in parsed.entries:
try: try:
link = entry.get("link") link = entry.get("link")
@ -301,15 +433,18 @@ def fetch_and_store():
noticia_id = hashlib.md5(link.encode()).hexdigest() noticia_id = hashlib.md5(link.encode()).hexdigest()
titulo = entry.get("title", "") titulo = entry.get("title", "")
resumen = entry.get("summary", "") resumen = entry.get("summary", "")
imagen_url = "" imagen_url = ""
if "media_content" in entry and entry.media_content: if "media_content" in entry and entry.media_content:
imagen_url = entry.media_content[0].get("url", "") imagen_url = entry.media_content[0].get("url", "")
elif "<img" in resumen: elif "<img" in resumen:
img_search = re.search(r'src="([^"]+)"', resumen) img_search = re.search(r'src="([^"]+)"', resumen)
if img_search: imagen_url = img_search.group(1) if img_search: imagen_url = img_search.group(1)
fecha_publicacion = None fecha_publicacion = None
if "published_parsed" in entry and entry.published_parsed: fecha_publicacion = datetime(*entry.published_parsed[:6]) if "published_parsed" in entry and entry.published_parsed: fecha_publicacion = datetime(*entry.published_parsed[:6])
elif "updated_parsed" in entry and entry.updated_parsed: fecha_publicacion = datetime(*entry.updated_parsed[:6]) elif "updated_parsed" in entry and entry.updated_parsed: fecha_publicacion = datetime(*entry.updated_parsed[:6])
cursor.execute( cursor.execute(
"INSERT INTO noticias (id, titulo, resumen, url, fecha, imagen_url, categoria_id, pais_id) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) ON CONFLICT (id) DO NOTHING", "INSERT INTO noticias (id, titulo, resumen, url, fecha, imagen_url, categoria_id, pais_id) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) ON CONFLICT (id) DO NOTHING",
(noticia_id, titulo, resumen, link, fecha_publicacion, imagen_url, feed['categoria_id'], feed['pais_id']) (noticia_id, titulo, resumen, link, fecha_publicacion, imagen_url, feed['categoria_id'], feed['pais_id'])

View file

@ -8,6 +8,7 @@
<link rel="preconnect" href="https://fonts.googleapis.com"> <link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;500;600;700&display=swap" rel="stylesheet"> <link href="https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;500;600;700&display=swap" rel="stylesheet">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.4/css/all.min.css">
<style> <style>
/* --- Variables Globales de Diseño --- */ /* --- Variables Globales de Diseño --- */

View file

@ -5,7 +5,7 @@
<header> <header>
<h1>Dashboard de Feeds</h1> <h1>Dashboard de Feeds</h1>
<p class="subtitle">Un resumen del estado de tu agregador de noticias.</p> <p class="subtitle">Un resumen del estado de tu agregador de noticias.</p>
<a href="{{ url_for('home') }}" class="top-link" style="margin-top:15px;">← Volver a las Noticias</a> <a href="{{ url_for('home') }}" class="top-link">← Volver a las Noticias</a>
</header> </header>
<div class="dashboard-grid"> <div class="dashboard-grid">
@ -18,18 +18,47 @@
<div class="stat-label">Noticias Recopiladas</div> <div class="stat-label">Noticias Recopiladas</div>
</div> </div>
<div class="stat-card"> <div class="stat-card">
<div class="stat-number" style="color:#c0392b;">{{ stats.feeds_caidos }}</div> <div class="stat-number" style="background: linear-gradient(135deg, #f72585 0%, #7209b7 100%); -webkit-background-clip: text; -webkit-text-fill-color: transparent;">
{{ stats.feeds_caidos }}
</div>
<div class="stat-label">Feeds Caídos / Inactivos</div> <div class="stat-label">Feeds Caídos / Inactivos</div>
</div> </div>
</div> </div>
<div class="card" style="text-align: center; padding: 30px;"> <div class="card">
<h2>Gestionar Feeds</h2> <h2 style="text-align: center; margin-bottom: 20px;">Opciones de Backup</h2>
<p style="color: var(--text-color-light);">Aquí puedes ver la lista completa, editar, añadir o eliminar tus feeds.</p> <p style="text-align: center; color: var(--text-color-light); margin-bottom: 25px;">
<div style="margin-top: 20px; display: flex; justify-content: center; gap: 15px; flex-wrap: wrap;"> Exporta tus datos para mantener copias de seguridad seguras
<a href="{{ url_for('manage_feeds') }}" class="btn">Ver Lista Detallada</a> </p>
<a href="{{ url_for('add_feed') }}" class="btn">Añadir Nuevo Feed</a>
<a href="{{ url_for('restore_feeds') }}" class="btn btn-secondary">Importar / Restaurar</a> <div style="display: flex; justify-content: center; gap: 15px; flex-wrap: wrap;">
<a href="{{ url_for('backup_feeds') }}" class="btn" style="display: inline-flex; align-items: center; gap: 8px;">
<i class="fas fa-file-csv"></i> Feeds (CSV)
</a>
<a href="{{ url_for('backup_noticias') }}" class="btn" style="display: inline-flex; align-items: center; gap: 8px;">
<i class="fas fa-file-csv"></i> Noticias (CSV)
</a>
<a href="{{ url_for('backup_completo') }}" class="btn" style="background: linear-gradient(135deg, #00b894 0%, #00cec9 100%); display: inline-flex; align-items: center; gap: 8px;">
<i class="fas fa-file-archive"></i> Completo (ZIP)
</a>
</div>
</div>
<div class="card" style="text-align: center; margin-top: 30px;">
<h2 style="margin-bottom: 20px;">Gestionar Feeds</h2>
<p style="color: var(--text-color-light); margin-bottom: 25px;">
Administra tu lista de fuentes de noticias
</p>
<div style="display: flex; justify-content: center; gap: 15px; flex-wrap: wrap;">
<a href="{{ url_for('manage_feeds') }}" class="btn" style="display: inline-flex; align-items: center; gap: 8px;">
<i class="fas fa-list"></i> Lista Completa
</a>
<a href="{{ url_for('add_feed') }}" class="btn" style="display: inline-flex; align-items: center; gap: 8px;">
<i class="fas fa-plus"></i> Nuevo Feed
</a>
<a href="{{ url_for('restore_feeds') }}" class="btn btn-secondary" style="display: inline-flex; align-items: center; gap: 8px;">
<i class="fas fa-file-import"></i> Importar
</a>
</div> </div>
</div> </div>
{% endblock %} {% endblock %}