arreglo de ui y busquedas

This commit is contained in:
jlimolina 2025-11-21 04:42:02 +01:00
parent cb8f69fb93
commit fc06566928
15 changed files with 1115 additions and 435 deletions

98
app.py
View file

@@ -3,6 +3,7 @@ import csv
import io
import time
import socket
import zipfile
from datetime import datetime, date
from concurrent.futures import ThreadPoolExecutor, as_completed
@@ -337,21 +338,48 @@ def home():
params.append(int(continente_id))
if q:
where.append("n.tsv @@ plainto_tsquery('spanish', %s)")
params.append(q)
search_like = f"%{q}%"
if use_tr:
where.append(
"""
(
n.tsv @@ websearch_to_tsquery('spanish', %s)
OR t.titulo_trad ILIKE %s
OR t.resumen_trad ILIKE %s
OR n.titulo ILIKE %s
OR n.resumen ILIKE %s
)
"""
)
params.extend([q, search_like, search_like, search_like, search_like])
else:
where.append(
"""
(
n.tsv @@ websearch_to_tsquery('spanish', %s)
OR n.titulo ILIKE %s
OR n.resumen ILIKE %s
)
"""
)
params.extend([q, search_like, search_like])
where_sql = " AND ".join(where)
with conn.cursor(cursor_factory=extras.DictCursor) as cur:
cur.execute(
f"""
SELECT COUNT(*)
SELECT COUNT(DISTINCT n.id)
FROM noticias n
LEFT JOIN categorias c ON c.id = n.categoria_id
LEFT JOIN paises p ON p.id = n.pais_id
LEFT JOIN traducciones t
ON t.noticia_id = n.id
AND t.lang_to = %s
AND t.status = 'done'
WHERE {where_sql}
""",
params,
[lang] + params,
)
total_results = cur.fetchone()[0] if cur.rowcount else 0
total_pages = (total_results // per_page) + (1 if total_results % per_page else 0)
@@ -925,7 +953,7 @@ def add_url_source():
"""
INSERT INTO fuentes_url (nombre, url, categoria_id, pais_id, idioma)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (url_norm) DO UPDATE
ON CONFLICT (url) DO UPDATE
SET nombre = EXCLUDED.nombre,
categoria_id = EXCLUDED.categoria_id,
pais_id = EXCLUDED.pais_id,
@@ -1172,8 +1200,6 @@ def scrape_url():
@app.route("/backup_completo")
def backup_completo():
import zipfile
mem_file = io.BytesIO()
with zipfile.ZipFile(mem_file, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
with get_conn() as conn, conn.cursor(cursor_factory=extras.DictCursor) as cur:
@@ -1207,6 +1233,64 @@ def backup_completo():
)
@app.route("/restore_completo", methods=["GET", "POST"])
def restore_completo():
    """Restore backup tables from an uploaded .zip file.

    GET renders the upload form.  POST validates the uploaded file is a
    zip, then — inside a single transaction — truncates each recognized
    table and re-loads it via COPY from the matching CSV member of the
    archive.  On success redirects to the dashboard with a per-table row
    summary; on any failure rolls back and redirects back to the form
    with an error flash.
    """
    if request.method == "GET":
        return render_template("restore_completo.html")

    file = request.files.get("backup_file")
    if not file or file.filename == "":
        flash("No se ha seleccionado ningún archivo.", "error")
        return redirect(url_for("restore_completo"))
    if not file.filename.lower().endswith(".zip"):
        flash("El archivo debe ser un .zip.", "error")
        return redirect(url_for("restore_completo"))

    raw = file.read()
    try:
        zf = zipfile.ZipFile(io.BytesIO(raw))
    except zipfile.BadZipFile:
        flash("El archivo no es un .zip válido.", "error")
        return redirect(url_for("restore_completo"))

    # Table name -> CSV member expected inside the zip.  Keys are fixed
    # internal identifiers, so interpolating them into SQL below is safe;
    # extend this mapping to support more tables in the backup format.
    tables = {"feeds": "feeds.csv", "fuentes_url": "fuentes_url.csv"}

    restored_counts = {}
    conn = get_conn()
    try:
        # `with conn` commits on success / rolls back on exception;
        # `with zf` closes the archive (the original leaked it).
        with zf, conn:
            with conn.cursor() as cur:
                members = set(zf.namelist())  # hoisted: scan the directory once
                for table, member in tables.items():
                    if member not in members:
                        continue
                    # Full replace: wipe the table, then stream the CSV in.
                    cur.execute(f"TRUNCATE TABLE {table} RESTART IDENTITY;")
                    with zf.open(member) as f:
                        text_f = io.TextIOWrapper(f, encoding="utf-8")
                        cur.copy_expert(f"COPY {table} FROM STDIN CSV HEADER", text_f)
                    # psycopg2 reports rowcount == -1 when unavailable (it is
                    # never None as the original assumed); clamp to 0.
                    restored_counts[table] = max(cur.rowcount or 0, 0)
    except Exception as e:
        # Boundary handler: surface the failure to the user; the `with conn`
        # block has already rolled the transaction back.
        flash(f"Error al restaurar el backup: {e}", "error")
        return redirect(url_for("restore_completo"))
    finally:
        # Close on every path — the original leaked conn if flash/redirect
        # raised after the success branch.
        conn.close()

    if restored_counts:
        partes = [f"{tabla}: {n} filas" for tabla, n in restored_counts.items()]
        flash("Restauración completada: " + ", ".join(partes), "success")
    else:
        flash("Backup procesado pero no se encontraron ficheros reconocidos (feeds.csv, fuentes_url.csv).", "warning")
    return redirect(url_for("dashboard"))
# Dev entry point: serve on all interfaces at port 8001 with the Flask
# debugger/reloader enabled — not suitable for production.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8001, debug=True)