Compare commits
10 commits
55f22998b3
...
2a2df9f6e5
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2a2df9f6e5 | ||
|
|
4934e7fbec | ||
|
|
dbf748ab3f | ||
|
|
5855619d57 | ||
|
|
ae3febbcbd | ||
|
|
7713059ffc | ||
|
|
8dd316daf1 | ||
|
|
e52d0c61ca | ||
|
|
66011bcd0f | ||
|
|
45a1b2f234 |
4 changed files with 412 additions and 156 deletions
41
Dockerfile
41
Dockerfile
|
|
@ -1,17 +1,52 @@
|
|||
# Slim Debian bullseye base — buster is EOL; bullseye still receives patches.
FROM python:3.11-slim-bullseye

# Install system dependencies.
# curl is required by the HEALTHCHECK below; clean apt caches to keep the image small.
RUN apt-get update && \
    apt-get install -y build-essential curl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /rideaware_landing

# Copy requirements first for better layer caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Set environment variables
ENV FLASK_APP=server.py
ENV PYTHONUNBUFFERED=1
ENV PYTHONPATH=/rideaware_landing

# Create non-root user for security
RUN useradd --create-home --shell /bin/bash app && \
    chown -R app:app /rideaware_landing
USER app

# Expose port
EXPOSE 5000

# Health check (hits the /health route served by Flask/gunicorn)
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:5000/health || exit 1

# Run with optimized Gunicorn settings.
# NOTE: an image may have only ONE effective CMD — the earlier plain
# gunicorn CMD left over from the merge was removed; Docker would have
# ignored it in favor of this last one anyway.
CMD ["gunicorn", \
     "--bind", "0.0.0.0:5000", \
     "--workers", "4", \
     "--worker-class", "sync", \
     "--worker-connections", "1000", \
     "--max-requests", "1000", \
     "--max-requests-jitter", "50", \
     "--preload", \
     "--timeout", "30", \
     "--keep-alive", "2", \
     "--access-logfile", "-", \
     "--error-logfile", "-", \
     "--log-level", "info", \
     "server:app"]
|
||||
246
database.py
246
database.py
|
|
@ -1,66 +1,222 @@
|
|||
import logging
import os
import threading

import psycopg2
from psycopg2 import IntegrityError
from psycopg2 import pool, IntegrityError

from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
# Global connection pool, created lazily on first use.
_connection_pool = None
# Guards lazy pool creation: request handlers and background email threads
# can race to initialize it, and ThreadedConnectionPool construction is
# not safe to run twice.
_pool_init_lock = threading.Lock()

def get_connection_pool():
    """Initialize (once) and return the shared ThreadedConnectionPool.

    Connection parameters come from the PG_* environment variables loaded
    by dotenv. The pool keeps 2-20 connections.

    Raises:
        Exception: re-raised if the pool cannot be created (bad credentials,
            unreachable host, ...).
    """
    global _connection_pool
    if _connection_pool is None:
        with _pool_init_lock:
            # Double-check under the lock: another thread may have won the race.
            if _connection_pool is None:
                try:
                    _connection_pool = psycopg2.pool.ThreadedConnectionPool(
                        minconn=2,
                        maxconn=20,
                        host=os.getenv("PG_HOST"),
                        port=os.getenv("PG_PORT", 5432),
                        dbname=os.getenv("PG_DATABASE"),
                        user=os.getenv("PG_USER"),
                        password=os.getenv("PG_PASSWORD"),
                        connect_timeout=5
                    )
                    logging.info("Database connection pool created successfully")
                except Exception as e:
                    logging.error(f"Error creating connection pool: {e}")
                    raise
    return _connection_pool
|
||||
|
||||
def get_connection():
    """Get a database connection from the shared pool.

    Callers must hand the connection back via return_connection() (see the
    finally blocks in the query helpers below) — do NOT call conn.close().

    The legacy direct psycopg2.connect() implementation that preceded the
    pooled code (and made it unreachable) has been removed.

    Raises:
        Exception: re-raised if the pool is unavailable or exhausted.
    """
    try:
        # Local name conn_pool avoids shadowing the imported psycopg2.pool module.
        conn_pool = get_connection_pool()
        conn = conn_pool.getconn()
        if conn.closed:
            # The pooled connection died (server restart, idle timeout):
            # discard it and draw a fresh one.
            conn_pool.putconn(conn, close=True)
            conn = conn_pool.getconn()
        return conn
    except Exception as e:
        logging.error(f"Error getting connection from pool: {e}")
        raise
|
||||
|
||||
def return_connection(conn):
    """Hand a connection back to the shared pool (best-effort).

    Failures are logged rather than raised so that returning a connection
    can never mask the caller's own result.
    """
    try:
        get_connection_pool().putconn(conn)
    except Exception as e:
        logging.error(f"Error returning connection to pool: {e}")
||||
|
||||
def close_all_connections():
    """Close every pooled connection and drop the pool.

    Registered via atexit for a clean shutdown; a no-op when no pool was
    ever created.
    """
    global _connection_pool
    if not _connection_pool:
        return
    _connection_pool.closeall()
    _connection_pool = None
    logging.info("All database connections closed")
||||
|
||||
def column_exists(cursor, table_name, column_name):
    """Return True if `table_name` has a column named `column_name`.

    Uses a parameterized information_schema lookup, so the names are never
    interpolated into the SQL text.
    """
    query = """
        SELECT EXISTS (
            SELECT 1
            FROM information_schema.columns
            WHERE table_name = %s AND column_name = %s
        )
    """
    cursor.execute(query, (table_name, column_name))
    exists_row = cursor.fetchone()
    return exists_row[0]
|
||||
|
||||
def index_exists(cursor, index_name):
    """Return True if a relation named `index_name` exists in schema 'public'.

    Queries pg_class/pg_namespace with a bound parameter.
    """
    query = """
        SELECT EXISTS (
            SELECT 1 FROM pg_class c
            JOIN pg_namespace n ON n.oid = c.relnamespace
            WHERE c.relname = %s AND n.nspname = 'public'
        )
    """
    cursor.execute(query, (index_name,))
    exists_row = cursor.fetchone()
    return exists_row[0]
|
||||
|
||||
def init_db():
    """Initialize database tables and indexes (idempotent).

    Creates the subscribers and newsletters tables if missing, adds the
    created_at column to subscribers when absent, and builds the query
    indexes. Safe to run on every startup.

    The merged legacy body (which created both tables a second time and
    committed/closed the connection before the try block) has been removed.

    Raises:
        Exception: re-raised after rollback if any DDL statement fails.
    """
    conn = None
    try:
        conn = get_connection()
        cursor = conn.cursor()

        # Create subscribers table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS subscribers (
                id SERIAL PRIMARY KEY,
                email TEXT UNIQUE NOT NULL
            )
        """)

        # Add created_at column if it doesn't exist (migration for rows
        # created before the column was introduced)
        if not column_exists(cursor, 'subscribers', 'created_at'):
            cursor.execute("""
                ALTER TABLE subscribers
                ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            """)
            logging.info("Added created_at column to subscribers table")

        # Create newsletters table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS newsletters(
                id SERIAL PRIMARY KEY,
                subject TEXT NOT NULL,
                body TEXT NOT NULL,
                sent_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)

        # Create indexes only if they don't exist
        if not index_exists(cursor, 'idx_newsletters_sent_at'):
            cursor.execute("CREATE INDEX idx_newsletters_sent_at ON newsletters(sent_at DESC)")
            logging.info("Created index idx_newsletters_sent_at")

        if not index_exists(cursor, 'idx_subscribers_email'):
            cursor.execute("CREATE INDEX idx_subscribers_email ON subscribers(email)")
            logging.info("Created index idx_subscribers_email")

        if not index_exists(cursor, 'idx_subscribers_created_at'):
            cursor.execute("CREATE INDEX idx_subscribers_created_at ON subscribers(created_at DESC)")
            logging.info("Created index idx_subscribers_created_at")

        conn.commit()
        cursor.close()
        logging.info("Database tables and indexes initialized successfully")

    except Exception as e:
        logging.error(f"Error initializing database: {e}")
        if conn:
            conn.rollback()
        raise
    finally:
        if conn:
            return_connection(conn)
|
||||
|
||||
def add_email(email):
    """Add an email address to the subscribers table.

    Args:
        email: address to insert (UNIQUE constraint enforced by the table).

    Returns:
        True on success; False if the email already exists or any database
        error occurred. Errors are logged, never raised, so callers can
        treat the result as a simple yes/no.

    Fixes vs. the merged original: the duplicated legacy INSERT (which ran
    a second insert of the same row and always tripped the UNIQUE
    constraint) was removed, and the OperationalError branch that printed
    and implicitly returned None now logs and returns False.
    """
    conn = None
    try:
        conn = get_connection()
        cursor = conn.cursor()
        cursor.execute("INSERT INTO subscribers (email) VALUES (%s)", (email,))
        conn.commit()
        cursor.close()
        logging.info(f"Email added successfully: {email}")
        return True

    except IntegrityError:
        # UNIQUE constraint hit: the address is already subscribed.
        if conn:
            conn.rollback()
        logging.info(f"Email already exists: {email}")
        return False

    except Exception as e:
        if conn:
            conn.rollback()
        logging.error(f"Error adding email {email}: {e}")
        return False

    finally:
        if conn:
            return_connection(conn)
|
||||
|
||||
def remove_email(email):
    """Delete an email address from the subscribers table.

    Args:
        email: address to remove.

    Returns:
        True if a row was deleted; False if the email was not found or a
        database error occurred (errors are logged, never raised).

    Fixes vs. the merged original: the duplicated legacy DELETE was
    removed, and the except branch no longer returns before its own
    rollback/logging code (which was unreachable dead code).
    """
    conn = None
    try:
        conn = get_connection()
        cursor = conn.cursor()
        cursor.execute("DELETE FROM subscribers WHERE email = %s", (email,))
        conn.commit()
        rows_affected = cursor.rowcount
        cursor.close()

        if rows_affected > 0:
            logging.info(f"Email removed successfully: {email}")
            return True
        logging.info(f"Email not found for removal: {email}")
        return False

    except Exception as e:
        if conn:
            conn.rollback()
        logging.error(f"Error removing email {email}: {e}")
        return False

    finally:
        if conn:
            return_connection(conn)
|
||||
|
||||
def get_subscriber_count():
    """Return the total number of subscriber rows (0 on any database error)."""
    conn = None
    try:
        conn = get_connection()
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM subscribers")
        (count,) = cursor.fetchone()
        cursor.close()
        return count
    except Exception as e:
        logging.error(f"Error getting subscriber count: {e}")
        return 0
    finally:
        if conn:
            return_connection(conn)
|
||||
|
||||
# Cleanup function for graceful shutdown.
# Registered at import time: any process importing this module closes the
# pool on exit. Safe because close_all_connections() is a no-op when no
# pool was ever created.
import atexit
atexit.register(close_all_connections)
|
||||
273
server.py
273
server.py
|
|
@ -1,11 +1,12 @@
|
|||
import os
|
||||
import time
|
||||
import logging
|
||||
from threading import Thread
|
||||
import smtplib
|
||||
from email.mime.text import MIMEText
|
||||
from flask import Flask, render_template, request, jsonify
|
||||
from flask import Flask, render_template, request, jsonify, g
|
||||
from dotenv import load_dotenv
|
||||
from database import init_db, get_connection, add_email, remove_email
|
||||
from database import init_db, get_connection, return_connection, add_email, remove_email
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
|
@ -16,18 +17,14 @@ SMTP_PASSWORD = os.getenv('SMTP_PASSWORD')
|
|||
|
||||
app = Flask(__name__)

# Tracks whether init_db() has run, for ensure_db_initialized();
# NOTE(review): init_db() is also invoked at import time at the bottom of
# this file — confirm whether this lazy path is still needed.
_db_initialized = False

# Configure logging
logging.basicConfig(level=logging.INFO)

# Cache configuration: newsletters are cached in-process and considered
# fresh for CACHE_DURATION seconds, to avoid a DB round-trip per page view.
_newsletter_cache = {}
_cache_timestamp = {}
CACHE_DURATION = 300  # seconds
|
||||
|
||||
def ensure_db_initialized():
    """Run init_db() once, on first database access (lazy, idempotent)."""
    global _db_initialized
    if _db_initialized:
        return
    init_db()
    _db_initialized = True
|
||||
|
||||
def get_newsletters_cached():
|
||||
"""Get newsletters with caching to reduce database hits"""
|
||||
current_time = time.time()
|
||||
|
|
@ -36,26 +33,32 @@ def get_newsletters_cached():
|
|||
current_time - _cache_timestamp.get('newsletters', 0) < CACHE_DURATION):
|
||||
return _newsletter_cache['newsletters']
|
||||
|
||||
ensure_db_initialized()
|
||||
conn = get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(
|
||||
"SELECT id, subject, body, sent_at "
|
||||
"FROM newsletters ORDER BY sent_at DESC"
|
||||
)
|
||||
rows = cursor.fetchall()
|
||||
cursor.close()
|
||||
conn.close()
|
||||
|
||||
newsletters = [
|
||||
{"id": r[0], "subject": r[1], "body": r[2], "sent_at": r[3]}
|
||||
for r in rows
|
||||
]
|
||||
|
||||
_newsletter_cache['newsletters'] = newsletters
|
||||
_cache_timestamp['newsletters'] = current_time
|
||||
|
||||
return newsletters
|
||||
conn = None
|
||||
try:
|
||||
conn = get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(
|
||||
"SELECT id, subject, body, sent_at "
|
||||
"FROM newsletters ORDER BY sent_at DESC LIMIT 100"
|
||||
)
|
||||
rows = cursor.fetchall()
|
||||
cursor.close()
|
||||
|
||||
newsletters = [
|
||||
{"id": r[0], "subject": r[1], "body": r[2], "sent_at": r[3]}
|
||||
for r in rows
|
||||
]
|
||||
|
||||
_newsletter_cache['newsletters'] = newsletters
|
||||
_cache_timestamp['newsletters'] = current_time
|
||||
|
||||
return newsletters
|
||||
except Exception as e:
|
||||
app.logger.error(f"Database error in get_newsletters_cached: {e}")
|
||||
return []
|
||||
finally:
|
||||
if conn:
|
||||
return_connection(conn)
|
||||
|
||||
def get_newsletter_by_id_cached(newsletter_id):
|
||||
"""Get single newsletter with caching"""
|
||||
|
|
@ -66,32 +69,38 @@ def get_newsletter_by_id_cached(newsletter_id):
|
|||
current_time - _cache_timestamp.get(cache_key, 0) < CACHE_DURATION):
|
||||
return _newsletter_cache[cache_key]
|
||||
|
||||
ensure_db_initialized()
|
||||
conn = get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(
|
||||
"SELECT id, subject, body, sent_at "
|
||||
"FROM newsletters WHERE id = %s",
|
||||
(newsletter_id,)
|
||||
)
|
||||
row = cursor.fetchone()
|
||||
cursor.close()
|
||||
conn.close()
|
||||
|
||||
if not row:
|
||||
return None
|
||||
conn = None
|
||||
try:
|
||||
conn = get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(
|
||||
"SELECT id, subject, body, sent_at "
|
||||
"FROM newsletters WHERE id = %s",
|
||||
(newsletter_id,)
|
||||
)
|
||||
row = cursor.fetchone()
|
||||
cursor.close()
|
||||
|
||||
newsletter = {
|
||||
"id": row[0],
|
||||
"subject": row[1],
|
||||
"body": row[2],
|
||||
"sent_at": row[3]
|
||||
}
|
||||
|
||||
_newsletter_cache[cache_key] = newsletter
|
||||
_cache_timestamp[cache_key] = current_time
|
||||
|
||||
return newsletter
|
||||
if not row:
|
||||
return None
|
||||
|
||||
newsletter = {
|
||||
"id": row[0],
|
||||
"subject": row[1],
|
||||
"body": row[2],
|
||||
"sent_at": row[3]
|
||||
}
|
||||
|
||||
_newsletter_cache[cache_key] = newsletter
|
||||
_cache_timestamp[cache_key] = current_time
|
||||
|
||||
return newsletter
|
||||
except Exception as e:
|
||||
app.logger.error(f"Database error in get_newsletter_by_id_cached: {e}")
|
||||
return None
|
||||
finally:
|
||||
if conn:
|
||||
return_connection(conn)
|
||||
|
||||
def clear_newsletter_cache():
|
||||
"""Clear newsletter cache when data is updated"""
|
||||
|
|
@ -103,35 +112,49 @@ def clear_newsletter_cache():
|
|||
_cache_timestamp.pop(key, None)
|
||||
|
||||
@app.before_request
def before_request():
    """Start timing the request (stored on flask.g for after_request)."""
    g.start_time = time.time()


@app.after_request
def after_request(response):
    """Log request timing and attach an X-Response-Time header.

    Replaces the merged legacy start_timer/log_request pair. The getattr
    guard covers the case where after_request runs without before_request
    having set g.start_time (e.g. an earlier hook raised).
    """
    total_time = time.time() - getattr(g, 'start_time', time.time())

    # Log slow requests
    if total_time > 1.0:
        app.logger.warning(f"Slow request: {request.method} {request.path} took {total_time:.3f}s")
    elif total_time > 0.5:
        app.logger.info(f"Request: {request.method} {request.path} took {total_time:.3f}s")

    # Add performance headers for debugging
    response.headers['X-Response-Time'] = f"{total_time:.3f}s"

    return response
||||
|
||||
def send_confirmation_email(to_address: str, unsubscribe_link: str):
    """
    Send the HTML confirmation email to `to_address`.

    Renders confirmation_email.html and delivers it over a dedicated
    SMTP_SSL connection (5s timeout so a slow SMTP server cannot hold a
    worker thread for long). Failures are logged, never raised — the
    subscription itself has already succeeded by the time this runs.

    Fix vs. the merged original: the message was built twice (once before
    the try and again inside the SMTP block); it is now built exactly once,
    before the connection is opened, so template errors cost no network
    round-trip.
    """
    subject = "Thanks for subscribing!"
    html_body = render_template(
        "confirmation_email.html",
        unsubscribe_link=unsubscribe_link
    )

    msg = MIMEText(html_body, "html", "utf-8")
    msg["Subject"] = subject
    msg["From"] = SMTP_USER
    msg["To"] = to_address

    try:
        with smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT, timeout=5) as server:
            server.login(SMTP_USER, SMTP_PASSWORD)
            server.sendmail(SMTP_USER, [to_address], msg.as_string())

        app.logger.info(f"Confirmation email sent successfully to {to_address}")

    except Exception as e:
        app.logger.error(f"Failed to send email to {to_address}: {e}")
|
||||
|
||||
|
|
@ -148,65 +171,113 @@ def index():
|
|||
|
||||
@app.route("/subscribe", methods=["POST"])
|
||||
def subscribe():
|
||||
"""Subscribe endpoint - lazy loads database only when needed"""
|
||||
"""Subscribe endpoint with optimized database handling"""
|
||||
data = request.get_json() or {}
|
||||
email = data.get("email")
|
||||
|
||||
if not email:
|
||||
return jsonify(error="No email provided"), 400
|
||||
|
||||
ensure_db_initialized()
|
||||
# Validate email format (basic check)
|
||||
if "@" not in email or "." not in email.split("@")[-1]:
|
||||
return jsonify(error="Invalid email format"), 400
|
||||
|
||||
if add_email(email):
|
||||
unsubscribe_link = f"{request.url_root}unsubscribe?email={email}"
|
||||
try:
|
||||
if add_email(email):
|
||||
unsubscribe_link = f"{request.url_root}unsubscribe?email={email}"
|
||||
|
||||
Thread(
|
||||
target=send_confirmation_async,
|
||||
args=(email, unsubscribe_link),
|
||||
daemon=True
|
||||
).start()
|
||||
# Start email sending in background thread
|
||||
Thread(
|
||||
target=send_confirmation_async,
|
||||
args=(email, unsubscribe_link),
|
||||
daemon=True
|
||||
).start()
|
||||
|
||||
return jsonify(message="Email has been added"), 201
|
||||
|
||||
return jsonify(error="Email already exists"), 400
|
||||
return jsonify(message="Email has been added"), 201
|
||||
else:
|
||||
return jsonify(error="Email already exists"), 400
|
||||
|
||||
except Exception as e:
|
||||
app.logger.error(f"Error in subscribe endpoint: {e}")
|
||||
return jsonify(error="Internal server error"), 500
|
||||
|
||||
@app.route("/unsubscribe", methods=["GET"])
|
||||
def unsubscribe():
|
||||
"""Unsubscribe endpoint - lazy loads database only when needed"""
|
||||
"""Unsubscribe endpoint with optimized database handling"""
|
||||
email = request.args.get("email")
|
||||
|
||||
if not email:
|
||||
return "No email specified.", 400
|
||||
|
||||
ensure_db_initialized()
|
||||
|
||||
if remove_email(email):
|
||||
return f"The email {email} has been unsubscribed.", 200
|
||||
return f"Email {email} was not found or has already been unsubscribed.", 400
|
||||
try:
|
||||
if remove_email(email):
|
||||
return f"The email {email} has been unsubscribed.", 200
|
||||
else:
|
||||
return f"Email {email} was not found or has already been unsubscribed.", 400
|
||||
|
||||
except Exception as e:
|
||||
app.logger.error(f"Error in unsubscribe endpoint: {e}")
|
||||
return "Internal server error", 500
|
||||
|
||||
@app.route("/newsletters", methods=["GET"])
|
||||
def newsletters():
|
||||
"""
|
||||
List all newsletters (newest first) with caching for better performance.
|
||||
"""
|
||||
newsletters = get_newsletters_cached()
|
||||
return render_template("newsletters.html", newsletters=newsletters)
|
||||
try:
|
||||
newsletters = get_newsletters_cached()
|
||||
return render_template("newsletters.html", newsletters=newsletters)
|
||||
except Exception as e:
|
||||
app.logger.error(f"Error in newsletters endpoint: {e}")
|
||||
return "Internal server error", 500
|
||||
|
||||
@app.route("/newsletter/<int:newsletter_id>", methods=["GET"])
|
||||
def newsletter_detail(newsletter_id):
|
||||
"""
|
||||
Show a single newsletter by its ID with caching.
|
||||
"""
|
||||
newsletter = get_newsletter_by_id_cached(newsletter_id)
|
||||
|
||||
if not newsletter:
|
||||
return "Newsletter not found.", 404
|
||||
try:
|
||||
newsletter = get_newsletter_by_id_cached(newsletter_id)
|
||||
|
||||
if not newsletter:
|
||||
return "Newsletter not found.", 404
|
||||
|
||||
return render_template("newsletter_detail.html", newsletter=newsletter)
|
||||
return render_template("newsletter_detail.html", newsletter=newsletter)
|
||||
except Exception as e:
|
||||
app.logger.error(f"Error in newsletter_detail endpoint: {e}")
|
||||
return "Internal server error", 500
|
||||
|
||||
@app.route("/admin/clear-cache", methods=["POST"])
|
||||
def clear_cache():
|
||||
"""Admin endpoint to clear newsletter cache"""
|
||||
clear_newsletter_cache()
|
||||
return jsonify(message="Cache cleared successfully"), 200
|
||||
try:
|
||||
clear_newsletter_cache()
|
||||
return jsonify(message="Cache cleared successfully"), 200
|
||||
except Exception as e:
|
||||
app.logger.error(f"Error clearing cache: {e}")
|
||||
return jsonify(error="Failed to clear cache"), 500
|
||||
|
||||
@app.route("/health", methods=["GET"])
|
||||
def health_check():
|
||||
"""Health check endpoint for monitoring"""
|
||||
return jsonify(status="healthy", timestamp=time.time()), 200
|
||||
|
||||
# Error handlers — return JSON bodies instead of Flask's default HTML pages.
@app.errorhandler(404)
def not_found(error):
    """JSON body for unknown routes."""
    return jsonify(error="Not found"), 404


@app.errorhandler(500)
def internal_error(error):
    """JSON body for unhandled server errors."""
    return jsonify(error="Internal server error"), 500
|
||||
|
||||
# Initialize database at startup.
# Runs at import time so gunicorn --preload fails fast on a bad database
# configuration, instead of surfacing the error on the first request.
try:
    init_db()
    app.logger.info("Database initialized successfully")
except Exception as e:
    app.logger.error(f"Failed to initialize database: {e}")
    raise
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(host="0.0.0.0", debug=True)
|
||||
app.run(host="0.0.0.0", debug=True)
|
||||
|
|
|
|||
|
|
@ -6,14 +6,8 @@
|
|||
<title>RideAware - Smart Cycling Training Platform</title>
|
||||
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
|
||||
<link rel="preconnect" href="https://cdn.statically.io" crossorigin>
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/styles.css') }}">
|
||||
|
||||
<link rel="preload" as="style"
|
||||
href="https://cdn.statically.io/gl/rideaware/landing/main/static/css/styles.min.css"
|
||||
onload="this.onload=null;this.rel='stylesheet'">
|
||||
<noscript>
|
||||
<link rel="stylesheet"
|
||||
href="https://cdn.statically.io/gl/rideaware/landing/main/static/css/styles.min.css">
|
||||
</noscript>
|
||||
</head>
|
||||
<body>
|
||||
<!-- Navigation -->
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue