Compare commits

10 commits: 55f22998b3 ... 2a2df9f6e5
| Author | SHA1 | Date |
|---|---|---|
| | 2a2df9f6e5 | |
| | 4934e7fbec | |
| | dbf748ab3f | |
| | 5855619d57 | |
| | ae3febbcbd | |
| | 7713059ffc | |
| | 8dd316daf1 | |
| | e52d0c61ca | |
| | 66011bcd0f | |
| | 45a1b2f234 | |
4 changed files with 412 additions and 156 deletions

Dockerfile (41 changes)
```diff
@@ -1,17 +1,52 @@
-FROM python:3.11-slim-buster
+FROM python:3.11-slim-bullseye
 
-RUN apt-get update && apt-get install -y build-essential
+# Install system dependencies
+RUN apt-get update && \
+    apt-get install -y build-essential curl && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 
+# Set working directory
 WORKDIR /rideaware_landing
 
+# Copy requirements first for better caching
 COPY requirements.txt .
 
+# Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt
 
+# Copy application code
 COPY . .
 
+# Set environment variables
 ENV FLASK_APP=server.py
+ENV PYTHONUNBUFFERED=1
+ENV PYTHONPATH=/rideaware_landing
+
+# Create non-root user for security
+RUN useradd --create-home --shell /bin/bash app && \
+    chown -R app:app /rideaware_landing
+USER app
 
+# Expose port
 EXPOSE 5000
 
-CMD ["gunicorn", "--bind", "0.0.0.0:5000", "--workers", "4", "server:app"]
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:5000/health || exit 1
+
+# Run with optimized Gunicorn settings
+CMD ["gunicorn", \
+    "--bind", "0.0.0.0:5000", \
+    "--workers", "4", \
+    "--worker-class", "sync", \
+    "--worker-connections", "1000", \
+    "--max-requests", "1000", \
+    "--max-requests-jitter", "50", \
+    "--preload", \
+    "--timeout", "30", \
+    "--keep-alive", "2", \
+    "--access-logfile", "-", \
+    "--error-logfile", "-", \
+    "--log-level", "info", \
+    "server:app"]
```
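The new HEALTHCHECK depends on the `/health` route added in server.py below. As a sanity check from outside the container, a probe with the same pass/fail semantics can be written with only the Python standard library — a sketch, not part of this change; the URL assumes the port from `EXPOSE 5000` is published on localhost:

```python
import sys
import urllib.request

def probe(url: str = "http://localhost:5000/health", timeout: float = 10.0) -> int:
    """Mirror `curl -f .../health || exit 1`: return 0 on HTTP 200, 1 otherwise."""
    try:
        with urllib.request.urlopen(url, timeout=timeout) as resp:
            return 0 if resp.status == 200 else 1
    except OSError:  # URLError, connection refused, and socket timeouts
        return 1

if __name__ == "__main__":
    sys.exit(probe())
```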
database.py (188 changes)
```diff
@@ -1,24 +1,95 @@
 import os
 import psycopg2
-from psycopg2 import IntegrityError
+from psycopg2 import pool, IntegrityError
 from dotenv import load_dotenv
+import logging
 
 load_dotenv()
 
-def get_connection():
-    """Return a database connection."""
-    return psycopg2.connect(
+# Global connection pool
+_connection_pool = None
+
+def get_connection_pool():
+    """Initialize and return the connection pool"""
+    global _connection_pool
+    if _connection_pool is None:
+        try:
+            _connection_pool = psycopg2.pool.ThreadedConnectionPool(
+                minconn=2,
+                maxconn=20,
                 host=os.getenv("PG_HOST"),
-                port=os.getenv("PG_PORT"),
+                port=os.getenv("PG_PORT", 5432),
                 dbname=os.getenv("PG_DATABASE"),
                 user=os.getenv("PG_USER"),
                 password=os.getenv("PG_PASSWORD"),
-                connect_timeout=10
+                connect_timeout=5
             )
+            logging.info("Database connection pool created successfully")
+        except Exception as e:
+            logging.error(f"Error creating connection pool: {e}")
+            raise
+    return _connection_pool
+
+def get_connection():
+    """Get a connection from the pool"""
+    try:
+        pool = get_connection_pool()
+        conn = pool.getconn()
+        if conn.closed:
+            # Connection is closed, remove it and get a new one
+            pool.putconn(conn, close=True)
+            conn = pool.getconn()
+        return conn
+    except Exception as e:
+        logging.error(f"Error getting connection from pool: {e}")
+        raise
+
+def return_connection(conn):
+    """Return a connection to the pool"""
+    try:
+        pool = get_connection_pool()
+        pool.putconn(conn)
+    except Exception as e:
+        logging.error(f"Error returning connection to pool: {e}")
+
+def close_all_connections():
+    """Close all connections in the pool"""
+    global _connection_pool
+    if _connection_pool:
+        _connection_pool.closeall()
+        _connection_pool = None
+        logging.info("All database connections closed")
+
+def column_exists(cursor, table_name, column_name):
+    """Check if a column exists in a table"""
+    cursor.execute("""
+        SELECT EXISTS (
+            SELECT 1
+            FROM information_schema.columns
+            WHERE table_name = %s AND column_name = %s
+        )
+    """, (table_name, column_name))
+    return cursor.fetchone()[0]
+
+def index_exists(cursor, index_name):
+    """Check if an index exists"""
+    cursor.execute("""
+        SELECT EXISTS (
+            SELECT 1 FROM pg_class c
+            JOIN pg_namespace n ON n.oid = c.relnamespace
+            WHERE c.relname = %s AND n.nspname = 'public'
+        )
+    """, (index_name,))
+    return cursor.fetchone()[0]
 
 def init_db():
+    """Initialize database tables and indexes"""
+    conn = None
+    try:
         conn = get_connection()
         cursor = conn.cursor()
 
+        # Create subscribers table
         cursor.execute("""
             CREATE TABLE IF NOT EXISTS subscribers (
                 id SERIAL PRIMARY KEY,
@@ -26,6 +97,15 @@ def init_db():
             )
         """)
 
+        # Add created_at column if it doesn't exist
+        if not column_exists(cursor, 'subscribers', 'created_at'):
+            cursor.execute("""
+                ALTER TABLE subscribers
+                ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            """)
+            logging.info("Added created_at column to subscribers table")
+
+        # Create newsletters table
         cursor.execute("""
             CREATE TABLE IF NOT EXISTS newsletters(
                 id SERIAL PRIMARY KEY,
@@ -35,32 +115,108 @@ def init_db():
             )
         """)
 
+        # Create indexes only if they don't exist
+        if not index_exists(cursor, 'idx_newsletters_sent_at'):
+            cursor.execute("CREATE INDEX idx_newsletters_sent_at ON newsletters(sent_at DESC)")
+            logging.info("Created index idx_newsletters_sent_at")
+
+        if not index_exists(cursor, 'idx_subscribers_email'):
+            cursor.execute("CREATE INDEX idx_subscribers_email ON subscribers(email)")
+            logging.info("Created index idx_subscribers_email")
+
+        if not index_exists(cursor, 'idx_subscribers_created_at'):
+            cursor.execute("CREATE INDEX idx_subscribers_created_at ON subscribers(created_at DESC)")
+            logging.info("Created index idx_subscribers_created_at")
+
         conn.commit()
         cursor.close()
-    conn.close()
+        logging.info("Database tables and indexes initialized successfully")
+
+    except Exception as e:
+        logging.error(f"Error initializing database: {e}")
+        if conn:
+            conn.rollback()
+        raise
+    finally:
+        if conn:
+            return_connection(conn)
 
 def add_email(email):
+    """Add email to subscribers with connection pooling"""
+    conn = None
     try:
-        with get_connection() as conn:
-            with conn.cursor() as cursor:
-                cursor.execute("INSERT INTO subscribers (email) VALUES (%s)", (email,))
-                conn.commit()
+        conn = get_connection()
+        cursor = conn.cursor()
+        cursor.execute("INSERT INTO subscribers (email) VALUES (%s)", (email,))
+        conn.commit()
+        cursor.close()
+        logging.info(f"Email added successfully: {email}")
         return True
+
     except IntegrityError:
-        return False
-    except psycopg2.OperationalError as e:
-        print(f"Error: {e}")
+        # Email already exists
+        if conn:
+            conn.rollback()
+        logging.info(f"Email already exists: {email}")
         return False
+
+    except Exception as e:
+        if conn:
+            conn.rollback()
+        logging.error(f"Error adding email {email}: {e}")
+        return False
+
+    finally:
+        if conn:
+            return_connection(conn)
 
 def remove_email(email):
+    """Remove email from subscribers with connection pooling"""
+    conn = None
     try:
-        with get_connection() as conn:
-            with conn.cursor() as cursor:
-                cursor.execute("DELETE FROM subscribers WHERE email = %s", (email,))
-                conn.commit()
-                if cursor.rowcount > 0:
-                    return True
-                return False
+        conn = get_connection()
+        cursor = conn.cursor()
+        cursor.execute("DELETE FROM subscribers WHERE email = %s", (email,))
+        conn.commit()
+        rows_affected = cursor.rowcount
+        cursor.close()
+
+        if rows_affected > 0:
+            logging.info(f"Email removed successfully: {email}")
+            return True
+        else:
+            logging.info(f"Email not found for removal: {email}")
+            return False
+
     except Exception as e:
-        print(f"Error removing email: {e}")
+        if conn:
+            conn.rollback()
+        logging.error(f"Error removing email {email}: {e}")
         return False
+
+    finally:
+        if conn:
+            return_connection(conn)
+
+def get_subscriber_count():
+    """Get total number of subscribers"""
+    conn = None
+    try:
+        conn = get_connection()
+        cursor = conn.cursor()
+        cursor.execute("SELECT COUNT(*) FROM subscribers")
+        count = cursor.fetchone()[0]
+        cursor.close()
+        return count
+
+    except Exception as e:
+        logging.error(f"Error getting subscriber count: {e}")
+        return 0
+
+    finally:
+        if conn:
+            return_connection(conn)
+
+# Cleanup function for graceful shutdown
+import atexit
+atexit.register(close_all_connections)
```
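The pooling contract here is checkout/return, not open/close: every `get_connection()` must be balanced by `return_connection(conn)`, as `init_db`, `add_email`, `remove_email`, and `get_subscriber_count` all do in their `finally` blocks. A minimal sketch of a new read helper in the same style — the function name and query are hypothetical, not part of this diff:

```python
import logging
from database import get_connection, return_connection

def recent_subscriber_emails(limit: int = 50):
    """Hypothetical read helper showing the checkout/return pattern."""
    conn = None
    try:
        conn = get_connection()          # checkout from the pool
        cursor = conn.cursor()
        cursor.execute(
            "SELECT email FROM subscribers ORDER BY created_at DESC LIMIT %s",
            (limit,),
        )
        rows = [r[0] for r in cursor.fetchall()]
        cursor.close()
        return rows
    except Exception as e:
        logging.error(f"Error listing recent subscribers: {e}")
        return []
    finally:
        if conn:
            return_connection(conn)      # hand it back, never conn.close()
```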
server.py (129 changes)
```diff
@@ -1,11 +1,12 @@
 import os
 import time
+import logging
 from threading import Thread
 import smtplib
 from email.mime.text import MIMEText
-from flask import Flask, render_template, request, jsonify
+from flask import Flask, render_template, request, jsonify, g
 from dotenv import load_dotenv
-from database import init_db, get_connection, add_email, remove_email
+from database import init_db, get_connection, return_connection, add_email, remove_email
 
 load_dotenv()
 
@@ -16,18 +17,14 @@ SMTP_PASSWORD = os.getenv('SMTP_PASSWORD')
 
 app = Flask(__name__)
 
-_db_initialized = False
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+
+# Cache configuration
 _newsletter_cache = {}
 _cache_timestamp = {}
 CACHE_DURATION = 300
 
-def ensure_db_initialized():
-    """Lazy database initialization - only runs on first database access"""
-    global _db_initialized
-    if not _db_initialized:
-        init_db()
-        _db_initialized = True
-
 def get_newsletters_cached():
     """Get newsletters with caching to reduce database hits"""
     current_time = time.time()
@@ -36,16 +33,16 @@ def get_newsletters_cached():
             current_time - _cache_timestamp.get('newsletters', 0) < CACHE_DURATION):
         return _newsletter_cache['newsletters']
 
-    ensure_db_initialized()
-    conn = get_connection()
-    cursor = conn.cursor()
-    cursor.execute(
-        "SELECT id, subject, body, sent_at "
-        "FROM newsletters ORDER BY sent_at DESC"
-    )
-    rows = cursor.fetchall()
-    cursor.close()
-    conn.close()
+    conn = None
+    try:
+        conn = get_connection()
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT id, subject, body, sent_at "
+            "FROM newsletters ORDER BY sent_at DESC LIMIT 100"
+        )
+        rows = cursor.fetchall()
+        cursor.close()
 
-    newsletters = [
-        {"id": r[0], "subject": r[1], "body": r[2], "sent_at": r[3]}
+        newsletters = [
+            {"id": r[0], "subject": r[1], "body": r[2], "sent_at": r[3]}
@@ -56,6 +53,12 @@ def get_newsletters_cached():
         _cache_timestamp['newsletters'] = current_time
 
         return newsletters
+    except Exception as e:
+        app.logger.error(f"Database error in get_newsletters_cached: {e}")
+        return []
+    finally:
+        if conn:
+            return_connection(conn)
 
 def get_newsletter_by_id_cached(newsletter_id):
     """Get single newsletter with caching"""
@@ -66,7 +69,8 @@ def get_newsletter_by_id_cached(newsletter_id):
             current_time - _cache_timestamp.get(cache_key, 0) < CACHE_DURATION):
         return _newsletter_cache[cache_key]
 
-    ensure_db_initialized()
+    conn = None
+    try:
         conn = get_connection()
         cursor = conn.cursor()
         cursor.execute(
@@ -76,7 +80,6 @@ def get_newsletter_by_id_cached(newsletter_id):
         )
         row = cursor.fetchone()
         cursor.close()
-        conn.close()
 
         if not row:
             return None
@@ -92,6 +95,12 @@ def get_newsletter_by_id_cached(newsletter_id):
         _cache_timestamp[cache_key] = current_time
 
         return newsletter
+    except Exception as e:
+        app.logger.error(f"Database error in get_newsletter_by_id_cached: {e}")
+        return None
+    finally:
+        if conn:
+            return_connection(conn)
 
 def clear_newsletter_cache():
     """Clear newsletter cache when data is updated"""
@@ -103,20 +112,32 @@ def clear_newsletter_cache():
         _cache_timestamp.pop(key, None)
 
 @app.before_request
-def start_timer():
-    request._start_time = time.time()
+def before_request():
+    """Start timing the request and set up request context"""
+    g.start_time = time.time()
 
 @app.after_request
-def log_request(response):
-    elapsed = time.time() - getattr(request, '_start_time', time.time())
-    app.logger.info(f"{request.method} {request.path} completed in {elapsed:.3f}s")
+def after_request(response):
+    """Log request timing and performance metrics"""
+    total_time = time.time() - g.start_time
+
+    # Log slow requests
+    if total_time > 1.0:
+        app.logger.warning(f"Slow request: {request.method} {request.path} took {total_time:.3f}s")
+    elif total_time > 0.5:
+        app.logger.info(f"Request: {request.method} {request.path} took {total_time:.3f}s")
+
+    # Add performance headers for debugging
+    response.headers['X-Response-Time'] = f"{total_time:.3f}s"
+
     return response
 
 def send_confirmation_email(to_address: str, unsubscribe_link: str):
     """
     Sends the HTML confirmation email to `to_address`.
-    This runs inside its own SMTP_SSL connection (timeout=10s).
+    This runs inside its own SMTP_SSL connection with reduced timeout.
     """
+    try:
         subject = "Thanks for subscribing!"
         html_body = render_template(
             "confirmation_email.html",
@@ -128,10 +149,12 @@ def send_confirmation_email(to_address: str, unsubscribe_link: str):
         msg["From"] = SMTP_USER
         msg["To"] = to_address
 
-    try:
-        with smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT, timeout=10) as server:
+        with smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT, timeout=5) as server:
             server.login(SMTP_USER, SMTP_PASSWORD)
             server.sendmail(SMTP_USER, [to_address], msg.as_string())
+
+        app.logger.info(f"Confirmation email sent successfully to {to_address}")
+
     except Exception as e:
         app.logger.error(f"Failed to send email to {to_address}: {e}")
 
@@ -148,17 +171,22 @@ def index():
 
 @app.route("/subscribe", methods=["POST"])
 def subscribe():
-    """Subscribe endpoint - lazy loads database only when needed"""
+    """Subscribe endpoint with optimized database handling"""
    data = request.get_json() or {}
     email = data.get("email")
 
     if not email:
         return jsonify(error="No email provided"), 400
 
-    ensure_db_initialized()
-    if add_email(email):
-        unsubscribe_link = f"{request.url_root}unsubscribe?email={email}"
-        Thread(
-            target=send_confirmation_async,
-            args=(email, unsubscribe_link),
+    # Validate email format (basic check)
+    if "@" not in email or "." not in email.split("@")[-1]:
+        return jsonify(error="Invalid email format"), 400
+
+    try:
+        if add_email(email):
+            unsubscribe_link = f"{request.url_root}unsubscribe?email={email}"
+
+            # Start email sending in background thread
+            Thread(
+                target=send_confirmation_async,
+                args=(email, unsubscribe_link),
@@ -166,47 +194,90 @@
             ).start()
 
             return jsonify(message="Email has been added"), 201
-    return jsonify(error="Email already exists"), 400
+        else:
+            return jsonify(error="Email already exists"), 400
+
+    except Exception as e:
+        app.logger.error(f"Error in subscribe endpoint: {e}")
+        return jsonify(error="Internal server error"), 500
 
 @app.route("/unsubscribe", methods=["GET"])
 def unsubscribe():
-    """Unsubscribe endpoint - lazy loads database only when needed"""
+    """Unsubscribe endpoint with optimized database handling"""
     email = request.args.get("email")
 
     if not email:
         return "No email specified.", 400
 
-    ensure_db_initialized()
-    if remove_email(email):
-        return f"The email {email} has been unsubscribed.", 200
-    return f"Email {email} was not found or has already been unsubscribed.", 400
+    try:
+        if remove_email(email):
+            return f"The email {email} has been unsubscribed.", 200
+        else:
+            return f"Email {email} was not found or has already been unsubscribed.", 400
+
+    except Exception as e:
+        app.logger.error(f"Error in unsubscribe endpoint: {e}")
+        return "Internal server error", 500
 
 @app.route("/newsletters", methods=["GET"])
 def newsletters():
     """
     List all newsletters (newest first) with caching for better performance.
     """
-    newsletters = get_newsletters_cached()
-    return render_template("newsletters.html", newsletters=newsletters)
+    try:
+        newsletters = get_newsletters_cached()
+        return render_template("newsletters.html", newsletters=newsletters)
+    except Exception as e:
+        app.logger.error(f"Error in newsletters endpoint: {e}")
+        return "Internal server error", 500
 
 @app.route("/newsletter/<int:newsletter_id>", methods=["GET"])
 def newsletter_detail(newsletter_id):
     """
     Show a single newsletter by its ID with caching.
     """
-    newsletter = get_newsletter_by_id_cached(newsletter_id)
-
-    if not newsletter:
-        return "Newsletter not found.", 404
-
-    return render_template("newsletter_detail.html", newsletter=newsletter)
+    try:
+        newsletter = get_newsletter_by_id_cached(newsletter_id)
+
+        if not newsletter:
+            return "Newsletter not found.", 404
+
+        return render_template("newsletter_detail.html", newsletter=newsletter)
+    except Exception as e:
+        app.logger.error(f"Error in newsletter_detail endpoint: {e}")
+        return "Internal server error", 500
 
 @app.route("/admin/clear-cache", methods=["POST"])
 def clear_cache():
     """Admin endpoint to clear newsletter cache"""
-    clear_newsletter_cache()
-    return jsonify(message="Cache cleared successfully"), 200
+    try:
+        clear_newsletter_cache()
+        return jsonify(message="Cache cleared successfully"), 200
+    except Exception as e:
+        app.logger.error(f"Error clearing cache: {e}")
+        return jsonify(error="Failed to clear cache"), 500
+
+@app.route("/health", methods=["GET"])
+def health_check():
+    """Health check endpoint for monitoring"""
+    return jsonify(status="healthy", timestamp=time.time()), 200
+
+# Error handlers
+@app.errorhandler(404)
+def not_found(error):
+    return jsonify(error="Not found"), 404
+
+@app.errorhandler(500)
+def internal_error(error):
+    return jsonify(error="Internal server error"), 500
+
+# Initialize database at startup
+try:
+    init_db()
+    app.logger.info("Database initialized successfully")
+except Exception as e:
+    app.logger.error(f"Failed to initialize database: {e}")
+    raise
 
 if __name__ == "__main__":
     app.run(host="0.0.0.0", debug=True)
```
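With the new `/health` route and JSON error handlers in place, the endpoints can be smoke-tested with a small stdlib client. A sketch under the assumption the app is reachable at `http://localhost:5000`; the response shapes follow the `jsonify` calls above:

```python
import json
import urllib.error
import urllib.request

BASE = "http://localhost:5000"  # assumed local dev address

def post_json(path: str, payload: dict):
    """POST JSON and return (status, parsed body); 4xx/5xx bodies are JSON too."""
    req = urllib.request.Request(
        f"{BASE}{path}",
        data=json.dumps(payload).encode(),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            return resp.status, json.load(resp)
    except urllib.error.HTTPError as e:
        return e.code, json.load(e)

if __name__ == "__main__":
    with urllib.request.urlopen(f"{BASE}/health", timeout=10) as resp:
        print(json.load(resp))                    # {"status": "healthy", ...}
    print(post_json("/subscribe", {"email": "rider@example.com"}))
    # first run:  (201, {"message": "Email has been added"})
    # second run: (400, {"error": "Email already exists"})
```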
HTML template

```diff
@@ -6,14 +6,8 @@
     <title>RideAware - Smart Cycling Training Platform</title>
     <link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
     <link rel="preconnect" href="https://cdn.statically.io" crossorigin>
+    <link rel="stylesheet" href="{{ url_for('static', filename='css/styles.css') }}">
-    <link rel="preload" as="style"
-          href="https://cdn.statically.io/gl/rideaware/landing/main/static/css/styles.min.css"
-          onload="this.onload=null;this.rel='stylesheet'">
-    <noscript>
-        <link rel="stylesheet"
-              href="https://cdn.statically.io/gl/rideaware/landing/main/static/css/styles.min.css">
-    </noscript>
 
 </head>
 <body>
     <!-- Navigation -->
```