Compare commits

..

10 commits

4 changed files with 412 additions and 156 deletions

View file

@@ -1,17 +1,52 @@
FROM python:3.11-slim-bullseye

# Install system dependencies (curl is required by the HEALTHCHECK below).
# --no-install-recommends keeps the image small; the apt cache is removed
# in the same layer so it never bloats the image.
RUN apt-get update && \
    apt-get install -y --no-install-recommends build-essential curl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /rideaware_landing

# Copy requirements first for better layer caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Set environment variables
ENV FLASK_APP=server.py
ENV PYTHONUNBUFFERED=1
ENV PYTHONPATH=/rideaware_landing

# Create non-root user for security
RUN useradd --create-home --shell /bin/bash app && \
    chown -R app:app /rideaware_landing
USER app

# Expose port
EXPOSE 5000

# Health check (hits the /health route served by server.py)
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:5000/health || exit 1

# Run with optimized Gunicorn settings
CMD ["gunicorn", \
     "--bind", "0.0.0.0:5000", \
     "--workers", "4", \
     "--worker-class", "sync", \
     "--worker-connections", "1000", \
     "--max-requests", "1000", \
     "--max-requests-jitter", "50", \
     "--preload", \
     "--timeout", "30", \
     "--keep-alive", "2", \
     "--access-logfile", "-", \
     "--error-logfile", "-", \
     "--log-level", "info", \
     "server:app"]

View file

@@ -1,66 +1,222 @@
import os import os
import psycopg2 import psycopg2
from psycopg2 import IntegrityError from psycopg2 import pool, IntegrityError
from dotenv import load_dotenv from dotenv import load_dotenv
import logging
load_dotenv() load_dotenv()
# Global connection pool. A ThreadedConnectionPool implies concurrent
# callers, so first-time creation is guarded by a lock: without it two
# threads could race through the `is None` check and build two pools.
# (A file-local import here matches this module's style of late imports,
# e.g. `import atexit` near the bottom of the file.)
import threading

_connection_pool = None
_pool_lock = threading.Lock()


def get_connection_pool():
    """Create (once) and return the shared psycopg2 ThreadedConnectionPool.

    Pool credentials and location come from the PG_* environment
    variables; size is fixed at 2..20 connections with a 5s connect
    timeout. Re-raises whatever psycopg2 raised if creation fails.
    """
    global _connection_pool
    if _connection_pool is None:
        with _pool_lock:
            # Double-checked: another thread may have built it while we
            # waited on the lock.
            if _connection_pool is None:
                try:
                    _connection_pool = psycopg2.pool.ThreadedConnectionPool(
                        minconn=2,
                        maxconn=20,
                        host=os.getenv("PG_HOST"),
                        port=os.getenv("PG_PORT", 5432),
                        dbname=os.getenv("PG_DATABASE"),
                        user=os.getenv("PG_USER"),
                        password=os.getenv("PG_PASSWORD"),
                        connect_timeout=5,
                    )
                    logging.info("Database connection pool created successfully")
                except Exception as e:
                    logging.error(f"Error creating connection pool: {e}")
                    raise
    return _connection_pool
def get_connection():
    """Get a live connection from the pool.

    Stale (closed) connections are discarded back to the pool and the
    checkout is retried a bounded number of times, so the caller never
    receives a dead connection. (The previous single-retry version could
    still return a closed connection on the second attempt; the local was
    also named `pool`, shadowing the imported psycopg2 submodule.)

    Raises on pool errors or if no usable connection can be obtained.
    """
    try:
        conn_pool = get_connection_pool()
        for _ in range(3):
            conn = conn_pool.getconn()
            if not conn.closed:
                return conn
            # Connection is closed: remove it from the pool and retry.
            conn_pool.putconn(conn, close=True)
        raise psycopg2.OperationalError(
            "could not obtain a usable connection from the pool"
        )
    except Exception as e:
        logging.error(f"Error getting connection from pool: {e}")
        raise
def return_connection(conn):
    """Hand a checked-out connection back to the shared pool.

    Failures are logged and swallowed on purpose: callers invoke this
    from `finally` blocks and must not see a secondary exception.
    """
    try:
        get_connection_pool().putconn(conn)
    except Exception as e:
        logging.error(f"Error returning connection to pool: {e}")
def close_all_connections():
    """Tear down the pool, closing every connection it holds.

    Resets the module-level handle to None so a later call to
    get_connection_pool() would build a fresh pool. No-op if the pool
    was never created.
    """
    global _connection_pool
    if not _connection_pool:
        return
    _connection_pool.closeall()
    _connection_pool = None
    logging.info("All database connections closed")
def column_exists(cursor, table_name, column_name):
    """Return True if `table_name`.`column_name` exists in the public schema.

    The schema filter keeps this consistent with index_exists() (which
    restricts to the `public` namespace) and avoids false positives from
    same-named tables in other schemas.

    cursor: an open psycopg2 cursor; this function does not close it.
    """
    cursor.execute("""
        SELECT EXISTS (
            SELECT 1
            FROM information_schema.columns
            WHERE table_schema = 'public'
              AND table_name = %s AND column_name = %s
        )
    """, (table_name, column_name))
    return cursor.fetchone()[0]
def index_exists(cursor, index_name):
    """Return True if an index named `index_name` exists in the public schema.

    Filters on relkind = 'i' so that only actual indexes match; the
    previous query matched ANY pg_class relation (table, view, sequence)
    with that name, which could wrongly skip index creation in init_db().

    cursor: an open psycopg2 cursor; this function does not close it.
    """
    cursor.execute("""
        SELECT EXISTS (
            SELECT 1 FROM pg_class c
            JOIN pg_namespace n ON n.oid = c.relnamespace
            WHERE c.relname = %s AND c.relkind = 'i' AND n.nspname = 'public'
        )
    """, (index_name,))
    return cursor.fetchone()[0]
def init_db():
    """Initialize database tables and indexes (idempotent).

    Creates the subscribers and newsletters tables if absent, backfills
    the subscribers.created_at column, and creates the three supporting
    indexes. Commits on success; rolls back and re-raises on any error.
    The connection always goes back to the pool.
    """
    conn = None
    try:
        conn = get_connection()
        # `with` guarantees the cursor is closed even if a DDL statement
        # fails (the previous version leaked it on the error path).
        with conn.cursor() as cursor:
            # Create subscribers table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS subscribers (
                    id SERIAL PRIMARY KEY,
                    email TEXT UNIQUE NOT NULL
                )
            """)

            # Add created_at column if it doesn't exist (migration for
            # rows created before the column was introduced).
            if not column_exists(cursor, 'subscribers', 'created_at'):
                cursor.execute("""
                    ALTER TABLE subscribers
                    ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                """)
                logging.info("Added created_at column to subscribers table")

            # Create newsletters table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS newsletters(
                    id SERIAL PRIMARY KEY,
                    subject TEXT NOT NULL,
                    body TEXT NOT NULL,
                    sent_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)

            # Create indexes only if they don't exist
            if not index_exists(cursor, 'idx_newsletters_sent_at'):
                cursor.execute("CREATE INDEX idx_newsletters_sent_at ON newsletters(sent_at DESC)")
                logging.info("Created index idx_newsletters_sent_at")

            if not index_exists(cursor, 'idx_subscribers_email'):
                cursor.execute("CREATE INDEX idx_subscribers_email ON subscribers(email)")
                logging.info("Created index idx_subscribers_email")

            if not index_exists(cursor, 'idx_subscribers_created_at'):
                cursor.execute("CREATE INDEX idx_subscribers_created_at ON subscribers(created_at DESC)")
                logging.info("Created index idx_subscribers_created_at")

        conn.commit()
        logging.info("Database tables and indexes initialized successfully")
    except Exception as e:
        logging.error(f"Error initializing database: {e}")
        if conn:
            conn.rollback()
        raise
    finally:
        if conn:
            return_connection(conn)
def add_email(email):
    """Insert `email` into the subscribers table.

    Returns True on success, False if the address already exists
    (UNIQUE violation) or on any other database error. Errors are
    logged, never raised to the caller; the connection is always
    returned to the pool.
    """
    conn = None
    try:
        conn = get_connection()
        # The context manager closes the cursor on every exit path —
        # including the IntegrityError raised for duplicates, where the
        # previous version leaked it.
        with conn.cursor() as cursor:
            cursor.execute("INSERT INTO subscribers (email) VALUES (%s)", (email,))
        conn.commit()
        logging.info(f"Email added successfully: {email}")
        return True
    except IntegrityError:
        # Email already exists (UNIQUE constraint on subscribers.email)
        if conn:
            conn.rollback()
        logging.info(f"Email already exists: {email}")
        return False
    except Exception as e:
        if conn:
            conn.rollback()
        logging.error(f"Error adding email {email}: {e}")
        return False
    finally:
        if conn:
            return_connection(conn)
def remove_email(email):
    """Delete `email` from the subscribers table.

    Returns True if a row was deleted, False if the address was not
    present or a database error occurred. Errors are logged, never
    raised; the connection is always returned to the pool.
    """
    conn = None
    try:
        conn = get_connection()
        # Context manager guarantees the cursor is closed on every path
        # (the previous version leaked it when an exception fired).
        with conn.cursor() as cursor:
            cursor.execute("DELETE FROM subscribers WHERE email = %s", (email,))
            # rowcount is valid immediately after execute().
            rows_affected = cursor.rowcount
        conn.commit()

        if rows_affected > 0:
            logging.info(f"Email removed successfully: {email}")
            return True
        logging.info(f"Email not found for removal: {email}")
        return False
    except Exception as e:
        if conn:
            conn.rollback()
        logging.error(f"Error removing email {email}: {e}")
        return False
    finally:
        if conn:
            return_connection(conn)
def get_subscriber_count():
    """Return the total number of subscriber rows, or 0 on any DB error."""
    conn = None
    try:
        conn = get_connection()
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM subscribers")
        (total,) = cursor.fetchone()
        cursor.close()
        return total
    except Exception as e:
        # Best-effort metric: a broken count should not break the caller.
        logging.error(f"Error getting subscriber count: {e}")
        return 0
    finally:
        if conn:
            return_connection(conn)
# Release every pooled connection when the interpreter shuts down, so the
# server exits without leaving dangling Postgres sessions.
import atexit

atexit.register(close_all_connections)

273
server.py
View file

@ -1,11 +1,12 @@
import os import os
import time import time
import logging
from threading import Thread from threading import Thread
import smtplib import smtplib
from email.mime.text import MIMEText from email.mime.text import MIMEText
from flask import Flask, render_template, request, jsonify from flask import Flask, render_template, request, jsonify, g
from dotenv import load_dotenv from dotenv import load_dotenv
from database import init_db, get_connection, add_email, remove_email from database import init_db, get_connection, return_connection, add_email, remove_email
load_dotenv() load_dotenv()
@ -16,18 +17,14 @@ SMTP_PASSWORD = os.getenv('SMTP_PASSWORD')
app = Flask(__name__) app = Flask(__name__)
_db_initialized = False # Configure logging
logging.basicConfig(level=logging.INFO)
# Cache configuration
_newsletter_cache = {} _newsletter_cache = {}
_cache_timestamp = {} _cache_timestamp = {}
CACHE_DURATION = 300 CACHE_DURATION = 300
def ensure_db_initialized():
"""Lazy database initialization - only runs on first database access"""
global _db_initialized
if not _db_initialized:
init_db()
_db_initialized = True
def get_newsletters_cached(): def get_newsletters_cached():
"""Get newsletters with caching to reduce database hits""" """Get newsletters with caching to reduce database hits"""
current_time = time.time() current_time = time.time()
@ -36,26 +33,32 @@ def get_newsletters_cached():
current_time - _cache_timestamp.get('newsletters', 0) < CACHE_DURATION): current_time - _cache_timestamp.get('newsletters', 0) < CACHE_DURATION):
return _newsletter_cache['newsletters'] return _newsletter_cache['newsletters']
ensure_db_initialized() conn = None
conn = get_connection() try:
cursor = conn.cursor() conn = get_connection()
cursor.execute( cursor = conn.cursor()
"SELECT id, subject, body, sent_at " cursor.execute(
"FROM newsletters ORDER BY sent_at DESC" "SELECT id, subject, body, sent_at "
) "FROM newsletters ORDER BY sent_at DESC LIMIT 100"
rows = cursor.fetchall() )
cursor.close() rows = cursor.fetchall()
conn.close() cursor.close()
newsletters = [ newsletters = [
{"id": r[0], "subject": r[1], "body": r[2], "sent_at": r[3]} {"id": r[0], "subject": r[1], "body": r[2], "sent_at": r[3]}
for r in rows for r in rows
] ]
_newsletter_cache['newsletters'] = newsletters _newsletter_cache['newsletters'] = newsletters
_cache_timestamp['newsletters'] = current_time _cache_timestamp['newsletters'] = current_time
return newsletters return newsletters
except Exception as e:
app.logger.error(f"Database error in get_newsletters_cached: {e}")
return []
finally:
if conn:
return_connection(conn)
def get_newsletter_by_id_cached(newsletter_id): def get_newsletter_by_id_cached(newsletter_id):
"""Get single newsletter with caching""" """Get single newsletter with caching"""
@ -66,32 +69,38 @@ def get_newsletter_by_id_cached(newsletter_id):
current_time - _cache_timestamp.get(cache_key, 0) < CACHE_DURATION): current_time - _cache_timestamp.get(cache_key, 0) < CACHE_DURATION):
return _newsletter_cache[cache_key] return _newsletter_cache[cache_key]
ensure_db_initialized() conn = None
conn = get_connection() try:
cursor = conn.cursor() conn = get_connection()
cursor.execute( cursor = conn.cursor()
"SELECT id, subject, body, sent_at " cursor.execute(
"FROM newsletters WHERE id = %s", "SELECT id, subject, body, sent_at "
(newsletter_id,) "FROM newsletters WHERE id = %s",
) (newsletter_id,)
row = cursor.fetchone() )
cursor.close() row = cursor.fetchone()
conn.close() cursor.close()
if not row:
return None
newsletter = { if not row:
"id": row[0], return None
"subject": row[1],
"body": row[2], newsletter = {
"sent_at": row[3] "id": row[0],
} "subject": row[1],
"body": row[2],
_newsletter_cache[cache_key] = newsletter "sent_at": row[3]
_cache_timestamp[cache_key] = current_time }
return newsletter _newsletter_cache[cache_key] = newsletter
_cache_timestamp[cache_key] = current_time
return newsletter
except Exception as e:
app.logger.error(f"Database error in get_newsletter_by_id_cached: {e}")
return None
finally:
if conn:
return_connection(conn)
def clear_newsletter_cache(): def clear_newsletter_cache():
"""Clear newsletter cache when data is updated""" """Clear newsletter cache when data is updated"""
@ -103,35 +112,49 @@ def clear_newsletter_cache():
_cache_timestamp.pop(key, None) _cache_timestamp.pop(key, None)
@app.before_request
def before_request():
    """Stamp the request's wall-clock start time onto the app context `g`
    so after_request() can compute the total handling time."""
    g.start_time = time.time()
@app.after_request
def after_request(response):
    """Log request timing and attach an X-Response-Time header.

    Warnings for requests over 1s, info for requests over 0.5s.
    """
    # getattr guard: g.start_time may be missing (e.g. if an earlier
    # before_request hook raised before ours ran); reading it bare would
    # turn every such request into an AttributeError here.
    total_time = time.time() - getattr(g, "start_time", time.time())

    # Log slow requests
    if total_time > 1.0:
        app.logger.warning(f"Slow request: {request.method} {request.path} took {total_time:.3f}s")
    elif total_time > 0.5:
        app.logger.info(f"Request: {request.method} {request.path} took {total_time:.3f}s")

    # Add performance headers for debugging
    response.headers['X-Response-Time'] = f"{total_time:.3f}s"

    return response
def send_confirmation_email(to_address: str, unsubscribe_link: str):
    """
    Sends the HTML confirmation email to `to_address`.
    This runs inside its own SMTP_SSL connection with reduced timeout.
    """
    try:
        body = render_template(
            "confirmation_email.html",
            unsubscribe_link=unsubscribe_link
        )
        message = MIMEText(body, "html", "utf-8")
        message["Subject"] = "Thanks for subscribing!"
        message["From"] = SMTP_USER
        message["To"] = to_address

        # Short timeout: this runs on a background thread, but we still
        # don't want a hung SMTP server pinning the thread for long.
        with smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT, timeout=5) as server:
            server.login(SMTP_USER, SMTP_PASSWORD)
            server.sendmail(SMTP_USER, [to_address], message.as_string())
        app.logger.info(f"Confirmation email sent successfully to {to_address}")
    except Exception as e:
        # Best-effort: subscription already succeeded, so only log.
        app.logger.error(f"Failed to send email to {to_address}: {e}")
@ -148,65 +171,113 @@ def index():
@app.route("/subscribe", methods=["POST"])
def subscribe():
    """Subscribe endpoint.

    Expects JSON {"email": ...}; inserts the address and fires the
    confirmation email on a daemon thread. 201 on success, 400 on
    missing/invalid/duplicate email, 500 on unexpected errors.
    """
    data = request.get_json() or {}
    email = data.get("email")

    if not email:
        return jsonify(error="No email provided"), 400

    # Validate email format (basic check)
    if "@" not in email or "." not in email.split("@")[-1]:
        return jsonify(error="Invalid email format"), 400

    try:
        if add_email(email):
            # URL-encode the address: characters that are legal in emails
            # ('+' in particular) would otherwise corrupt the query string
            # and break the unsubscribe link.
            from urllib.parse import quote
            unsubscribe_link = f"{request.url_root}unsubscribe?email={quote(email)}"

            # Start email sending in background thread so the HTTP
            # response is not blocked on SMTP.
            Thread(
                target=send_confirmation_async,
                args=(email, unsubscribe_link),
                daemon=True
            ).start()

            return jsonify(message="Email has been added"), 201

        return jsonify(error="Email already exists"), 400
    except Exception as e:
        app.logger.error(f"Error in subscribe endpoint: {e}")
        return jsonify(error="Internal server error"), 500
@app.route("/unsubscribe", methods=["GET"])
def unsubscribe():
    """Unsubscribe endpoint.

    Removes the address given in the `email` query parameter. 200 if a
    row was deleted, 400 if missing/unknown, 500 on unexpected errors.
    """
    email = request.args.get("email")
    if not email:
        return "No email specified.", 400

    try:
        # Escape the user-supplied address before reflecting it: Flask
        # serves plain string returns as text/html, so raw interpolation
        # of a query parameter is a reflected-XSS vector.
        from html import escape
        safe_email = escape(email)

        if remove_email(email):
            return f"The email {safe_email} has been unsubscribed.", 200
        return f"Email {safe_email} was not found or has already been unsubscribed.", 400
    except Exception as e:
        app.logger.error(f"Error in unsubscribe endpoint: {e}")
        return "Internal server error", 500
@app.route("/newsletters", methods=["GET"])
def newsletters():
    """
    List all newsletters (newest first) with caching for better performance.
    """
    try:
        return render_template(
            "newsletters.html",
            newsletters=get_newsletters_cached(),
        )
    except Exception as e:
        app.logger.error(f"Error in newsletters endpoint: {e}")
        return "Internal server error", 500
@app.route("/newsletter/<int:newsletter_id>", methods=["GET"])
def newsletter_detail(newsletter_id):
    """
    Show a single newsletter by its ID with caching.
    """
    try:
        item = get_newsletter_by_id_cached(newsletter_id)
        if item is None:
            return "Newsletter not found.", 404
        return render_template("newsletter_detail.html", newsletter=item)
    except Exception as e:
        app.logger.error(f"Error in newsletter_detail endpoint: {e}")
        return "Internal server error", 500
@app.route("/admin/clear-cache", methods=["POST"])
def clear_cache():
    """Admin endpoint to clear newsletter cache.

    NOTE(review): no authentication is visible on this admin route —
    anyone who can reach the server can flush the cache. Confirm it is
    protected upstream (reverse proxy / network policy) or add auth.
    """
    try:
        clear_newsletter_cache()
    except Exception as e:
        app.logger.error(f"Error clearing cache: {e}")
        return jsonify(error="Failed to clear cache"), 500
    return jsonify(message="Cache cleared successfully"), 200
@app.route("/health", methods=["GET"])
def health_check():
    """Liveness probe for monitoring (the Dockerfile HEALTHCHECK curls this)."""
    payload = {"status": "healthy", "timestamp": time.time()}
    return jsonify(**payload), 200
# Error handlers: return JSON bodies instead of Flask's default HTML pages,
# matching the JSON style of the API endpoints.
@app.errorhandler(404)
def not_found(error):
    """JSON body for unknown routes."""
    return jsonify(error="Not found"), 404


@app.errorhandler(500)
def internal_error(error):
    """JSON body for unhandled server errors."""
    return jsonify(error="Internal server error"), 500
# Initialize database at startup. Fail fast: re-raising means a worker
# that cannot reach the database never starts serving traffic.
try:
    init_db()
    app.logger.info("Database initialized successfully")
except Exception as e:
    app.logger.error(f"Failed to initialize database: {e}")
    raise


if __name__ == "__main__":
    # NOTE(review): debug=True on 0.0.0.0 exposes the Werkzeug debugger
    # to the network — this entry point must stay dev-only; production
    # runs via Gunicorn (see the Dockerfile CMD). Confirm server.py is
    # never executed directly in production.
    app.run(host="0.0.0.0", debug=True)

View file

@ -6,14 +6,8 @@
<title>RideAware - Smart Cycling Training Platform</title> <title>RideAware - Smart Cycling Training Platform</title>
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet"> <link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<link rel="preconnect" href="https://cdn.statically.io" crossorigin> <link rel="preconnect" href="https://cdn.statically.io" crossorigin>
<link rel="stylesheet" href="{{ url_for('static', filename='css/styles.css') }}">
<link rel="preload" as="style"
href="https://cdn.statically.io/gl/rideaware/landing/main/static/css/styles.min.css"
onload="this.onload=null;this.rel='stylesheet'">
<noscript>
<link rel="stylesheet"
href="https://cdn.statically.io/gl/rideaware/landing/main/static/css/styles.min.css">
</noscript>
</head> </head>
<body> <body>
<!-- Navigation --> <!-- Navigation -->