From b7a2f2ed4fc214da2b8c42df9ce7686524f75b22 Mon Sep 17 00:00:00 2001
From: Cipher Vance
Date: Thu, 28 Aug 2025 20:30:58 -0500
Subject: [PATCH 1/6] feat: updated gitignore for migrations

---
 .gitignore | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 1800114..a4d0d9e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -171,4 +171,19 @@ cython_debug/
 .ruff_cache/
 
 # PyPI configuration file
-.pypirc
\ No newline at end of file
+.pypirc
+
+# Flask-Migrate / Alembic
+# Keep migrations in Git, but ignore cache/compiled files
+migrations/__pycache__/
+migrations/*.pyc
+
+# Docker
+*.pid
+*.log
+docker-compose.override.yml
+.docker/
+.wheels/
+
+# VSCode / Editor configs
+.vscode/
\ No newline at end of file

From 0ac0e03e7a45f599ad9cd75611b7b1e36c1af5d6 Mon Sep 17 00:00:00 2001
From: Cipher Vance
Date: Thu, 28 Aug 2025 20:31:08 -0500
Subject: [PATCH 2/6] added flask-migrate

---
 requirements.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 103960e..001e473 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,4 +4,5 @@ flask_cors
 flask_sqlalchemy
 python-dotenv
 werkzeug
-psycopg2-binary
\ No newline at end of file
+psycopg2-binary
+Flask-Migrate
\ No newline at end of file

From cb664a284c958ec07a79f42ec6936ae23609555d Mon Sep 17 00:00:00 2001
From: Cipher Vance
Date: Thu, 28 Aug 2025 20:31:33 -0500
Subject: [PATCH 3/6] add Flask-Migrate and a FlaskGroup CLI, remove
 db.create_all()

---
 server.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/server.py b/server.py
index 1b00247..5800353 100644
--- a/server.py
+++ b/server.py
@@ -2,6 +2,8 @@ import os
 from flask import Flask
 from flask_cors import CORS
 from dotenv import load_dotenv
+from flask_migrate import Migrate
+from flask.cli import FlaskGroup
 
 from models import db, init_db
 from routes.user_auth import auth
@@ -13,9 +15,10 @@
 app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
 app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE")
 app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
 
-CORS(app) # Consider specific origins in production
+CORS(app)
 
 init_db(app)
+migrate = Migrate(app, db)
 
 app.register_blueprint(auth.auth_bp)
@@ -24,9 +27,7 @@ def health_check():
     """Health check endpoint."""
     return "OK", 200
 
-
-with app.app_context():
-    db.create_all()
+cli = FlaskGroup(create_app=lambda: app)
 
 if __name__ == "__main__":
-    app.run(debug=True)
+    cli()
\ No newline at end of file
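
Note on PATCH 3: FlaskGroup's first positional parameter is
add_default_commands, not the application, so the entrypoint hands the app
to the CLI through create_app; `python server.py db upgrade` then runs
Flask-Migrate's `db` command group. With db.create_all() gone, schema setup
now happens by applying migrations. A minimal sketch of doing the same
programmatically, assuming the `app` object from server.py above:

    # apply any pending Alembic revisions up to "head" -- the
    # migration-aware replacement for the removed db.create_all()
    from flask_migrate import upgrade

    with app.app_context():
        upgrade()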

From 8698a0f8757de6d50a679eeea63aeec94442cbce Mon Sep 17 00:00:00 2001
From: Cipher Vance
Date: Thu, 28 Aug 2025 20:31:48 -0500
Subject: [PATCH 4/6] init: first migrations

---
 migrations/README                             |   1 +
 migrations/alembic.ini                        |  50 ++++++++
 migrations/env.py                             | 113 ++++++++++++++++++
 migrations/script.py.mako                     |  24 ++++
 .../0e07095d2961_initial_migration.py         |  99 +++++++++++++++
 5 files changed, 287 insertions(+)
 create mode 100644 migrations/README
 create mode 100644 migrations/alembic.ini
 create mode 100644 migrations/env.py
 create mode 100644 migrations/script.py.mako
 create mode 100644 migrations/versions/0e07095d2961_initial_migration.py

diff --git a/migrations/README b/migrations/README
new file mode 100644
index 0000000..0e04844
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Single-database configuration for Flask.
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 0000000..ec9d45c
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,50 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic,flask_migrate
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[logger_flask_migrate]
+level = INFO
+handlers =
+qualname = flask_migrate
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 0000000..4c97092
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,113 @@
+import logging
+from logging.config import fileConfig
+
+from flask import current_app
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+
+def get_engine():
+    try:
+        # this works with Flask-SQLAlchemy<3 and Alchemical
+        return current_app.extensions['migrate'].db.get_engine()
+    except (TypeError, AttributeError):
+        # this works with Flask-SQLAlchemy>=3
+        return current_app.extensions['migrate'].db.engine
+
+
+def get_engine_url():
+    try:
+        return get_engine().url.render_as_string(hide_password=False).replace(
+            '%', '%%')
+    except AttributeError:
+        return str(get_engine().url).replace('%', '%%')
+
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+config.set_main_option('sqlalchemy.url', get_engine_url())
+target_db = current_app.extensions['migrate'].db
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def get_metadata():
+    if hasattr(target_db, 'metadatas'):
+        return target_db.metadatas[None]
+    return target_db.metadata
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=get_metadata(), literal_binds=True
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, 'autogenerate', False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logger.info('No changes in schema detected.')
+
+    conf_args = current_app.extensions['migrate'].configure_args
+    if conf_args.get("process_revision_directives") is None:
+        conf_args["process_revision_directives"] = process_revision_directives
+
+    connectable = get_engine()
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=get_metadata(),
+            **conf_args
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 0000000..2c01563
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/0e07095d2961_initial_migration.py b/migrations/versions/0e07095d2961_initial_migration.py
new file mode 100644
index 0000000..594c8d6
--- /dev/null
+++ b/migrations/versions/0e07095d2961_initial_migration.py
@@ -0,0 +1,99 @@
+"""Initial migration
+
+Revision ID: 0e07095d2961
+Revises:
+Create Date: 2025-08-29 01:28:57.822103
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '0e07095d2961'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('admins')
+    with op.batch_alter_table('subscribers', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('idx_subscribers_created_at'))
+        batch_op.drop_index(batch_op.f('idx_subscribers_email'))
+        batch_op.drop_index(batch_op.f('idx_subscribers_status'))
+
+    op.drop_table('subscribers')
+    op.drop_table('admin_users')
+    op.drop_table('email_deliveries')
+    with op.batch_alter_table('newsletters', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('idx_newsletters_sent_at'))
+
+    op.drop_table('newsletters')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('newsletters',
+    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('newsletters_id_seq'::regclass)"), autoincrement=True, nullable=False),
+    sa.Column('subject', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('sent_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('sent_by', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('recipient_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
+    sa.Column('success_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
+    sa.Column('failure_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='newsletters_pkey'),
+    postgresql_ignore_search_path=False
+    )
+    with op.batch_alter_table('newsletters', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('idx_newsletters_sent_at'), [sa.literal_column('sent_at DESC')], unique=False)
+
+    op.create_table('email_deliveries',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('newsletter_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('sent_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('error_message', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.CheckConstraint("status = ANY (ARRAY['sent'::text, 'failed'::text, 'bounced'::text])", name=op.f('email_deliveries_status_check')),
+    sa.ForeignKeyConstraint(['newsletter_id'], ['newsletters.id'], name=op.f('email_deliveries_newsletter_id_fkey')),
+    sa.PrimaryKeyConstraint('id', name=op.f('email_deliveries_pkey'))
+    )
+    op.create_table('admin_users',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('username', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('password', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('last_login', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name=op.f('admin_users_pkey')),
+    sa.UniqueConstraint('username', name=op.f('admin_users_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
+    )
+    op.create_table('subscribers',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('subscribed_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('status', sa.TEXT(), server_default=sa.text("'active'::text"), autoincrement=False, nullable=True),
+    sa.Column('source', sa.TEXT(), server_default=sa.text("'manual'::text"), autoincrement=False, nullable=True),
+    sa.CheckConstraint("status = ANY (ARRAY['active'::text, 'unsubscribed'::text])", name=op.f('subscribers_status_check')),
+    sa.PrimaryKeyConstraint('id', name=op.f('subscribers_pkey')),
+    sa.UniqueConstraint('email', name=op.f('subscribers_email_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
+    )
+    with op.batch_alter_table('subscribers', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('idx_subscribers_status'), ['status'], unique=False)
+        batch_op.create_index(batch_op.f('idx_subscribers_email'), ['email'], unique=False)
+        batch_op.create_index(batch_op.f('idx_subscribers_created_at'), [sa.literal_column('created_at DESC')], unique=False)
+
+    op.create_table('admins',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('username', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
+    sa.Column('password_hash', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name=op.f('admins_pkey')),
+    sa.UniqueConstraint('username', name=op.f('admins_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
+    )
+    # ### end Alembic commands ###
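
Note on env.py above: get_engine_url() renders the URL via
render_as_string(hide_password=False) because plain str(url) masks the
password, and it doubles '%' so the value survives configparser
interpolation inside Alembic's config. A small illustration with made-up
credentials, not taken from this repo:

    # how SQLAlchemy renders a URL with and without the password
    from sqlalchemy.engine import make_url

    url = make_url("postgresql://user:p%40ss@localhost/db")
    print(url.render_as_string(hide_password=False))  # real password kept
    print(str(url))  # password shown as *** -- unusable for connecting

Note on the initial migration: the autogenerated upgrade() consists only of
drop_table/drop_index calls, meaning these tables exist in the live database
but have no matching SQLAlchemy models, so `db upgrade` as written would
delete them along with their data. Alembic's "please adjust!" comment
applies. If the drops are intentional but a table may already be absent in
some environments, one defensive option (an illustration only, not part of
the patch) is to consult the inspector first:

    # skip a drop when the table is already gone (idempotent cleanup)
    from alembic import op
    from sqlalchemy import inspect

    def upgrade():
        inspector = inspect(op.get_bind())
        if "admins" in inspector.get_table_names():
            op.drop_table("admins")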

From ad1ec15523a5e14ec7a12d925d522038c90fd7b1 Mon Sep 17 00:00:00 2001
From: Cipher Vance
Date: Thu, 28 Aug 2025 20:32:17 -0500
Subject: [PATCH 5/6] add development defaults for the Postgres connection
 settings

---
 models/__init__.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/models/__init__.py b/models/__init__.py
index a53c270..8dd3fe9 100644
--- a/models/__init__.py
+++ b/models/__init__.py
@@ -5,11 +5,11 @@ from urllib.parse import quote_plus
 
 load_dotenv()
 
-PG_USER = quote_plus(os.getenv('PG_USER'))
-PG_PASSWORD = quote_plus(os.getenv('PG_PASSWORD'))
-PG_HOST = os.getenv('PG_HOST')
-PG_PORT = os.getenv('PG_PORT')
-PG_DATABASE = os.getenv('PG_DATABASE')
+PG_USER = quote_plus(os.getenv("PG_USER", "postgres"))
+PG_PASSWORD = quote_plus(os.getenv("PG_PASSWORD", "postgres"))
+PG_HOST = os.getenv("PG_HOST", "localhost")
+PG_PORT = os.getenv("PG_PORT", "5432")
+PG_DATABASE = os.getenv("PG_DATABASE", "rideaware")
 
 DATABASE_URI = f"postgresql+psycopg2://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}"
 
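
Note on PATCH 5: the quote_plus() calls matter because PG_USER and
PG_PASSWORD are interpolated directly into DATABASE_URI, where characters
such as '@', ':' or '/' would otherwise be read as URL structure. A quick
illustration with a made-up password:

    # percent-encode credentials before embedding them in a DSN
    from urllib.parse import quote_plus

    print(quote_plus("p@ss:word/1"))  # -> p%40ss%3Aword%2F1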

From a8bcd5e249bf9552dd433d5ec26937f19e861282 Mon Sep 17 00:00:00 2001
From: Cipher Vance
Date: Thu, 28 Aug 2025 20:32:26 -0500
Subject: [PATCH 6/6] remove leftover merge conflict markers from the login
 route

---
 routes/user_auth/auth.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/routes/user_auth/auth.py b/routes/user_auth/auth.py
index 28cebd5..366642f 100644
--- a/routes/user_auth/auth.py
+++ b/routes/user_auth/auth.py
@@ -25,15 +25,11 @@ def signup():
 
 @auth_bp.route("/login", methods=["POST"])
 def login():
     data = request.get_json()
-<<<<<<< HEAD
     username = data.get("username")
     password = data.get("password")
 
     print(f"Login attempt: username={username}, password={password}")
-=======
-
->>>>>>> 3ab162d8b88a23ad1d0ef5f72a3162bdd7f75ca8
     try:
         user = user_service.verify_user(username, password)
         session["user_id"] = user.id
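
Note on PATCH 6: with the conflict markers gone, /login reads username and
password from the JSON body again; the surviving debug print still logs the
plaintext password and is worth removing in a follow-up. A quick smoke test
of the restored handler using Flask's test client (assumes the `app` from
server.py and an existing user; illustrative only):

    from server import app

    client = app.test_client()
    resp = client.post("/login", json={"username": "demo", "password": "demo"})
    print(resp.status_code)  # 200 on success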