Compare commits
No commits in common. "a8bcd5e249bf9552dd433d5ec26937f19e861282" and "7cd0cb2377eb297d4fb7f12d1756b40046840392" have entirely different histories.
a8bcd5e249...7cd0cb2377
10 changed files with 16 additions and 316 deletions
.gitignore (vendored) · 17 changes
@@ -171,19 +171,4 @@ cython_debug/
.ruff_cache/

# PyPI configuration file
.pypirc

# Flask-Migrate / Alembic
# Keep migrations in Git, but ignore cache/compiled files
migrations/__pycache__/
migrations/*.pyc

# Docker
*.pid
*.log
docker-compose.override.yml
.docker/
.wheels/

# VSCode / Editor configs
.vscode/
.pypirc
@@ -1 +0,0 @@
Single-database configuration for Flask.
@@ -1,50 +0,0 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1,113 +0,0 @@
import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')


def get_engine():
    try:
        # this works with Flask-SQLAlchemy<3 and Alchemical
        return current_app.extensions['migrate'].db.get_engine()
    except (TypeError, AttributeError):
        # this works with Flask-SQLAlchemy>=3
        return current_app.extensions['migrate'].db.engine


def get_engine_url():
    try:
        return get_engine().url.render_as_string(hide_password=False).replace(
            '%', '%%')
    except AttributeError:
        return str(get_engine().url).replace('%', '%%')


# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_metadata():
    if hasattr(target_db, 'metadatas'):
        return target_db.metadatas[None]
    return target_db.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=get_metadata(), literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    connectable = get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -1,99 +0,0 @@
"""Initial migration

Revision ID: 0e07095d2961
Revises:
Create Date: 2025-08-29 01:28:57.822103

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '0e07095d2961'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('admins')
    with op.batch_alter_table('subscribers', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('idx_subscribers_created_at'))
        batch_op.drop_index(batch_op.f('idx_subscribers_email'))
        batch_op.drop_index(batch_op.f('idx_subscribers_status'))

    op.drop_table('subscribers')
    op.drop_table('admin_users')
    op.drop_table('email_deliveries')
    with op.batch_alter_table('newsletters', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('idx_newsletters_sent_at'))

    op.drop_table('newsletters')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('newsletters',
    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('newsletters_id_seq'::regclass)"), autoincrement=True, nullable=False),
    sa.Column('subject', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('sent_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('sent_by', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('recipient_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
    sa.Column('success_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
    sa.Column('failure_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('id', name='newsletters_pkey'),
    postgresql_ignore_search_path=False
    )
    with op.batch_alter_table('newsletters', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('idx_newsletters_sent_at'), [sa.literal_column('sent_at DESC')], unique=False)

    op.create_table('email_deliveries',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('newsletter_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('sent_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('error_message', sa.TEXT(), autoincrement=False, nullable=True),
    sa.CheckConstraint("status = ANY (ARRAY['sent'::text, 'failed'::text, 'bounced'::text])", name=op.f('email_deliveries_status_check')),
    sa.ForeignKeyConstraint(['newsletter_id'], ['newsletters.id'], name=op.f('email_deliveries_newsletter_id_fkey')),
    sa.PrimaryKeyConstraint('id', name=op.f('email_deliveries_pkey'))
    )
    op.create_table('admin_users',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('username', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('password', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('last_login', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
    sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('admin_users_pkey')),
    sa.UniqueConstraint('username', name=op.f('admin_users_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    op.create_table('subscribers',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('subscribed_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('status', sa.TEXT(), server_default=sa.text("'active'::text"), autoincrement=False, nullable=True),
    sa.Column('source', sa.TEXT(), server_default=sa.text("'manual'::text"), autoincrement=False, nullable=True),
    sa.CheckConstraint("status = ANY (ARRAY['active'::text, 'unsubscribed'::text])", name=op.f('subscribers_status_check')),
    sa.PrimaryKeyConstraint('id', name=op.f('subscribers_pkey')),
    sa.UniqueConstraint('email', name=op.f('subscribers_email_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    with op.batch_alter_table('subscribers', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('idx_subscribers_status'), ['status'], unique=False)
        batch_op.create_index(batch_op.f('idx_subscribers_email'), ['email'], unique=False)
        batch_op.create_index(batch_op.f('idx_subscribers_created_at'), [sa.literal_column('created_at DESC')], unique=False)

    op.create_table('admins',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('username', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
    sa.Column('password_hash', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('admins_pkey')),
    sa.UniqueConstraint('username', name=op.f('admins_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    # ### end Alembic commands ###
@@ -5,11 +5,11 @@ from urllib.parse import quote_plus
load_dotenv()

PG_USER = quote_plus(os.getenv("PG_USER", "postgres"))
PG_PASSWORD = quote_plus(os.getenv("PG_PASSWORD", "postgres"))
PG_HOST = os.getenv("PG_HOST", "localhost")
PG_PORT = os.getenv("PG_PORT", "5432")
PG_DATABASE = os.getenv("PG_DATABASE", "rideaware")
PG_USER = quote_plus(os.getenv('PG_USER'))
PG_PASSWORD = quote_plus(os.getenv('PG_PASSWORD'))
PG_HOST = os.getenv('PG_HOST')
PG_PORT = os.getenv('PG_PORT')
PG_DATABASE = os.getenv('PG_DATABASE')

DATABASE_URI = f"postgresql+psycopg2://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}"
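A note on the no-default lines above, which appear to be the incoming side of this hunk: quote_plus requires a string, so quote_plus(os.getenv('PG_USER')) raises TypeError whenever the variable is unset. A minimal fail-fast sketch; the require_env helper is hypothetical, not part of the diff:

import os
from urllib.parse import quote_plus

def require_env(name: str) -> str:
    # Hypothetical helper: surface a clear error instead of a
    # TypeError from quote_plus(None) when a variable is missing.
    value = os.getenv(name)
    if value is None:
        raise RuntimeError(f"Missing required environment variable: {name}")
    return value

PG_USER = quote_plus(require_env("PG_USER"))
PG_PASSWORD = quote_plus(require_env("PG_PASSWORD"))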
@@ -4,5 +4,4 @@ flask_cors
flask_sqlalchemy
python-dotenv
werkzeug
psycopg2-binary
Flask-Migrate
psycopg2-binary
@@ -25,11 +25,15 @@ def signup():
@auth_bp.route("/login", methods=["POST"])
def login():
    data = request.get_json()
<<<<<<< HEAD
    username = data.get("username")
    password = data.get("password")

    print(f"Login attempt: username={username}, password={password}")

=======

>>>>>>> 3ab162d8b88a23ad1d0ef5f72a3162bdd7f75ca8
    try:
        user = user_service.verify_user(username, password)
        session["user_id"] = user.id
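The file shown in this hunk still contains unresolved merge-conflict markers (<<<<<<< HEAD through >>>>>>>), which are a syntax error in Python; on the non-HEAD side, username and password would also be undefined when verify_user is called. One plausible resolution, assuming the HEAD assignments are the intended behavior and dropping the print that logs raw credentials (a sketch, not the committed code; the JSON responses are assumptions):

from flask import Blueprint, jsonify, request, session

auth_bp = Blueprint("auth", __name__)

@auth_bp.route("/login", methods=["POST"])
def login():
    # Tolerate a missing or non-JSON body instead of raising.
    data = request.get_json(silent=True) or {}
    username = data.get("username")
    password = data.get("password")

    try:
        # user_service comes from the surrounding module, as in the diff context.
        user = user_service.verify_user(username, password)
        session["user_id"] = user.id
        return jsonify({"id": user.id}), 200
    except Exception:
        # Avoid echoing credentials or internal details to the client.
        return jsonify({"error": "invalid credentials"}), 401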
server.py · 11 changes
@@ -2,8 +2,6 @@ import os
from flask import Flask
from flask_cors import CORS
from dotenv import load_dotenv
from flask_migrate import Migrate
from flask.cli import FlaskGroup

from models import db, init_db
from routes.user_auth import auth
@@ -15,10 +13,9 @@ app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

CORS(app)
CORS(app)  # Consider specific origins in production

init_db(app)
migrate = Migrate(app, db)
app.register_blueprint(auth.auth_bp)
@@ -27,7 +24,9 @@ def health_check():
    """Health check endpoint."""
    return "OK", 200

cli = FlaskGroup(app)

with app.app_context():
    db.create_all()

if __name__ == "__main__":
    cli()
    app.run(debug=True)
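Reading the incoming side of the three server.py hunks together: the Flask-Migrate wiring and the FlaskGroup CLI are removed, and the schema is instead created with db.create_all() at startup. A sketch of the resulting file, under the assumption that only the shown lines changed; the /health route path and the load_dotenv() call are not visible in the hunks and are assumed:

import os
from flask import Flask
from flask_cors import CORS
from dotenv import load_dotenv

from models import db, init_db
from routes.user_auth import auth

load_dotenv()  # assumed; not visible in the hunks

app = Flask(__name__)
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

CORS(app)  # Consider specific origins in production

init_db(app)
app.register_blueprint(auth.auth_bp)

@app.route("/health")  # route path assumed
def health_check():
    """Health check endpoint."""
    return "OK", 200

# With the migrations directory deleted, tables are created directly at
# startup; schema changes are no longer versioned by Alembic.
with app.app_context():
    db.create_all()

if __name__ == "__main__":
    app.run(debug=True)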