cleanup: remove Python Flask application files
- Remove Flask server.py and Python models
- Delete Alembic migrations and configuration
- Remove Python requirements.txt and virtual environment scripts
- Clean up old Python service layer and route blueprints
parent 5e0ac13488
commit 3028e8f067
13 changed files with 0 additions and 584 deletions
@@ -1 +0,0 @@
Single-database configuration for Flask.
@@ -1,50 +0,0 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1,113 +0,0 @@
import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')


def get_engine():
    try:
        # this works with Flask-SQLAlchemy<3 and Alchemical
        return current_app.extensions['migrate'].db.get_engine()
    except (TypeError, AttributeError):
        # this works with Flask-SQLAlchemy>=3
        return current_app.extensions['migrate'].db.engine


def get_engine_url():
    try:
        return get_engine().url.render_as_string(hide_password=False).replace(
            '%', '%%')
    except AttributeError:
        return str(get_engine().url).replace('%', '%%')


# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_metadata():
    if hasattr(target_db, 'metadatas'):
        return target_db.metadatas[None]
    return target_db.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=get_metadata(), literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    connectable = get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -1,99 +0,0 @@
"""Initial migration

Revision ID: 0e07095d2961
Revises:
Create Date: 2025-08-29 01:28:57.822103

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '0e07095d2961'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('admins')
    with op.batch_alter_table('subscribers', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('idx_subscribers_created_at'))
        batch_op.drop_index(batch_op.f('idx_subscribers_email'))
        batch_op.drop_index(batch_op.f('idx_subscribers_status'))

    op.drop_table('subscribers')
    op.drop_table('admin_users')
    op.drop_table('email_deliveries')
    with op.batch_alter_table('newsletters', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('idx_newsletters_sent_at'))

    op.drop_table('newsletters')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('newsletters',
    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('newsletters_id_seq'::regclass)"), autoincrement=True, nullable=False),
    sa.Column('subject', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('sent_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('sent_by', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('recipient_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
    sa.Column('success_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
    sa.Column('failure_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('id', name='newsletters_pkey'),
    postgresql_ignore_search_path=False
    )
    with op.batch_alter_table('newsletters', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('idx_newsletters_sent_at'), [sa.literal_column('sent_at DESC')], unique=False)

    op.create_table('email_deliveries',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('newsletter_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('sent_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('error_message', sa.TEXT(), autoincrement=False, nullable=True),
    sa.CheckConstraint("status = ANY (ARRAY['sent'::text, 'failed'::text, 'bounced'::text])", name=op.f('email_deliveries_status_check')),
    sa.ForeignKeyConstraint(['newsletter_id'], ['newsletters.id'], name=op.f('email_deliveries_newsletter_id_fkey')),
    sa.PrimaryKeyConstraint('id', name=op.f('email_deliveries_pkey'))
    )
    op.create_table('admin_users',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('username', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('password', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('last_login', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
    sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('admin_users_pkey')),
    sa.UniqueConstraint('username', name=op.f('admin_users_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    op.create_table('subscribers',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('subscribed_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.Column('status', sa.TEXT(), server_default=sa.text("'active'::text"), autoincrement=False, nullable=True),
    sa.Column('source', sa.TEXT(), server_default=sa.text("'manual'::text"), autoincrement=False, nullable=True),
    sa.CheckConstraint("status = ANY (ARRAY['active'::text, 'unsubscribed'::text])", name=op.f('subscribers_status_check')),
    sa.PrimaryKeyConstraint('id', name=op.f('subscribers_pkey')),
    sa.UniqueConstraint('email', name=op.f('subscribers_email_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    with op.batch_alter_table('subscribers', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('idx_subscribers_status'), ['status'], unique=False)
        batch_op.create_index(batch_op.f('idx_subscribers_email'), ['email'], unique=False)
        batch_op.create_index(batch_op.f('idx_subscribers_created_at'), [sa.literal_column('created_at DESC')], unique=False)

    op.create_table('admins',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('username', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
    sa.Column('password_hash', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('admins_pkey')),
    sa.UniqueConstraint('username', name=op.f('admins_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    # ### end Alembic commands ###
@@ -1,40 +0,0 @@
package models

import (
	"golang.org/x/crypto/bcrypt"
	"gorm.io/gorm"
)

type User struct {
	ID       uint   `gorm:"primaryKey" json:"id"`
	Username string `gorm:"unique;not null;size:80" json:"username"`
	Email    string `gorm:"unique;not null;size:255" json:"email"` // Add this line
	Password string `gorm:"not null;size:255" json:"-"`

	Profile *UserProfile `gorm:"constraint:OnDelete:CASCADE;" json:"profile,omitempty"`
}

func (u *User) SetPassword(password string) error {
	hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
	if err != nil {
		return err
	}
	u.Password = string(hashedPassword)
	return nil
}

func (u *User) CheckPassword(password string) bool {
	err := bcrypt.CompareHashAndPassword([]byte(u.Password), []byte(password))
	return err == nil
}

func (u *User) AfterCreate(tx *gorm.DB) error {
	profile := UserProfile{
		UserID:         u.ID,
		FirstName:      "",
		LastName:       "",
		Bio:            "",
		ProfilePicture: "",
	}
	return tx.Create(&profile).Error
}
@@ -1,12 +0,0 @@
package models

type UserProfile struct {
	ID             uint   `gorm:"primaryKey" json:"id"`
	UserID         uint   `gorm:"not null" json:"user_id"`
	FirstName      string `gorm:"size:80;not null" json:"first_name"`
	LastName       string `gorm:"size:80;not null" json:"last_name"`
	Bio            string `gorm:"type:text" json:"bio"`
	ProfilePicture string `gorm:"size:255" json:"profile_picture"`

	User *User `gorm:"foreignKey:UserID" json:"user,omitempty"`
}
@@ -1,8 +0,0 @@
Flask
flask_bcrypt
flask_cors
flask_sqlalchemy
python-dotenv
werkzeug
psycopg2-binary
Flask-Migrate
@@ -1,89 +0,0 @@
package routes

import (
	"net/http"

	"github.com/gin-contrib/sessions"
	"github.com/gin-gonic/gin"
	"gorm.io/gorm"

	"github.com/rideaware/rideaware-api/services"
)

func RegisterAuthRoutes(r *gin.Engine, db *gorm.DB) {
	userService := services.NewUserService(db)

	auth := r.Group("/auth")
	{
		auth.POST("/signup", signup(userService))
		auth.POST("/login", login(userService))
		auth.POST("/logout", logout())
	}
}

func signup(userService *services.UserService) gin.HandlerFunc {
	return func(c *gin.Context) {
		var req struct {
			Username string `json:"username" binding:"required"`
			Email    string `json:"email" binding:"required"`
			Password string `json:"password" binding:"required"`
		}

		if err := c.ShouldBindJSON(&req); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
			return
		}

		user, err := userService.CreateUser(req.Username, req.Email, req.Password)
		if err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
			return
		}

		c.JSON(http.StatusCreated, gin.H{
			"message":  "User created successfully",
			"username": user.Username,
			"email":    user.Email,
		})
	}
}

func login(userService *services.UserService) gin.HandlerFunc {
	return func(c *gin.Context) {
		var req struct {
			Username string `json:"username" binding:"required"`
			Password string `json:"password" binding:"required"`
		}

		if err := c.ShouldBindJSON(&req); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}

		user, err := userService.VerifyUser(req.Username, req.Password)
		if err != nil {
			c.JSON(http.StatusUnauthorized, gin.H{"error": err.Error()})
			return
		}

		// Set session
		session := sessions.Default(c)
		session.Set("user_id", user.ID)
		session.Save()

		c.JSON(http.StatusOK, gin.H{
			"message": "Login successful",
			"user_id": user.ID,
		})
	}
}

func logout() gin.HandlerFunc {
	return func(c *gin.Context) {
		session := sessions.Default(c)
		session.Clear()
		session.Save()

		c.JSON(http.StatusOK, gin.H{"message": "Logout successful"})
	}
}
@@ -1,8 +0,0 @@
#!/bin/bash
set -e

echo "Running database migrations..."
flask db upgrade

echo "Starting application..."
exec "$@"
server.py
@@ -1,33 +0,0 @@
import os
from flask import Flask
from flask_cors import CORS
from dotenv import load_dotenv
from flask_migrate import Migrate
from flask.cli import FlaskGroup

from models import db, init_db
from routes.user_auth import auth

load_dotenv()

app = Flask(__name__)
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

CORS(app)

init_db(app)
migrate = Migrate(app, db)
app.register_blueprint(auth.auth_bp)


@app.route("/health")
def health_check():
    """Health check endpoint."""
    return "OK", 200

cli = FlaskGroup(app)

if __name__ == "__main__":
    cli()
@@ -1,34 +0,0 @@
package services

import (
	"fmt"
	"net/smtp"
	"os"
)

type EmailService struct {
	smtpHost     string
	smtpPort     string
	smtpUser     string
	smtpPassword string
}

func NewEmailService() *EmailService {
	return &EmailService{
		smtpHost:     os.Getenv("SMTP_SERVER"),
		smtpPort:     os.Getenv("SMTP_PORT"),
		smtpUser:     os.Getenv("SMTP_USER"),
		smtpPassword: os.Getenv("SMTP_PASSWORD"),
	}
}

func (e *EmailService) SendEmail(to, subject, body string) error {
	from := e.smtpUser

	msg := fmt.Sprintf("From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s", from, to, subject, body)

	auth := smtp.PlainAuth("", e.smtpUser, e.smtpPassword, e.smtpHost)
	addr := fmt.Sprintf("%s:%s", e.smtpHost, e.smtpPort)

	return smtp.SendMail(addr, auth, from, []string{to}, []byte(msg))
}
@@ -1,73 +0,0 @@
package services

import (
	"errors"
	"log"
	"strings"

	"github.com/rideaware/rideaware-api/models"
	"gorm.io/gorm"
)

type UserService struct {
	db *gorm.DB
}

func NewUserService(db *gorm.DB) *UserService {
	return &UserService{db: db}
}

func (s *UserService) CreateUser(username, email, password string) (*models.User, error) {
	if username == "" || email == "" || password == "" {
		return nil, errors.New("username, email, and password are required")
	}

	if len(username) < 3 || len(password) < 8 {
		return nil, errors.New("username must be at least 3 characters and password must be at least 8 characters")
	}

	// Basic email validation
	if !strings.Contains(email, "@") {
		return nil, errors.New("invalid email format")
	}

	// Check if user exists (by username or email)
	var existingUser models.User
	if err := s.db.Where("username = ? OR email = ?", username, email).First(&existingUser).Error; err == nil {
		return nil, errors.New("user with this username or email already exists")
	}

	// Create new user
	user := models.User{
		Username: username,
		Email:    email,
	}
	if err := user.SetPassword(password); err != nil {
		log.Printf("Error hashing password: %v", err)
		return nil, errors.New("could not create user")
	}

	if err := s.db.Create(&user).Error; err != nil {
		log.Printf("Error creating user: %v", err)
		return nil, errors.New("could not create user")
	}

	return &user, nil
}

func (s *UserService) VerifyUser(username, password string) (*models.User, error) {
	var user models.User
	// Allow login with either username or email
	if err := s.db.Where("username = ? OR email = ?", username, username).First(&user).Error; err != nil {
		log.Printf("User not found: %s", username)
		return nil, errors.New("invalid username or password")
	}

	if !user.CheckPassword(password) {
		log.Printf("Invalid password for user: %s", username)
		return nil, errors.New("invalid username or password")
	}

	log.Printf("User verified: %s", username)
	return &user, nil
}