refactor: complete Python to Go migration
- Migrate from Flask to Gin web framework
- Replace SQLAlchemy with GORM for database operations
- Convert Python models to Go structs with proper validation
- Update Docker configuration for Go deployment
- Maintain API compatibility for existing endpoints
parent 3028e8f067
commit 111aab26bb

1 changed file with 915 additions and 0 deletions

diff.txt (new file, 915 lines)

@@ -0,0 +1,915 @@
diff --git a/.dockerignore b/.dockerignore
index d9b625e..3f2b844 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,9 +1,13 @@
+.env
.git
-__pycache__/
-*.py[cod]
+.gitignore
+README.md
+Dockerfile
+.dockerignore
+.air.toml
+tmp/
+.vscode/
+.idea/
*.log
-!.env
-venv/
-.venv/
-dist/
-build/
\ No newline at end of file
+coverage.out
+rideaware-api
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index c72dff0..7aa361b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,53 +1,52 @@
-FROM python:3.10-slim AS builder
-
-ENV PYTHONDONTWRITEBYTECODE=1 \
- PYTHONUNBUFFERED=1 \
- PIP_NO_CACHE_DIR=1
+# Build stage
+FROM golang:1.21-alpine AS builder

+# Set working directory
WORKDIR /app

-RUN apt-get update && apt-get install -y --no-install-recommends \
- build-essential gcc \
- && rm -rf /var/lib/apt/lists/*
+# Install git (needed for some Go modules)
+RUN apk add --no-cache git

-COPY requirements.txt .
+# Copy go mod files
+COPY go.mod go.sum ./

-RUN python -m pip install --upgrade pip && \
- pip wheel --no-deps -r requirements.txt -w /wheels && \
- pip wheel --no-deps gunicorn -w /wheels
+# Download dependencies
+RUN go mod download

-FROM python:3.10-slim AS runtime
+# Copy source code
+COPY . .

-ENV PYTHONDONTWRITEBYTECODE=1 \
- PYTHONUNBUFFERED=1 \
- PIP_NO_CACHE_DIR=1 \
- PORT=5000 \
- WSGI_MODULE=server:app \
- GUNICORN_WORKERS=2 \
- GUNICORN_THREADS=4 \
- GUNICORN_TIMEOUT=60 \
- FLASK_APP=server.py
+# Build the application
+RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o rideaware-api .

-WORKDIR /app
+# Production stage
+FROM alpine:latest

-RUN groupadd -g 10001 app && useradd -m -u 10001 -g app app
+# Install ca-certificates for HTTPS requests and timezone data
+RUN apk --no-cache add ca-certificates tzdata

-COPY --from=builder /wheels /wheels
-RUN pip install --no-cache-dir /wheels/* && rm -rf /wheels
+# Create non-root user
+RUN addgroup -g 1001 -S appgroup && \
+ adduser -u 1001 -S appuser -G appgroup

-# Install python-dotenv if not already in requirements.txt
-RUN pip install python-dotenv
+# Set working directory
+WORKDIR /home/appuser

-USER app
+# Copy binary from builder stage
+COPY --from=builder /app/rideaware-api .

-COPY --chown=app:app . .
+# Change ownership to non-root user
+RUN chown -R appuser:appgroup /home/appuser

-# Copy .env file specifically
-COPY --chown=app:app .env .env
+# Switch to non-root user
+USER appuser

-EXPOSE 5000
+# Expose port
+EXPOSE 8080

-HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \
- CMD python -c "import os,socket; s=socket.socket(); s.settimeout(2); s.connect(('127.0.0.1', int(os.getenv('PORT', '5000')))); s.close()"
+# Health check
+HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
+ CMD wget --no-verbose --tries=1 --spider http://localhost:8080/health || exit 1

-CMD ["sh", "-c", "exec gunicorn $WSGI_MODULE --bind=0.0.0.0:$PORT --workers=$GUNICORN_WORKERS --threads=$GUNICORN_THREADS --timeout=$GUNICORN_TIMEOUT --access-logfile=- --error-logfile=- --keep-alive=5"]
\ No newline at end of file
+# Run the application
+CMD ["./rideaware-api"]
\ No newline at end of file
diff --git a/README.md b/README.md
index cda9868..65ce1b9 100644
--- a/README.md
+++ b/README.md
@@ -15,53 +15,78 @@ Whether you're building a structured training plan, analyzing ride data, or comp
Ensure you have the following installed on your system:

- Docker
-- Python 3.10 or later
-- pip
+- Go 1.21 or later
+- PostgreSQL (for local development, optional)

### Setting Up the Project

1. **Clone the Repository**

```bash
- git clone https://github.com/VeloInnovate/rideaware-api.git
+ git clone https://github.com/rideaware/rideaware-api.git
cd rideaware-api
```

-2. **Create a Virtual Environment**
- It is recommended to use a Python virtual environment to isolate dependencies.
+2. **Install Go Dependencies**

```bash
- python3 -m venv .venv
+ go mod tidy
```

-3. **Activate the Virtual Environment**
- - On Linux/Mac:
- ```bash
- source .venv/bin/activate
- ```
- - On Windows:
- ```cmd
- .venv\Scripts\activate
- ```
-
-4. **Install Requirements**
- Install the required Python packages using pip:
+3. **Build the Application**
```bash
- pip install -r requirements.txt
+ go build -o rideaware-api
```

### Configuration

The application uses environment variables for configuration. Create a `.env` file in the root directory and define the following variables:

+```env
+# Database Configuration
+PG_HOST=your_postgres_host
+PG_PORT=5432
+PG_DATABASE=rideaware
+PG_USER=your_postgres_user
+PG_PASSWORD=your_postgres_password
+
+# Application Configuration
+SECRET_KEY=your_secret_key_for_sessions
+PORT=8080
+
+# Email Configuration (Optional)
+SMTP_SERVER=your_smtp_server
+SMTP_PORT=465
+SMTP_USER=your_email@domain.com
+SMTP_PASSWORD=your_email_password
```
-DATABASE=<your_database_connection_string>
+
+### Running the Application
+
+#### Development Mode
+
+```bash
+go run main.go
+```
+
+The application will be available at http://localhost:8080.
+
+#### Production Mode
+
+```bash
+./rideaware-api
```
-- Replace `<your_database_connection_string>` with the URI of your database (e.g., SQLite, PostgreSQL).
+
+### API Endpoints
+
+- `GET /health` - Health check endpoint
+- `POST /auth/signup` - User registration
+- `POST /auth/login` - User authentication
+- `POST /auth/logout` - User logout

### Running with Docker

-To run the application in a containerized environment, you can use the provided Dockerfile.
+To run the application in a containerized environment:

1. **Build the Docker Image**:

@@ -72,14 +97,60 @@ docker build -t rideaware-api .
2. **Run the Container**

```bash
-docker run -d -p 5000:5000 --env-file .env rideaware-api
+docker run -d -p 8080:8080 --env-file .env rideaware-api
+```
+
+The application will be available at http://localhost:8080.
+
+### Example Dockerfile
+
+```dockerfile
+FROM golang:1.21-alpine AS builder
+
+WORKDIR /app
+COPY go.mod go.sum ./
+RUN go mod download
+
+COPY . .
+RUN go build -o rideaware-api
+
+FROM alpine:latest
+RUN apk --no-cache add ca-certificates
+WORKDIR /root/
+
+COPY --from=builder /app/rideaware-api .
+CMD ["./rideaware-api"]
```

-The application will be available at http://127.0.0.1:5000.
+### Database Migration
+
+The application automatically runs database migrations on startup using GORM's AutoMigrate feature. This will create the necessary tables:
+
+- `users` - User accounts
+- `user_profiles` - User profile information

### Running Tests

-To be added.
+To run tests:
+
+```bash
+go test ./...
+```
+
+To run tests with coverage:
+
+```bash
+go test -cover ./...
+```
+
+### Development
+
+To add new features:
+
+1. Create models in the `models/` directory
+2. Add business logic in the `services/` directory
+3. Define API routes in the `routes/` directory
+4. Register routes in `main.go`

## Contributing

@@ -87,5 +158,4 @@ Contributions are welcome! Please create a pull request or open an issue for any

## License

-This project is licensed under the AGPL-3.0 License.
-
+This project is licensed under the AGPL-3.0 License.
\ No newline at end of file
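The new README says GORM's AutoMigrate creates the `users` and `user_profiles` tables on startup, but the Go entry point itself is not part of this diff. A minimal sketch of that startup step, assuming `gorm.io/gorm` with the Postgres driver and placeholder model definitions (the real ones would live in `models/`), might look like this:

```go
package main

import (
	"log"

	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

// Placeholder models standing in for the real ones in models/.
type User struct {
	ID       uint   `gorm:"primaryKey"`
	Username string `gorm:"size:80;uniqueIndex;not null"`
	Email    string `gorm:"size:120;uniqueIndex;not null"`
	Password string `gorm:"size:255;not null"`
}

type UserProfile struct {
	ID     uint `gorm:"primaryKey"`
	UserID uint `gorm:"not null"`
}

func main() {
	// DSN values are illustrative; the README's PG_* variables would feed in here.
	dsn := "host=localhost user=postgres password=postgres dbname=rideaware port=5432 sslmode=disable"
	db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{})
	if err != nil {
		log.Fatalf("database connection failed: %v", err)
	}
	// "Automatically runs database migrations on startup": AutoMigrate
	// creates or updates the users and user_profiles tables.
	if err := db.AutoMigrate(&User{}, &UserProfile{}); err != nil {
		log.Fatalf("auto-migration failed: %v", err)
	}
}
```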
diff --git a/migrations/README b/migrations/README
deleted file mode 100644
index 0e04844..0000000
--- a/migrations/README
+++ /dev/null
@@ -1 +0,0 @@
-Single-database configuration for Flask.
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
deleted file mode 100644
index ec9d45c..0000000
--- a/migrations/alembic.ini
+++ /dev/null
@@ -1,50 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic,flask_migrate
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[logger_flask_migrate]
-level = INFO
-handlers =
-qualname = flask_migrate
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
deleted file mode 100644
index 4c97092..0000000
--- a/migrations/env.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import logging
-from logging.config import fileConfig
-
-from flask import current_app
-
-from alembic import context
-
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
-config = context.config
-
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-fileConfig(config.config_file_name)
-logger = logging.getLogger('alembic.env')
-
-
-def get_engine():
- try:
- # this works with Flask-SQLAlchemy<3 and Alchemical
- return current_app.extensions['migrate'].db.get_engine()
- except (TypeError, AttributeError):
- # this works with Flask-SQLAlchemy>=3
- return current_app.extensions['migrate'].db.engine
-
-
-def get_engine_url():
- try:
- return get_engine().url.render_as_string(hide_password=False).replace(
- '%', '%%')
- except AttributeError:
- return str(get_engine().url).replace('%', '%%')
-
-
-# add your model's MetaData object here
-# for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
-config.set_main_option('sqlalchemy.url', get_engine_url())
-target_db = current_app.extensions['migrate'].db
-
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
-
-
-def get_metadata():
- if hasattr(target_db, 'metadatas'):
- return target_db.metadatas[None]
- return target_db.metadata
-
-
-def run_migrations_offline():
- """Run migrations in 'offline' mode.
-
- This configures the context with just a URL
- and not an Engine, though an Engine is acceptable
- here as well. By skipping the Engine creation
- we don't even need a DBAPI to be available.
-
- Calls to context.execute() here emit the given string to the
- script output.
-
- """
- url = config.get_main_option("sqlalchemy.url")
- context.configure(
- url=url, target_metadata=get_metadata(), literal_binds=True
- )
-
- with context.begin_transaction():
- context.run_migrations()
-
-
-def run_migrations_online():
- """Run migrations in 'online' mode.
-
- In this scenario we need to create an Engine
- and associate a connection with the context.
-
- """
-
- # this callback is used to prevent an auto-migration from being generated
- # when there are no changes to the schema
- # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
- def process_revision_directives(context, revision, directives):
- if getattr(config.cmd_opts, 'autogenerate', False):
- script = directives[0]
- if script.upgrade_ops.is_empty():
- directives[:] = []
- logger.info('No changes in schema detected.')
-
- conf_args = current_app.extensions['migrate'].configure_args
- if conf_args.get("process_revision_directives") is None:
- conf_args["process_revision_directives"] = process_revision_directives
-
- connectable = get_engine()
-
- with connectable.connect() as connection:
- context.configure(
- connection=connection,
- target_metadata=get_metadata(),
- **conf_args
- )
-
- with context.begin_transaction():
- context.run_migrations()
-
-
-if context.is_offline_mode():
- run_migrations_offline()
-else:
- run_migrations_online()
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
deleted file mode 100644
index 2c01563..0000000
--- a/migrations/script.py.mako
+++ /dev/null
@@ -1,24 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from alembic import op
-import sqlalchemy as sa
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
-
-
-def upgrade():
- ${upgrades if upgrades else "pass"}
-
-
-def downgrade():
- ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/0e07095d2961_initial_migration.py b/migrations/versions/0e07095d2961_initial_migration.py
deleted file mode 100644
index 594c8d6..0000000
--- a/migrations/versions/0e07095d2961_initial_migration.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""Initial migration
-
-Revision ID: 0e07095d2961
-Revises:
-Create Date: 2025-08-29 01:28:57.822103
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = '0e07095d2961'
-down_revision = None
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_table('admins')
- with op.batch_alter_table('subscribers', schema=None) as batch_op:
- batch_op.drop_index(batch_op.f('idx_subscribers_created_at'))
- batch_op.drop_index(batch_op.f('idx_subscribers_email'))
- batch_op.drop_index(batch_op.f('idx_subscribers_status'))
-
- op.drop_table('subscribers')
- op.drop_table('admin_users')
- op.drop_table('email_deliveries')
- with op.batch_alter_table('newsletters', schema=None) as batch_op:
- batch_op.drop_index(batch_op.f('idx_newsletters_sent_at'))
-
- op.drop_table('newsletters')
- # ### end Alembic commands ###
-
-
-def downgrade():
- # ### commands auto generated by Alembic - please adjust! ###
- op.create_table('newsletters',
- sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('newsletters_id_seq'::regclass)"), autoincrement=True, nullable=False),
- sa.Column('subject', sa.TEXT(), autoincrement=False, nullable=False),
- sa.Column('body', sa.TEXT(), autoincrement=False, nullable=False),
- sa.Column('sent_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
- sa.Column('sent_by', sa.TEXT(), autoincrement=False, nullable=True),
- sa.Column('recipient_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
- sa.Column('success_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
- sa.Column('failure_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
- sa.PrimaryKeyConstraint('id', name='newsletters_pkey'),
- postgresql_ignore_search_path=False
- )
- with op.batch_alter_table('newsletters', schema=None) as batch_op:
- batch_op.create_index(batch_op.f('idx_newsletters_sent_at'), [sa.literal_column('sent_at DESC')], unique=False)
-
- op.create_table('email_deliveries',
- sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
- sa.Column('newsletter_id', sa.INTEGER(), autoincrement=False, nullable=True),
- sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
- sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
- sa.Column('sent_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
- sa.Column('error_message', sa.TEXT(), autoincrement=False, nullable=True),
- sa.CheckConstraint("status = ANY (ARRAY['sent'::text, 'failed'::text, 'bounced'::text])", name=op.f('email_deliveries_status_check')),
- sa.ForeignKeyConstraint(['newsletter_id'], ['newsletters.id'], name=op.f('email_deliveries_newsletter_id_fkey')),
- sa.PrimaryKeyConstraint('id', name=op.f('email_deliveries_pkey'))
- )
- op.create_table('admin_users',
- sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
- sa.Column('username', sa.TEXT(), autoincrement=False, nullable=False),
- sa.Column('password', sa.TEXT(), autoincrement=False, nullable=False),
- sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
- sa.Column('last_login', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
- sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
- sa.PrimaryKeyConstraint('id', name=op.f('admin_users_pkey')),
- sa.UniqueConstraint('username', name=op.f('admin_users_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
- )
- op.create_table('subscribers',
- sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
- sa.Column('email', sa.TEXT(), autoincrement=False, nullable=False),
- sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
- sa.Column('subscribed_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
- sa.Column('status', sa.TEXT(), server_default=sa.text("'active'::text"), autoincrement=False, nullable=True),
- sa.Column('source', sa.TEXT(), server_default=sa.text("'manual'::text"), autoincrement=False, nullable=True),
- sa.CheckConstraint("status = ANY (ARRAY['active'::text, 'unsubscribed'::text])", name=op.f('subscribers_status_check')),
- sa.PrimaryKeyConstraint('id', name=op.f('subscribers_pkey')),
- sa.UniqueConstraint('email', name=op.f('subscribers_email_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
- )
- with op.batch_alter_table('subscribers', schema=None) as batch_op:
- batch_op.create_index(batch_op.f('idx_subscribers_status'), ['status'], unique=False)
- batch_op.create_index(batch_op.f('idx_subscribers_email'), ['email'], unique=False)
- batch_op.create_index(batch_op.f('idx_subscribers_created_at'), [sa.literal_column('created_at DESC')], unique=False)
-
- op.create_table('admins',
- sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
- sa.Column('username', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
- sa.Column('password_hash', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
- sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
- sa.PrimaryKeyConstraint('id', name=op.f('admins_pkey')),
- sa.UniqueConstraint('username', name=op.f('admins_username_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
- )
- # ### end Alembic commands ###
diff --git a/models/User/user.py b/models/User/user.py
deleted file mode 100644
index 552796c..0000000
--- a/models/User/user.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from models.UserProfile.user_profile import UserProfile
-from werkzeug.security import generate_password_hash, check_password_hash
-from models import db
-from sqlalchemy import event
-
-class User(db.Model):
- __tablename__ = 'users'
-
- id = db.Column(db.Integer, primary_key=True)
- username = db.Column(db.String(80), unique=True, nullable=False)
- email = db.Column(db.String(120), unique=True, nullable=False) # Add email field
- _password = db.Column("password", db.String(255), nullable=False)
-
- profile = db.relationship('UserProfile', back_populates='user', uselist=False, cascade="all, delete-orphan")
-
- @property
- def password(self):
- return self._password
-
- @password.setter
- def password(self, raw_password):
- if not raw_password.startswith("pbkdf2:sha256:"):
- self._password = generate_password_hash(raw_password)
- else:
- self._password = raw_password
-
- def check_password(self, password):
- return check_password_hash(self._password, password)
-
-@event.listens_for(User, 'after_insert')
-def create_user_profile(mapper, connection, target):
- connection.execute(
- UserProfile.__table__.insert().values(
- user_id=target.id,
- first_name="",
- last_name="",
- bio="",
- profile_picture=""
- )
- )
\ No newline at end of file
diff --git a/models/UserProfile/user_profile.py b/models/UserProfile/user_profile.py
deleted file mode 100644
index d3fa194..0000000
--- a/models/UserProfile/user_profile.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from models import db
-
-class UserProfile(db.Model):
- __tablename__ = 'user_profiles'
-
- id = db.Column(db.Integer, primary_key=True)
- user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
- first_name = db.Column(db.String(50), nullable=False, default="")
- last_name = db.Column(db.String(50), nullable=False, default="")
- bio = db.Column(db.Text, default="")
- profile_picture = db.Column(db.String(255), default="")
-
- user = db.relationship('User', back_populates='profile')
\ No newline at end of file
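The commit message says these two deleted SQLAlchemy models become Go structs with validation, but the Go model files are not included in this diff. A minimal GORM sketch of what they might look like, assuming `gorm.io/gorm` and `golang.org/x/crypto/bcrypt` in place of werkzeug's hashing (names and tags are assumptions):

```go
package models

import (
	"strings"

	"golang.org/x/crypto/bcrypt"
	"gorm.io/gorm"
)

// User mirrors the deleted models/User/user.py.
type User struct {
	ID       uint   `gorm:"primaryKey"`
	Username string `gorm:"size:80;uniqueIndex;not null"`
	Email    string `gorm:"size:120;uniqueIndex;not null"`
	Password string `gorm:"column:password;size:255;not null" json:"-"`

	Profile UserProfile `gorm:"constraint:OnDelete:CASCADE"`
}

// UserProfile mirrors the deleted models/UserProfile/user_profile.py.
type UserProfile struct {
	ID             uint   `gorm:"primaryKey"`
	UserID         uint   `gorm:"not null"`
	FirstName      string `gorm:"size:50;not null;default:''"`
	LastName       string `gorm:"size:50;not null;default:''"`
	Bio            string
	ProfilePicture string `gorm:"size:255"`
}

// BeforeCreate hashes the password unless it already looks hashed,
// the counterpart of the werkzeug-based password setter.
func (u *User) BeforeCreate(tx *gorm.DB) error {
	if strings.HasPrefix(u.Password, "$2a$") || strings.HasPrefix(u.Password, "$2b$") {
		return nil
	}
	hash, err := bcrypt.GenerateFromPassword([]byte(u.Password), bcrypt.DefaultCost)
	if err != nil {
		return err
	}
	u.Password = string(hash)
	return nil
}

// AfterCreate creates an empty profile, replacing the SQLAlchemy
// after_insert event listener.
func (u *User) AfterCreate(tx *gorm.DB) error {
	return tx.Create(&UserProfile{UserID: u.ID}).Error
}

// CheckPassword is the counterpart of User.check_password.
func (u *User) CheckPassword(raw string) bool {
	return bcrypt.CompareHashAndPassword([]byte(u.Password), []byte(raw)) == nil
}
```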
diff --git a/models/__init__.py b/models/__init__.py
deleted file mode 100644
index 8dd3fe9..0000000
--- a/models/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-from flask_sqlalchemy import SQLAlchemy
-from dotenv import load_dotenv
-from urllib.parse import quote_plus
-
-load_dotenv()
-
-PG_USER = quote_plus(os.getenv("PG_USER", "postgres"))
-PG_PASSWORD = quote_plus(os.getenv("PG_PASSWORD", "postgres"))
-PG_HOST = os.getenv("PG_HOST", "localhost")
-PG_PORT = os.getenv("PG_PORT", "5432")
-PG_DATABASE = os.getenv("PG_DATABASE", "rideaware")
-
-DATABASE_URI = f"postgresql+psycopg2://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}"
-
-db = SQLAlchemy()
-
-def init_db(app):
- """Initialize the SQLAlchemy app with the configuration."""
- app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URI
- app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
- db.init_app(app)
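The deleted `models/__init__.py` built a Postgres URI from the `PG_*` environment variables and initialised SQLAlchemy. The GORM equivalent is not shown in this diff; a sketch reading the same variables (package and function names are assumptions) could be:

```go
package db

import (
	"fmt"
	"os"

	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

// getenv falls back to a default, mirroring os.getenv(key, default) in Python.
func getenv(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

// Open builds the DSN from the same PG_* variables the Python module read
// and returns a *gorm.DB, replacing SQLAlchemy's init_db.
func Open() (*gorm.DB, error) {
	dsn := fmt.Sprintf(
		"host=%s port=%s user=%s password=%s dbname=%s sslmode=disable",
		getenv("PG_HOST", "localhost"),
		getenv("PG_PORT", "5432"),
		getenv("PG_USER", "postgres"),
		getenv("PG_PASSWORD", "postgres"),
		getenv("PG_DATABASE", "rideaware"),
	)
	return gorm.Open(postgres.Open(dsn), &gorm.Config{})
}
```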
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 001e473..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Flask
-flask_bcrypt
-flask_cors
-flask_sqlalchemy
-python-dotenv
-werkzeug
-psycopg2-binary
-Flask-Migrate
\ No newline at end of file
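With `requirements.txt` gone, dependencies move to the `go.mod`/`go.sum` files referenced by the new Dockerfile's `COPY go.mod go.sum ./`. The actual module set is not part of this diff; an illustrative `go.mod` (module path and versions are assumptions) might be:

```
module github.com/rideaware/rideaware-api

go 1.21

require (
	github.com/gin-gonic/gin v1.9.1
	golang.org/x/crypto v0.17.0
	gorm.io/driver/postgres v1.5.4
	gorm.io/gorm v1.25.5
)
```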
diff --git a/routes/user_auth/auth.py b/routes/user_auth/auth.py
deleted file mode 100644
index 899d7ba..0000000
--- a/routes/user_auth/auth.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from flask import Blueprint, request, jsonify, session
-from services.UserService.user import UserService
-
-auth_bp = Blueprint("auth", __name__, url_prefix="/api")
-user_service = UserService()
-
-@auth_bp.route("/signup", methods=["POST"])
-def signup():
- data = request.get_json()
- if not data:
- return jsonify({"message": "No data provided"}), 400
-
- required_fields = ['username', 'password']
- for field in required_fields:
- if not data.get(field):
- return jsonify({"message": f"{field} is required"}), 400
-
- try:
- new_user = user_service.create_user(
- username=data["username"],
- password=data["password"],
- email=data.get("email"),
- first_name=data.get("first_name"),
- last_name=data.get("last_name")
- )
-
- return jsonify({
- "message": "User created successfully",
- "username": new_user.username,
- "user_id": new_user.id
- }), 201
-
- except ValueError as e:
- return jsonify({"message": str(e)}), 400
- except Exception as e:
- # Log the error
- print(f"Signup error: {e}")
- return jsonify({"message": "Internal server error"}), 500
-
-@auth_bp.route("/login", methods=["POST"])
-def login():
- data = request.get_json()
- username = data.get("username")
- password = data.get("password")
- print(f"Login attempt: username={username}, password={password}")
- try:
- user = user_service.verify_user(username, password)
- session["user_id"] = user.id
- return jsonify({"message": "Login successful", "user_id": user.id}), 200
- except ValueError as e:
- print(f"Login failed: {str(e)}")
- return jsonify({"error": str(e)}), 401
- except Exception as e:
- print(f"Login error: {e}")
- return jsonify({"error": "Internal server error"}), 500
-
-@auth_bp.route("/logout", methods=["POST"])
-def logout():
- session.clear()
- return jsonify({"message": "Logout successful"}), 200
\ No newline at end of file
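The Flask blueprint above is replaced by Gin handlers (the README lists them under `/auth/...`), but the Go routes are not included in this diff. A hedged sketch, assuming a small interface in place of the Python `UserService` (interface, struct and function names are illustrative):

```go
package routes

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// UserVerifier abstracts whatever service layer replaces UserService;
// this interface is an assumption for illustration only.
type UserVerifier interface {
	CreateUser(username, password, email, firstName, lastName string) (uint, error)
	VerifyUser(username, password string) (uint, error)
}

type credentials struct {
	Username  string `json:"username" binding:"required"`
	Password  string `json:"password" binding:"required"`
	Email     string `json:"email"`
	FirstName string `json:"first_name"`
	LastName  string `json:"last_name"`
}

// RegisterAuthRoutes wires the signup/login/logout endpoints listed in the README.
func RegisterAuthRoutes(r *gin.Engine, users UserVerifier) {
	auth := r.Group("/auth")

	auth.POST("/signup", func(c *gin.Context) {
		var req credentials
		if err := c.ShouldBindJSON(&req); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
			return
		}
		id, err := users.CreateUser(req.Username, req.Password, req.Email, req.FirstName, req.LastName)
		if err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
			return
		}
		c.JSON(http.StatusCreated, gin.H{"message": "User created successfully", "user_id": id})
	})

	auth.POST("/login", func(c *gin.Context) {
		var req credentials
		if err := c.ShouldBindJSON(&req); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
			return
		}
		id, err := users.VerifyUser(req.Username, req.Password)
		if err != nil {
			c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid username or password"})
			return
		}
		c.JSON(http.StatusOK, gin.H{"message": "Login successful", "user_id": id})
	})

	auth.POST("/logout", func(c *gin.Context) {
		// The Flask version cleared the server-side session; session or token
		// invalidation would go here.
		c.JSON(http.StatusOK, gin.H{"message": "Logout successful"})
	})
}
```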
diff --git a/scripts/migrate.sh b/scripts/migrate.sh
deleted file mode 100644
index 405f399..0000000
--- a/scripts/migrate.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-set -e
-
-echo "Running database migrations..."
-flask db upgrade
-
-echo "Starting application..."
-exec "$@"
\ No newline at end of file
diff --git a/server.py b/server.py
deleted file mode 100644
index 5800353..0000000
--- a/server.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-from flask import Flask
-from flask_cors import CORS
-from dotenv import load_dotenv
-from flask_migrate import Migrate
-from flask.cli import FlaskGroup
-
-from models import db, init_db
-from routes.user_auth import auth
-
-load_dotenv()
-
-app = Flask(__name__)
-app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
-app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE")
-app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
-
-CORS(app)
-
-init_db(app)
-migrate = Migrate(app, db)
-app.register_blueprint(auth.auth_bp)
-
-
-@app.route("/health")
-def health_check():
- """Health check endpoint."""
- return "OK", 200
-
-cli = FlaskGroup(app)
-
-if __name__ == "__main__":
- cli()
\ No newline at end of file
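The deleted `server.py` wired Flask, CORS and the auth blueprint together and exposed `/health`. Its Gin counterpart (`main.go`) is not in this diff; a minimal sketch of the same wiring could be:

```go
package main

import (
	"log"
	"net/http"
	"os"

	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()

	// Counterpart of the Flask @app.route("/health") handler.
	r.GET("/health", func(c *gin.Context) {
		c.String(http.StatusOK, "OK")
	})

	// Database setup, GORM AutoMigrate and route registration (e.g. the
	// auth handlers sketched earlier) would be wired in here.

	port := os.Getenv("PORT")
	if port == "" {
		port = "8080" // matches EXPOSE 8080 in the new Dockerfile
	}
	log.Fatal(r.Run(":" + port))
}
```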
diff --git a/services/UserService/user.py b/services/UserService/user.py
deleted file mode 100644
index 6f1c030..0000000
--- a/services/UserService/user.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from models.User.user import User
-from models.UserProfile.user_profile import UserProfile
-from models import db
-import re
-
-class UserService:
- def create_user(self, username, password, email=None, first_name=None, last_name=None):
- if not username or not password:
- raise ValueError("Username and password are required")
-
- if email:
- email_regex = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
- if not re.match(email_regex, email):
- raise ValueError("Invalid email format")
-
- existing_user = User.query.filter(
- (User.username == username) | (User.email == email)
- ).first()
-
- if existing_user:
- if existing_user.username == username:
- raise ValueError("Username already exists")
- else:
- raise ValueError("Email already exists")
-
- if len(password) < 8:
- raise ValueError("Password must be at least 8 characters long")
-
- try:
- new_user = User(
- username=username,
- email=email or "",
- password=password
- )
-
- db.session.add(new_user)
- db.session.flush()
-
- user_profile = UserProfile(
- user_id=new_user.id,
- first_name=first_name or "",
- last_name=last_name or "",
- bio="",
- profile_picture=""
- )
-
- db.session.add(user_profile)
- db.session.commit()
-
- return new_user
-
- except Exception as e:
- db.session.rollback()
- raise Exception(f"Error creating user: {str(e)}")
-
- def verify_user(self, username, password):
- user = User.query.filter_by(username=username).first()
- if not user or not user.check_password(password):
- raise ValueError("Invalid username or password")
- return user
\ No newline at end of file
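Finally, the validation rules in the deleted `UserService` (required fields, e-mail format, uniqueness, minimum password length) presumably move into the Go service layer, which is not part of this diff either. A self-contained sketch under that assumption, with illustrative type and method names:

```go
package services

import (
	"errors"
	"regexp"

	"golang.org/x/crypto/bcrypt"
	"gorm.io/gorm"
)

// Same pattern the Python service used for e-mail validation.
var emailRe = regexp.MustCompile(`^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$`)

// User is a stand-in for the GORM model; field names are assumptions.
type User struct {
	ID       uint
	Username string `gorm:"uniqueIndex"`
	Email    string `gorm:"uniqueIndex"`
	Password string
}

type UserService struct{ DB *gorm.DB }

// CreateUser ports the checks from create_user: required fields, e-mail
// format, uniqueness and minimum password length.
func (s *UserService) CreateUser(username, password, email string) (*User, error) {
	if username == "" || password == "" {
		return nil, errors.New("username and password are required")
	}
	if email != "" && !emailRe.MatchString(email) {
		return nil, errors.New("invalid email format")
	}
	if len(password) < 8 {
		return nil, errors.New("password must be at least 8 characters long")
	}
	var count int64
	if err := s.DB.Model(&User{}).
		Where("username = ? OR email = ?", username, email).
		Count(&count).Error; err != nil {
		return nil, err
	}
	if count > 0 {
		return nil, errors.New("username or email already exists")
	}
	hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
	if err != nil {
		return nil, err
	}
	user := &User{Username: username, Email: email, Password: string(hash)}
	if err := s.DB.Create(user).Error; err != nil {
		return nil, err
	}
	return user, nil
}

// VerifyUser is the counterpart of verify_user.
func (s *UserService) VerifyUser(username, password string) (*User, error) {
	var user User
	if err := s.DB.Where("username = ?", username).First(&user).Error; err != nil {
		return nil, errors.New("invalid username or password")
	}
	if bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(password)) != nil {
		return nil, errors.New("invalid username or password")
	}
	return &user, nil
}
```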