Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions alembic.ini
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What exactly does this file do? I think alembic has already been initialized, but maybe we were missing this file?

Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Alembic configuration for this project.
# Placed at the repository root so `alembic` / Flask-Migrate can locate the
# migration environment without extra flags.
[alembic]
# Directory that holds env.py and the versions/ folder of migration scripts.
script_location = migrations

# --- Python logging setup (standard logging.config.fileConfig format) ---
[loggers]
keys = root,alembic

[handlers]
keys = console

[formatters]
keys = generic

# Root logger: everything at DEBUG and above goes to the console handler.
[logger_root]
level = DEBUG
handlers = console
qualname =

# Console handler writes to stderr so log output does not mix with stdout data.
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = DEBUG
formatter = generic

# Minimal one-line log format, e.g. "INFO: Running upgrade ...".
[formatter_generic]
format = %(levelname)s: %(message)s
83 changes: 67 additions & 16 deletions app.py
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Did you mean to move scrape_classes out of the if statement? This will cause scrape_classes to run during migrations, which might cause an error because the migration is not complete before we run the scrape.

Original file line number Diff line number Diff line change
@@ -1,7 +1,13 @@
import logging
from datetime import datetime
from src.utils.constants import SERVICE_ACCOUNT_PATH, JWT_SECRET_KEY
import os
import sentry_sdk
from flask import Flask, render_template
from flask_apscheduler import APScheduler
from flask_graphql import GraphQLView
from flask_jwt_extended import JWTManager
import sys
from graphene import Schema
from graphql.utils import schema_printer
from src.database import db_session, init_db
Expand All @@ -15,22 +21,42 @@

# Check if we're in migration mode with error handling
try:
FLASK_MIGRATE = os.getenv('FLASK_MIGRATE', 'false').lower() == 'true'
FLASK_MIGRATE = os.getenv("FLASK_MIGRATE", "false").lower() == "true"
except Exception as e:
logging.warning(f"Error reading FLASK_MIGRATE environment variable: {e}. Defaulting to false.")
FLASK_MIGRATE = False

# Only import scraping-related modules if not in migration mode
if not FLASK_MIGRATE:
from flask_apscheduler import APScheduler
from src.scrapers.capacities_scraper import fetch_capacities
from src.scrapers.capacities_scraper import fetch_capacities, update_hourly_capacity
from src.scrapers.reg_hours_scraper import fetch_reg_building, fetch_reg_facility
from src.scrapers.scraper_helpers import clean_past_hours
from src.scrapers.sp_hours_scraper import fetch_sp_facility
from src.scrapers.equipment_scraper import scrape_equipment
from src.scrapers.class_scraper import fetch_classes
from src.scrapers.activities_scraper import fetch_activity
from src.utils.utils import create_gym_table
from src.models.workout_reminder import WorkoutReminder
from src.models.user import User as UserModel
from src.utils.messaging import send_workout_reminders
from src.models.openhours import OpenHours
from src.models.enums import DayOfWeekEnum
import firebase_admin
from firebase_admin import credentials, messaging


# Initialize the Firebase Admin SDK exactly once for this process and return
# the app handle. Safe to call repeatedly: if an app already exists it is
# reused instead of re-initialized (firebase_admin raises on double init).
# NOTE: indentation below reflects the scraped diff text, not the real file.
def initialize_firebase():
# firebase_admin._apps is the SDK's registry of initialized apps; empty means
# no app has been created yet in this process.
if not firebase_admin._apps:
if SERVICE_ACCOUNT_PATH:
# Build credentials from the service-account JSON file path.
cred = credentials.Certificate(SERVICE_ACCOUNT_PATH)
firebase_app = firebase_admin.initialize_app(cred)
else:
# Fail fast: without credentials, messaging calls would fail later anyway.
raise ValueError("GOOGLE_SERVICE_ACCOUNT_PATH environment variable not set.")
else:
# An app was already initialized elsewhere (e.g. another import path) — reuse it.
firebase_app = firebase_admin.get_app()
logging.info("Firebase app created...")
return firebase_app

sentry_sdk.init(
dsn="https://2a96f65cca45d8a7c3ffc3b878d4346b@o4507365244010496.ingest.us.sentry.io/4507850536386560",
Expand All @@ -40,20 +66,24 @@

app = Flask(__name__)
app.debug = True
initialize_firebase()

# Verify all required variables are present
if not all([db_user, db_password, db_name, db_host, db_port]):
raise ValueError(
"Missing required database configuration. "
"Please ensure all database environment variables are set."
"Missing required database configuration. " "Please ensure all database environment variables are set."
)

app.config['SQLALCHEMY_DATABASE_URI'] = db_url
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config["SQLALCHEMY_DATABASE_URI"] = db_url
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
migrate = Migrate(app, db)
schema = Schema(query=Query, mutation=Mutation)
swagger = Swagger(app)

app.config["JWT_SECRET_KEY"] = JWT_SECRET_KEY
jwt = JWTManager(app)


def should_run_initial_scrape():
"""
Check if we should run initial scraping:
Expand All @@ -64,10 +94,11 @@ def should_run_initial_scrape():
if FLASK_MIGRATE:
return False
# Check if we're in the main process
werkzeug_var = os.environ.get('WERKZEUG_RUN_MAIN')
werkzeug_var = os.environ.get("WERKZEUG_RUN_MAIN")
# Logic: if in local, then werkzeug_var exists: so only run when true to prevent double running
# If in Gunicorn, then werkzeug_var is None, so then it will also run
return werkzeug_var is None or werkzeug_var == 'true'
return werkzeug_var is None or werkzeug_var == "true"


# Initialize scheduler only if not in migration mode
if not FLASK_MIGRATE:
Expand All @@ -83,12 +114,15 @@ def should_run_initial_scrape():
def index():
return render_template("index.html")


app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True))


@app.teardown_appcontext
def shutdown_session(exception=None):
db_session.remove()


# Only define scheduler tasks if not in migration mode
if not FLASK_MIGRATE:
# Scrape hours every 15 minutes
Expand All @@ -114,14 +148,31 @@ def scrape_capacities():
except Exception as e:
logging.error(f"Error in scrape_capacities: {e}")

# Scrape classes every hour
@scheduler.task("interval", id="scrape_classes", seconds=3600)
def scrape_classes():
try:
logging.info("Scraping classes from group-fitness-classes...")
fetch_classes(10)
except Exception as e:
logging.error(f"Error in scrape_classes: {e}")

# Scrape classes every hour
@scheduler.task("interval", id="scrape_classes", seconds=3600)
def scrape_classes():
logging.info("Scraping classes from group-fitness-classes...")
fetch_classes(10)


# Send workout reminders every morning at 12:00 AM
@scheduler.task("cron", id="send_reminders", hour=0, minute=0)
def scheduled_job():
logging.info("Sending workout reminders...")
send_workout_reminders()


# Update hourly average capacity every hour
@scheduler.task("cron", id="update_capacity", hour="*")
def scheduled_job():
current_time = datetime.now()
current_day = current_time.strftime("%A").upper()
current_hour = current_time.hour

logging.info(f"Updating hourly average capacity for {current_day}, hour {current_hour}...")
update_hourly_capacity(current_day, current_hour)


# Create database
init_db()
Expand Down
45 changes: 0 additions & 45 deletions migrations/alembic.ini

This file was deleted.

93 changes: 28 additions & 65 deletions migrations/env.py
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there an explanation for the changes to this file? I'm worried that during deployment there will be a lot to debug because of the changes to the migration code.

Original file line number Diff line number Diff line change
@@ -1,95 +1,58 @@
from __future__ import with_statement
import sys
import os
from flask import current_app

import logging
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# This sets up logging with a fallback if the config is missing or incorrect
try:
fileConfig(context.config.config_file_name)
except KeyError:
logging.basicConfig(level=logging.INFO, format="%(levelname)-5.5s [%(name)s] %(message)s")

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
print(f"Using Alembic config file: {context.config.config_file_name}")

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url',
current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
# Add your project directory to the Python path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../')))
from app import app

# Alembic Config object
config = context.config

def run_migrations_offline():
"""Run migrations in 'offline' mode.
# # Configure logging
# fileConfig(config.config_file_name)

This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
# Set SQLAlchemy URL and metadata
with app.app_context():
config.set_main_option('sqlalchemy.url', current_app.config['SQLALCHEMY_DATABASE_URI'])
target_metadata = current_app.extensions['migrate'].db.metadata

Calls to context.execute() here emit the given string to the
script output.

"""
def run_migrations_offline():
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True
)

context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()


def run_migrations_online():
"""Run migrations in 'online' mode.

In this scenario we need to create an Engine
and associate a connection with the context.

"""

# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')

"""Run migrations in 'online' mode."""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool,
)

with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
)

context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()


if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
run_migrations_online()
Loading
Loading