-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdatabase.py
More file actions
74 lines (62 loc) · 2.03 KB
/
database.py
File metadata and controls
74 lines (62 loc) · 2.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
from models import Base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from config import Config
import logging
from typing import Generator
# Configure logging
# Config.LOG_LEVEL is a level name string (e.g. "INFO"); getattr maps it to
# the logging module's numeric constant.
logging.basicConfig(level=getattr(logging, Config.LOG_LEVEL))
logger = logging.getLogger(__name__)
# Create database engine
# pool_pre_ping=True: test pooled connections before handing them out, so
# stale/dropped connections are transparently replaced.
# pool_recycle=300: retire pooled connections older than 5 minutes.
engine = create_engine(
    Config.DATABASE_URL,
    echo=False,
    pool_pre_ping=True,
    pool_recycle=300
)
# Create session factory
# Explicit commits only (autocommit=False) and no automatic flushes; shared
# by get_db() and cleanup_old_data() below.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Create all tables
# NOTE(review): runs at import time and duplicates init_database();
# presumably kept so tables exist even when init_database() is never
# called explicitly — confirm before removing either one.
Base.metadata.create_all(bind=engine)
def get_db() -> Generator[Session, None, None]:
    """Yield a database session, rolling back and re-raising on error.

    Dependency-injection style generator: the session is created on entry,
    yielded to the caller, and always closed in ``finally`` regardless of
    how the caller's work ends.

    Yields:
        Session: an open SQLAlchemy session from ``SessionLocal``.

    Raises:
        Exception: any error raised while the session is in use is logged
            (with traceback), the transaction is rolled back, and the
            exception is re-raised to the caller.
    """
    db = SessionLocal()
    try:
        yield db
    except Exception as e:
        # logger.exception records the full traceback; lazy %-style args
        # avoid formatting the message when the level filters it out.
        logger.exception("Database session error: %s", e)
        db.rollback()
        raise
    finally:
        db.close()
def init_database():
    """Create all ORM tables registered on ``Base`` against the module engine.

    ``create_all`` is idempotent: tables that already exist are left
    untouched, so this is safe to call at every startup.

    Raises:
        Exception: re-raised after logging (with traceback) if table
            creation fails.
    """
    try:
        Base.metadata.create_all(bind=engine)
    except Exception as e:
        # Traceback included via logger.exception; lazy %-formatting.
        logger.exception("Failed to initialize database: %s", e)
        raise
    else:
        # Success message in else so a logging failure inside the try
        # cannot be misreported as an initialization failure.
        logger.info("Database initialized successfully")
def cleanup_old_data(retention_days=None):
    """Delete news articles and scraping logs older than the retention window.

    Args:
        retention_days: age threshold in days. Defaults to
            ``Config.DATA_RETENTION_DAYS`` when ``None`` (backward
            compatible with the original zero-argument call).

    Returns:
        tuple[int, int]: (number of news rows deleted, number of log rows
        deleted).

    Raises:
        Exception: re-raised after rollback if the deletes or commit fail.
    """
    from datetime import datetime, timedelta
    from models import FinancialNews, ScrapingLog

    days = Config.DATA_RETENTION_DAYS if retention_days is None else retention_days
    # NOTE(review): naive local time; assumes published_date/start_time are
    # stored as naive local timestamps too — confirm against the models.
    cutoff_date = datetime.now() - timedelta(days=days)
    db = SessionLocal()
    try:
        # Bulk deletes; Query.delete() returns the matched row count.
        deleted_news = db.query(FinancialNews).filter(
            FinancialNews.published_date < cutoff_date
        ).delete()
        deleted_logs = db.query(ScrapingLog).filter(
            ScrapingLog.start_time < cutoff_date
        ).delete()
        db.commit()
        logger.info(
            "Cleaned up %s old news articles and %s old logs",
            deleted_news,
            deleted_logs,
        )
        return deleted_news, deleted_logs
    except Exception as e:
        # Traceback included via logger.exception; roll back the partial
        # transaction before propagating.
        logger.exception("Error during cleanup: %s", e)
        db.rollback()
        raise
    finally:
        db.close()