2 Commits

SHA1         Message                Date
4d38c8715e   fixed preferred view   2025-06-04 08:55:31 +02:00
8edd96b671   remove celery          2025-06-04 08:33:04 +02:00
7 changed files with 21 additions and 142 deletions

app.py
View File

@@ -6,7 +6,6 @@ import os
 from models import User, SiteSettings
 from flask_wtf.csrf import generate_csrf
 from routes.room_files import room_files_bp
-from routes.user import user_bp
 from routes.room_members import room_members_bp
 from routes.trash import trash_bp
 from tasks import cleanup_trash
@@ -14,7 +13,6 @@ import click
 from utils import timeago
 from extensions import db, login_manager, csrf
 from utils.email_templates import create_default_templates
-from celery_worker import init_celery, celery
 # Load environment variables
 load_dotenv()
@@ -36,9 +34,6 @@ def create_app():
     login_manager.login_view = 'auth.login'
     csrf.init_app(app)
-    # Initialize Celery
-    init_celery(app)
     @app.context_processor
     def inject_csrf_token():
         return dict(csrf_token=generate_csrf())
@@ -63,12 +58,9 @@ def create_app():
         try:
             # Check database connection
             db.session.execute('SELECT 1')
-            # Check Redis connection
-            celery.control.inspect().ping()
             return jsonify({
                 'status': 'healthy',
-                'database': 'connected',
-                'redis': 'connected'
+                'database': 'connected'
             }), 200
         except Exception as e:
             return jsonify({
@@ -80,7 +72,6 @@ def create_app():
     from routes import init_app
     init_app(app)
     app.register_blueprint(room_files_bp, url_prefix='/api/rooms')
-    app.register_blueprint(user_bp, url_prefix='/api/users')
     app.register_blueprint(room_members_bp, url_prefix='/api/rooms')
     app.register_blueprint(trash_bp, url_prefix='/api/trash')
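With Redis gone, the health endpoint only verifies the database. One thing worth noting when touching this code: db.session.execute('SELECT 1') with a bare string works on SQLAlchemy 1.x but raises on 2.x, where it must be wrapped in text(). A minimal sketch of the slimmed-down check; the /health path is taken from the compose healthcheck below, while the register_health wrapper and the route decorator are assumptions, since the diff only shows the function body:

from flask import Flask, jsonify
from sqlalchemy import text

from extensions import db   # same extensions module app.py imports above

def register_health(app: Flask):
    @app.route('/health')            # path matches the curl healthcheck in the compose file
    def health():
        try:
            # The database is the only dependency the endpoint still checks.
            db.session.execute(text('SELECT 1'))
            return jsonify({'status': 'healthy', 'database': 'connected'}), 200
        except Exception as exc:
            return jsonify({'status': 'unhealthy', 'error': str(exc)}), 500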

View File

@@ -1,51 +0,0 @@
-from celery import Celery
-from flask import current_app
-import os
-import logging
-# Configure logging
-logger = logging.getLogger(__name__)
-# Get Redis URL from environment variable or use default
-REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
-# Configure Celery
-celery = Celery(
-    'docupulse',
-    backend=REDIS_URL,
-    broker=REDIS_URL,
-    # Add some default configuration
-    task_serializer='json',
-    accept_content=['json'],
-    result_serializer='json',
-    timezone='UTC',
-    enable_utc=True,
-    # Add retry configuration
-    task_acks_late=True,
-    task_reject_on_worker_lost=True,
-    task_default_retry_delay=300,  # 5 minutes
-    task_max_retries=3
-)
-def init_celery(app):
-    """Initialize Celery with Flask app context"""
-    celery.conf.update(app.config)
-    class ContextTask(celery.Task):
-        """Celery task that runs within Flask app context"""
-        def __call__(self, *args, **kwargs):
-            with app.app_context():
-                return self.run(*args, **kwargs)
-        def on_failure(self, exc, task_id, args, kwargs, einfo):
-            """Handle task failure"""
-            logger.error(f'Task {task_id} failed: {exc}')
-            super().on_failure(exc, task_id, args, kwargs, einfo)
-        def on_retry(self, exc, task_id, args, kwargs, einfo):
-            """Handle task retry"""
-            logger.warning(f'Task {task_id} is being retried: {exc}')
-            super().on_retry(exc, task_id, args, kwargs, einfo)
-    celery.Task = ContextTask
-    return celery
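app.py still imports cleanup_trash from tasks (see the first app.py hunk above), but with the Celery worker and beat gone nothing schedules it anymore. One hedged option, assuming cleanup_trash is (or has been made) a plain no-argument function, is a Flask CLI command that cron can invoke; app.py already imports click, so this fits the existing tooling:

from app import create_app        # the factory shown in the app.py diff above
from tasks import cleanup_trash   # assumed to take no arguments

app = create_app()

@app.cli.command('cleanup-trash')   # hypothetical name; run as "flask cleanup-trash" from cron
def cleanup_trash_command():
    """Run the former background job synchronously inside the app context."""
    cleanup_trash()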

View File

@@ -13,12 +13,10 @@ services:
       - POSTGRES_USER=postgres
       - POSTGRES_PASSWORD=postgres
       - POSTGRES_DB=docupulse
-      - REDIS_URL=redis://redis:6379/0
     volumes:
-      - uploads:/app/uploads
+      - docupulse_uploads:/app/uploads
     depends_on:
       - db
-      - redis
     restart: unless-stopped
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:5000/health"]
@@ -38,7 +36,7 @@ services:
       - POSTGRES_PASSWORD=postgres
       - POSTGRES_DB=docupulse
     volumes:
-      - postgres_data:/var/lib/postgresql/data
+      - docupulse_postgres_data:/var/lib/postgresql/data
     restart: unless-stopped
     healthcheck:
       test: ["CMD-SHELL", "pg_isready -U postgres"]
@@ -46,46 +44,18 @@ services:
       timeout: 10s
       retries: 3
-  redis:
-    image: redis:7
-    ports:
-      - "6379:6379"
-    restart: unless-stopped
-    healthcheck:
-      test: ["CMD", "redis-cli", "ping"]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-  celery_worker:
-    build: .
-    command: celery -A celery_worker.celery worker --loglevel=info
-    volumes:
-      - .:/app
-    environment:
-      - FLASK_APP=app.py
-      - FLASK_ENV=development
-      - DATABASE_URL=postgresql://postgres:postgres@db:5432/docupulse
-      - REDIS_URL=redis://redis:6379/0
-    depends_on:
-      db:
-        condition: service_healthy
-      redis:
-        condition: service_healthy
-    restart: unless-stopped
-    healthcheck:
-      test: ["CMD", "celery", "-A", "celery_worker.celery", "inspect", "ping"]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-    deploy:
-      resources:
-        limits:
-          cpus: '0.5'
-          memory: 512M
 volumes:
-  postgres_data:
-    name: ${COMPOSE_PROJECT_NAME:-default}_postgres_data
-  uploads:
-    name: ${COMPOSE_PROJECT_NAME:-default}_uploads
+  docupulse_postgres_data:
+    name: docupulse_${COMPOSE_PROJECT_NAME:-default}_postgres_data
+    driver: local
+    driver_opts:
+      type: none
+      device: /var/lib/docupulse/postgres/${COMPOSE_PROJECT_NAME:-default}
+      o: bind
+  docupulse_uploads:
+    name: docupulse_${COMPOSE_PROJECT_NAME:-default}_uploads
+    driver: local
+    driver_opts:
+      type: none
+      device: /var/lib/docupulse/uploads/${COMPOSE_PROJECT_NAME:-default}
+      o: bind

View File

@@ -6,7 +6,6 @@ echo "POSTGRES_USER: $POSTGRES_USER"
 echo "POSTGRES_PASSWORD: $POSTGRES_PASSWORD"
 echo "POSTGRES_DB: $POSTGRES_DB"
 echo "DATABASE_URL: $DATABASE_URL"
-echo "REDIS_URL: $REDIS_URL"
 # Wait for the database to be ready
 echo "Waiting for database to be ready..."
@@ -15,13 +14,6 @@ while ! nc -z db 5432; do
 done
 echo "Database is ready!"
-# Wait for Redis to be ready
-echo "Waiting for Redis to be ready..."
-while ! nc -z redis 6379; do
-  sleep 0.1
-done
-echo "Redis is ready!"
 # Wait for PostgreSQL to be ready to accept connections
 echo "Waiting for PostgreSQL to accept connections..."
 until PGPASSWORD=$POSTGRES_PASSWORD psql -h db -U $POSTGRES_USER -d $POSTGRES_DB -c '\q'; do

View File

@@ -10,8 +10,5 @@ python-dotenv>=0.19.0
 psycopg2-binary==2.9.9
 gunicorn==21.2.0
 email_validator==2.1.0.post1
-celery>=5.3.0
-redis>=4.5.0
 alembic>=1.7.0
-flower>=2.0.0
 prometheus-client>=0.16.0

View File

@@ -16,6 +16,7 @@ def init_app(app: Flask):
     from .conversations import conversations_bp as conversations_routes
     from .admin import admin as admin_routes
     from .email_templates import email_templates as email_templates_routes
+    from .user import user_bp as user_routes
     # Initialize routes
     init_main_routes(main_bp)
@@ -35,6 +36,7 @@ def init_app(app: Flask):
     app.register_blueprint(conversations_routes)
     app.register_blueprint(admin_routes)
     app.register_blueprint(email_templates_routes)
+    app.register_blueprint(user_routes)
     @app.route('/rooms/<int:room_id>/trash')
     @login_required
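Note that app.py used to register user_bp with url_prefix='/api/users', while init_app now registers user_routes with no prefix. Unless routes/user.py already declares the prefix on the blueprint itself (that file is not part of this diff), the user endpoints silently move off /api/users. A hedged sketch of keeping the old URLs by pinning the prefix on the blueprint; the example endpoint is invented:

from flask import Blueprint, jsonify
from flask_login import current_user, login_required

# Declaring the prefix here keeps /api/users/... working even though
# the routes package now registers the blueprint without url_prefix.
user_bp = Blueprint('user', __name__, url_prefix='/api/users')

@user_bp.route('/me')        # illustrative route only; the real ones are not shown
@login_required
def me():
    return jsonify(id=current_user.id)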

View File

@@ -10,7 +10,6 @@ from email.mime.text import MIMEText
 from email.mime.multipart import MIMEMultipart
 from email.utils import formatdate
 import json
-from celery_worker import celery
 logger = logging.getLogger(__name__)
@@ -31,16 +30,9 @@ def get_smtp_settings() -> Optional[Dict[str, Any]]:
         logger.error(f"Error retrieving SMTP settings: {str(e)}")
         return None
-@celery.task
-def send_email_task(mail_id: int):
-    """Celery task to send an email asynchronously"""
+def send_email_via_smtp(mail: Mail) -> bool:
+    """Send an email synchronously"""
     try:
-        # Get the mail record
-        mail = Mail.query.get(mail_id)
-        if not mail:
-            logger.error(f"Mail record not found for ID: {mail_id}")
-            return False
         # Get SMTP settings
         smtp_settings = get_smtp_settings()
         if not smtp_settings:
@@ -76,20 +68,6 @@ def send_email_task(mail_id: int):
     except Exception as e:
         logger.error(f"Error sending email: {str(e)}")
-        if mail:
         mail.status = 'failed'
         mail.error_message = str(e)
         db.session.commit()
         return False
-def send_email_via_smtp(mail: Mail) -> bool:
-    """Queue an email to be sent asynchronously"""
-    try:
-        # Queue the email sending task
-        send_email_task.delay(mail.id)
-        return True
-    except Exception as e:
-        logger.error(f"Error queueing email: {str(e)}")
-        mail.status = 'failed'
-        mail.error_message = str(e)
-        db.session.commit()
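Since emails are now sent inline rather than handed to a worker, the SMTP round-trip happens inside the web request. The elided body above presumably opens a connection using the values from get_smtp_settings(); if so, an explicit socket timeout is worth having so a slow SMTP server cannot stall the request indefinitely. A sketch with hypothetical parameter names; whether STARTTLS and login apply depends on the actual settings:

import smtplib
from email.mime.multipart import MIMEMultipart

def deliver(message: MIMEMultipart, host: str, port: int,
            username: str, password: str) -> None:
    # The timeout bounds the blocking call now that no Celery worker absorbs it.
    with smtplib.SMTP(host, port, timeout=10) as server:
        server.starttls()
        server.login(username, password)
        server.send_message(message)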