105 Commits

SHA1 Message Date
ea841e4d54 final migration issues fixed 2025-06-02 21:11:13 +02:00
5c6c3f436e more robust migrations 2025-06-02 18:58:48 +02:00
4dbaa27cba fixed migrations 2025-06-02 16:11:56 +02:00
c95a1c456b sending email async with celery 2025-06-02 14:55:50 +02:00
66ac834ab0 Update recent_activity.html 2025-06-02 14:36:34 +02:00
81ee935150 sending mails using SMTP 2025-06-02 14:33:04 +02:00
765c07316a SMTP Settings 2025-06-02 14:30:20 +02:00
694c8df364 Update email_templates.html 2025-06-02 13:15:35 +02:00
220d892fa0 template fix 2025-06-02 12:07:12 +02:00
75127394c7 fix settings page csrf 2025-06-02 11:46:42 +02:00
11745f2eb8 Better fill codes 2025-06-02 11:03:42 +02:00
fdef0c5f66 better reader for mails 2025-06-02 10:33:26 +02:00
5a9b6be79d email log 2025-06-02 09:30:42 +02:00
38e24a690a Update email_templates.html 2025-06-02 09:19:40 +02:00
7d08a57c85 add mail to table on notif 2025-06-02 09:17:21 +02:00
17e0781b14 add default templates 2025-06-02 08:52:45 +02:00
b06a282160 migration fixes 2025-06-02 08:34:46 +02:00
e8d79cca19 email templates page 2025-06-01 22:00:45 +02:00
047ad6ef10 Add email template table 2025-06-01 20:09:42 +02:00
06772ed48c Update storage_usage.html 2025-06-01 14:18:35 +02:00
b9233136a7 fixed some issues 2025-06-01 14:11:19 +02:00
85bfd0f3ae update file-grid 2025-06-01 13:33:55 +02:00
2800da1859 even better preview! 2025-06-01 12:46:06 +02:00
3a768146c1 better preview visuals 2025-06-01 12:43:24 +02:00
ea118a37c5 better preview of files 2025-06-01 12:38:51 +02:00
aeefd17b10 File preview 2025-06-01 12:31:10 +02:00
c0a97a1714 Update app.py 2025-05-31 23:22:42 +02:00
b55a919944 Update app.py 2025-05-31 23:20:24 +02:00
3e7f7ff636 Update app.py 2025-05-31 23:19:11 +02:00
e1390a8adc fix event filtering 2025-05-31 23:15:46 +02:00
1c74706736 Update entrypoint.sh 2025-05-31 23:12:35 +02:00
58c23a6380 notif changes 2025-05-31 23:10:00 +02:00
779e81346b unread notifs 2025-05-31 23:08:38 +02:00
08a11c240d room notifications 2025-05-31 22:58:31 +02:00
c452a920b1 fixed editing members 2025-05-31 22:53:52 +02:00
fda5655533 added extra log 2025-05-31 22:19:59 +02:00
ac49c842b8 Update base.html 2025-05-31 19:17:58 +02:00
a9c0debd6c unified dashboard cards view all buttons 2025-05-31 19:10:41 +02:00
c2f06a8e15 Update contacts.html 2025-05-31 19:09:41 +02:00
2c9b302a69 added events to event system 2025-05-31 19:07:29 +02:00
224d4d400e Add notif page 2025-05-31 18:28:53 +02:00
5e5d1beb5e fix csrf token on profile 2025-05-31 18:15:50 +02:00
4e6bf7b03c Better version for convos 2025-05-31 13:02:56 +02:00
4bd5180b87 storage cards on dash 2025-05-31 13:00:06 +02:00
90bca4c93b dashboard improvement 2025-05-31 12:53:41 +02:00
36695c1398 fix some style issues 2025-05-31 12:34:16 +02:00
fb2837e523 change checkbox colors 2025-05-31 12:28:32 +02:00
45a1bc07c6 search inside folders 2025-05-31 12:24:48 +02:00
4494ebdeb3 centered modal 2025-05-31 12:20:35 +02:00
4bb776f801 fix paging on events 2025-05-31 12:18:52 +02:00
e0be56a7f4 fix file rename 2025-05-31 12:11:06 +02:00
821330eba5 Fix name not in rename modal 2025-05-31 12:09:24 +02:00
f13f5a1e08 fix upload multi 2025-05-30 23:31:14 +02:00
0d5fd83e01 fixed filters 2025-05-30 21:31:25 +02:00
50f7e115d6 fix more event logging 2025-05-30 21:19:48 +02:00
f7853f96ed fixing logs on multiple rooms 2025-05-30 21:08:58 +02:00
a08345e676 added a lot of logging 2025-05-30 20:43:14 +02:00
c09a5c758e fix a bunch in settings 2025-05-30 20:32:40 +02:00
43f29f9a46 update settings logs 2025-05-30 13:53:18 +02:00
24612879a1 Update conversations.html 2025-05-30 13:52:13 +02:00
7723cd0d70 logging auth, conversations, and contacts 2025-05-30 13:48:07 +02:00
9159817947 fix all csrf token issues 2025-05-30 13:22:51 +02:00
fee79c6ec7 Documentation in room files py 2025-05-30 12:57:25 +02:00
986db28494 user update logs 2025-05-29 23:02:30 +02:00
37fcc5f34c fixed some issues with profile and events 2025-05-29 22:40:59 +02:00
8f24e21d5d user logging 2025-05-29 22:33:05 +02:00
5dbdd43785 utils and event logging 2025-05-29 15:19:42 +02:00
6d959ac253 Revert "Added events system"
This reverts commit f00d569db3.
2025-05-29 14:45:52 +02:00
f00d569db3 Added events system 2025-05-29 14:27:15 +02:00
3174f8fa5b add events table 2025-05-29 13:57:28 +02:00
5ecb8c956c add notification page 2025-05-29 10:16:58 +02:00
096a70bb5d better mobile dash 2025-05-28 22:45:43 +02:00
4f8261bda9 cache busting JS files 2025-05-28 21:55:26 +02:00
c8dd4ac165 cache busting on CSS files 2025-05-28 21:50:18 +02:00
b70e4624cb mobile settings for base template 2025-05-28 21:36:03 +02:00
5c5829c487 documentation for all JS files 2025-05-28 16:01:18 +02:00
1134f5b099 restore starring 2025-05-28 14:24:53 +02:00
6272f71355 better creator badges 2025-05-28 14:10:33 +02:00
082924a3ba fixed messaging! 2025-05-28 14:06:36 +02:00
2a1b6f8a22 fix trashing in rooms 2025-05-28 13:45:32 +02:00
d77dcec068 fix move 2025-05-28 12:32:40 +02:00
ef4b4ab39f fix downloads 2025-05-28 12:13:56 +02:00
552d1feb2e fix file batch deletion 2025-05-28 12:09:56 +02:00
9b98370989 Started room separation 2025-05-28 11:37:25 +02:00
11446e00db debugger cleanup 2025-05-28 10:06:30 +02:00
d4465c20a8 rooms list separation 2025-05-28 10:05:00 +02:00
92bf70974f room member separation 2025-05-28 10:03:47 +02:00
71072994b5 Update profile.html 2025-05-28 10:02:38 +02:00
b091f1bb4e convo manager separation 2025-05-28 10:01:04 +02:00
c9c0eba15b convo-list separation 2025-05-28 09:59:40 +02:00
5c5d03e60c conversation scripts and style separation 2025-05-28 09:58:47 +02:00
56d9b5e95b contact list JS separation 2025-05-28 09:55:16 +02:00
e20af39e83 contact form JS separation 2025-05-28 09:53:57 +02:00
437a054d3b color-logger script 2025-05-28 09:51:52 +02:00
669a96174c Revert "Update conversation.html"
This reverts commit d76bee84f9.
2025-05-27 16:52:30 +02:00
d76bee84f9 Update conversation.html 2025-05-27 16:43:40 +02:00
348a1dd601 better spinner icon 2025-05-27 16:25:40 +02:00
c0d93fe6ac Update conversation.html 2025-05-27 16:21:08 +02:00
c12ccaab53 Update conversation.html 2025-05-27 16:15:39 +02:00
45b3fb0cd6 implement socket heartbeat 2025-05-27 16:12:15 +02:00
e9b1fb6577 socketio changes 2025-05-27 16:06:59 +02:00
26572b740e Update conversation.html 2025-05-27 16:02:21 +02:00
ca0c3ef4bd Update conversation.html 2025-05-27 15:58:00 +02:00
37cc454804 Update conversation.html 2025-05-27 15:52:14 +02:00
586337ceec Update conversation.html 2025-05-27 15:49:49 +02:00
246 changed files with 14078 additions and 3622 deletions

View File

@@ -1,33 +1,57 @@
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
postgresql-client \
build-essential \
libpq-dev \
curl \
netcat-traditional \
&& rm -rf /var/lib/apt/lists/*
# Create a non-root user
RUN useradd -m -u 1000 celery
# Set working directory
WORKDIR /app
# Copy requirements first to leverage Docker cache
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy the rest of the application
# Copy application code
COPY . .
# Create migrations directory if it doesn't exist
RUN mkdir -p migrations/versions
# Create necessary directories and set permissions
RUN mkdir -p /app/uploads /app/static/uploads && \
chown -R celery:celery /app
# Make entrypoint script executable
RUN chmod +x entrypoint.sh
# Create and set up startup script
RUN echo '#!/bin/bash\n\
echo "Waiting for database..."\n\
while ! nc -z db 5432; do\n\
sleep 0.1\n\
done\n\
echo "Database is ready!"\n\
\n\
echo "Waiting for Redis..."\n\
while ! nc -z redis 6379; do\n\
sleep 0.1\n\
done\n\
echo "Redis is ready!"\n\
\n\
echo "Running database migrations..."\n\
flask db upgrade\n\
\n\
echo "Creating admin user..."\n\
flask create-admin\n\
\n\
echo "Starting application..."\n\
exec "$@"' > /app/start.sh && \
chmod +x /app/start.sh && \
chown celery:celery /app/start.sh
# Set environment variables
ENV FLASK_APP=app.py
ENV FLASK_ENV=production
# Switch to non-root user
USER celery
# Expose the port the app runs on
EXPOSE 5000
# Use the entrypoint script
ENTRYPOINT ["./entrypoint.sh"]
# Set entrypoint
ENTRYPOINT ["/app/start.sh"]

Binary files not shown.

59
app.py
View File

@@ -1,8 +1,9 @@
from flask import Flask, send_from_directory
import random
from flask import Flask, send_from_directory, jsonify
from flask_migrate import Migrate
from dotenv import load_dotenv
import os
from models import User
from models import User, SiteSettings
from flask_wtf.csrf import generate_csrf
from routes.room_files import room_files_bp
from routes.user import user_bp
@@ -11,7 +12,9 @@ from routes.trash import trash_bp
from tasks import cleanup_trash
import click
from utils import timeago
from extensions import db, login_manager, csrf, socketio
from extensions import db, login_manager, csrf
from utils.email_templates import create_default_templates
from celery_worker import init_celery, celery
# Load environment variables
load_dotenv()
@@ -24,6 +27,7 @@ def create_app():
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', 'your-secure-secret-key-here')
app.config['UPLOAD_FOLDER'] = os.path.join(app.root_path, 'static', 'uploads')
app.config['CSS_VERSION'] = os.getenv('CSS_VERSION', '1.0.3') # Add CSS version for cache busting
# Initialize extensions
db.init_app(app)
@@ -31,24 +35,54 @@ def create_app():
login_manager.init_app(app)
login_manager.login_view = 'auth.login'
csrf.init_app(app)
socketio.init_app(app)
# Initialize Celery
init_celery(app)
@app.context_processor
def inject_csrf_token():
return dict(csrf_token=generate_csrf())
@app.context_processor
def inject_config():
site_settings = SiteSettings.query.first()
if not site_settings:
site_settings = SiteSettings()
db.session.add(site_settings)
db.session.commit()
return dict(config=app.config, site_settings=site_settings)
# User loader for Flask-Login
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
# Health check endpoint
@app.route('/health')
def health_check():
try:
# Check database connection
db.session.execute('SELECT 1')
# Check Redis connection
celery.control.inspect().ping()
return jsonify({
'status': 'healthy',
'database': 'connected',
'redis': 'connected'
}), 200
except Exception as e:
return jsonify({
'status': 'unhealthy',
'error': str(e)
}), 500
# Initialize routes
from routes import init_app
init_app(app)
app.register_blueprint(room_files_bp, url_prefix='/api/rooms')
app.register_blueprint(user_bp, url_prefix='/api/users')
app.register_blueprint(room_members_bp, url_prefix='/api/rooms')
app.register_blueprint(trash_bp, url_prefix='/api/rooms')
app.register_blueprint(user_bp)
app.register_blueprint(trash_bp, url_prefix='/api/trash')
@app.cli.command("cleanup-trash")
def cleanup_trash_command():
@@ -72,7 +106,7 @@ def create_app():
is_admin=True,
is_active=True
)
admin.set_password('q]H488h[8?.A')
admin.set_password('changeme')
db.session.add(admin)
db.session.commit()
click.echo("Default administrator user created successfully.")
@@ -80,6 +114,15 @@ def create_app():
# Register custom filters
app.jinja_env.filters['timeago'] = timeago
# Create default email templates if they don't exist
with app.app_context():
try:
# Ensure database tables exist
db.create_all()
create_default_templates()
except Exception as e:
print(f"Warning: Could not create default templates: {e}")
return app
app = create_app()
@@ -89,4 +132,4 @@ def profile_pic(filename):
return send_from_directory(os.path.join(os.getcwd(), 'uploads', 'profile_pics'), filename)
if __name__ == '__main__':
socketio.run(app, debug=True)
app.run(debug=True)
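A quick way to exercise the new /health endpoint is Flask's test client; this is only a sketch and assumes DATABASE_URL and REDIS_URL point at running services, as wired up in docker-compose.yml:

from app import create_app

app = create_app()
with app.test_client() as client:
    resp = client.get('/health')
    # Expect 200 with {'status': 'healthy', ...} when Postgres and Redis respond,
    # otherwise 500 with the error message from the except branch.
    print(resp.status_code, resp.get_json())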

51
celery_worker.py Normal file
View File

@@ -0,0 +1,51 @@
from celery import Celery
from flask import current_app
import os
import logging
# Configure logging
logger = logging.getLogger(__name__)
# Get Redis URL from environment variable or use default
REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
# Configure Celery
celery = Celery(
'docupulse',
backend=REDIS_URL,
broker=REDIS_URL,
# Add some default configuration
task_serializer='json',
accept_content=['json'],
result_serializer='json',
timezone='UTC',
enable_utc=True,
# Add retry configuration
task_acks_late=True,
task_reject_on_worker_lost=True,
task_default_retry_delay=300, # 5 minutes
task_max_retries=3
)
def init_celery(app):
"""Initialize Celery with Flask app context"""
celery.conf.update(app.config)
class ContextTask(celery.Task):
"""Celery task that runs within Flask app context"""
def __call__(self, *args, **kwargs):
with app.app_context():
return self.run(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Handle task failure"""
logger.error(f'Task {task_id} failed: {exc}')
super().on_failure(exc, task_id, args, kwargs, einfo)
def on_retry(self, exc, task_id, args, kwargs, einfo):
"""Handle task retry"""
logger.warning(f'Task {task_id} is being retried: {exc}')
super().on_retry(exc, task_id, args, kwargs, einfo)
celery.Task = ContextTask
return celery
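For context, a task module built on this Celery instance would look roughly like the sketch below. It is not part of this diff: send_email_task and utils.mailer.send_smtp are illustrative assumptions; only the celery object and the ContextTask behaviour come from celery_worker.py above.

from celery_worker import celery

@celery.task(bind=True, max_retries=3)
def send_email_task(self, recipient, subject, body):
    # ContextTask wraps run() in app.app_context(), so extensions like db are usable here
    try:
        from utils.mailer import send_smtp  # assumed helper, not shown in this diff
        send_smtp(recipient, subject, body)
    except Exception as exc:
        # Falls back to the retry policy configured above (task_default_retry_delay=300s)
        raise self.retry(exc=exc)

Callers would enqueue it with send_email_task.delay(recipient, subject, body) rather than calling it synchronously.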

11
create_notifs_table.py Normal file
View File

@@ -0,0 +1,11 @@
from app import app, db
from models import Notif
def create_notifs_table():
with app.app_context():
# Create the table
Notif.__table__.create(db.engine)
print("Notifications table created successfully!")
if __name__ == '__main__':
create_notifs_table()

View File

@@ -3,6 +3,7 @@ version: '3.8'
services:
web:
build: .
command: gunicorn --bind 0.0.0.0:5000 app:app
ports:
- "10335:5000"
environment:
@@ -12,11 +13,18 @@ services:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
- POSTGRES_DB=docupulse
- REDIS_URL=redis://redis:6379/0
volumes:
- uploads:/app/uploads
depends_on:
- db
- redis
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5000/health"]
interval: 30s
timeout: 10s
retries: 3
deploy:
resources:
limits:
@@ -32,6 +40,49 @@ services:
volumes:
- postgres_data:/var/lib/postgresql/data
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 30s
timeout: 10s
retries: 3
redis:
image: redis:7
ports:
- "6379:6379"
restart: unless-stopped
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 30s
timeout: 10s
retries: 3
celery_worker:
build: .
command: celery -A celery_worker.celery worker --loglevel=info
volumes:
- .:/app
environment:
- FLASK_APP=app.py
- FLASK_ENV=development
- DATABASE_URL=postgresql://postgres:postgres@db:5432/docupulse
- REDIS_URL=redis://redis:6379/0
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
restart: unless-stopped
healthcheck:
test: ["CMD", "celery", "-A", "celery_worker.celery", "inspect", "ping"]
interval: 30s
timeout: 10s
retries: 3
deploy:
resources:
limits:
cpus: '0.5'
memory: 512M
volumes:
postgres_data:

View File

@@ -6,6 +6,7 @@ echo "POSTGRES_USER: $POSTGRES_USER"
echo "POSTGRES_PASSWORD: $POSTGRES_PASSWORD"
echo "POSTGRES_DB: $POSTGRES_DB"
echo "DATABASE_URL: $DATABASE_URL"
echo "REDIS_URL: $REDIS_URL"
# Wait for the database to be ready
echo "Waiting for database to be ready..."
@@ -14,6 +15,13 @@ while ! nc -z db 5432; do
done
echo "Database is ready!"
# Wait for Redis to be ready
echo "Waiting for Redis to be ready..."
while ! nc -z redis 6379; do
sleep 0.1
done
echo "Redis is ready!"
# Wait for PostgreSQL to be ready to accept connections
echo "Waiting for PostgreSQL to accept connections..."
until PGPASSWORD=$POSTGRES_PASSWORD psql -h db -U $POSTGRES_USER -d $POSTGRES_DB -c '\q'; do
@@ -29,6 +37,34 @@ flask db init
flask db migrate -m "Initial migration"
flask db upgrade
# Create events table
echo "Creating events table..."
python3 -c "
from migrations.add_events_table import upgrade
from app import create_app
app = create_app()
with app.app_context():
try:
upgrade()
print('Events table created successfully')
except Exception as e:
print(f'Error creating events table: {e}')
"
# Create notifs table
echo "Creating notifs table..."
python3 -c "
from migrations.add_notifs_table import upgrade
from app import create_app
app = create_app()
with app.app_context():
try:
upgrade()
print('Notifs table created successfully')
except Exception as e:
print(f'Error creating notifs table: {e}')
"
# Create default site settings if they don't exist
echo "Creating default site settings..."
python3 -c "

View File

@@ -1,4 +1,3 @@
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_wtf.csrf import CSRFProtect
@@ -6,5 +5,4 @@ from flask_wtf.csrf import CSRFProtect
# Initialize extensions
db = SQLAlchemy()
login_manager = LoginManager()
csrf = CSRFProtect()
socketio = SocketIO(cors_allowed_origins="*")
csrf = CSRFProtect()

View File

@@ -56,4 +56,18 @@ class ConversationForm(FlaskForm):
def __init__(self, *args, **kwargs):
super(ConversationForm, self).__init__(*args, **kwargs)
self.members.choices = [(u.id, f"{u.username} {u.last_name}") for u in User.query.filter_by(is_active=True).all()]
self.members.choices = [(u.id, f"{u.username} {u.last_name}") for u in User.query.filter_by(is_active=True).all()]
class CompanySettingsForm(FlaskForm):
company_name = StringField('Company Name', validators=[Optional(), Length(max=100)])
company_website = StringField('Website', validators=[Optional(), Length(max=200)])
company_email = StringField('Email', validators=[Optional(), Email(), Length(max=100)])
company_phone = StringField('Phone', validators=[Optional(), Length(max=20)])
company_address = StringField('Address', validators=[Optional(), Length(max=200)])
company_city = StringField('City', validators=[Optional(), Length(max=100)])
company_state = StringField('State', validators=[Optional(), Length(max=100)])
company_zip = StringField('ZIP Code', validators=[Optional(), Length(max=20)])
company_country = StringField('Country', validators=[Optional(), Length(max=100)])
company_description = TextAreaField('Description', validators=[Optional()])
company_industry = StringField('Industry', validators=[Optional(), Length(max=100)])
company_logo = FileField('Company Logo', validators=[FileAllowed(['jpg', 'jpeg', 'png', 'gif'], 'Images only!')])

Binary file not shown.

View File

@@ -0,0 +1,61 @@
import os
import sys
from pathlib import Path
# Add the parent directory to Python path so we can import from root
sys.path.append(str(Path(__file__).parent.parent))
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from extensions import db
from sqlalchemy import text
def upgrade():
# Create events table
with db.engine.connect() as conn:
conn.execute(text('''
CREATE TABLE IF NOT EXISTS events (
id SERIAL PRIMARY KEY,
event_type VARCHAR(50) NOT NULL,
user_id INTEGER NOT NULL REFERENCES "user" (id),
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
details JSONB,
ip_address VARCHAR(45),
user_agent VARCHAR(255)
);
-- Create index on event_type for faster filtering
CREATE INDEX IF NOT EXISTS idx_events_event_type ON events(event_type);
-- Create index on timestamp for faster date-based queries
CREATE INDEX IF NOT EXISTS idx_events_timestamp ON events(timestamp);
-- Create index on user_id for faster user-based queries
CREATE INDEX IF NOT EXISTS idx_events_user_id ON events(user_id);
'''))
conn.commit()
def downgrade():
# Drop events table and its indexes
with db.engine.connect() as conn:
conn.execute(text('''
DROP INDEX IF EXISTS idx_events_event_type;
DROP INDEX IF EXISTS idx_events_timestamp;
DROP INDEX IF EXISTS idx_events_user_id;
DROP TABLE IF EXISTS events;
'''))
conn.commit()
if __name__ == '__main__':
app = Flask(__name__)
# Use the same database configuration as in app.py
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL', 'postgresql://postgres:1253@localhost:5432/docupulse')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
print("Connecting to database...")
db.init_app(app)
with app.app_context():
upgrade()
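As a usage sketch (not part of this migration), the events table created above can be read back through the shared SQLAlchemy session; recent_events below is hypothetical, relies only on the column names defined here, and must run inside an application context:

from sqlalchemy import text
from extensions import db

def recent_events(limit=20):
    # Newest-first slice of the activity feed, served by idx_events_timestamp
    rows = db.session.execute(
        text("SELECT event_type, user_id, timestamp, details "
             "FROM events ORDER BY timestamp DESC LIMIT :limit"),
        {"limit": limit},
    )
    return [dict(row._mapping) for row in rows]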

View File

@@ -0,0 +1,61 @@
import os
import sys
from pathlib import Path
# Add the parent directory to Python path so we can import from root
sys.path.append(str(Path(__file__).parent.parent))
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from extensions import db
from sqlalchemy import text
def upgrade():
# Create notifs table
with db.engine.connect() as conn:
conn.execute(text('''
CREATE TABLE IF NOT EXISTS notifs (
id SERIAL PRIMARY KEY,
notif_type VARCHAR(50) NOT NULL,
user_id INTEGER NOT NULL REFERENCES "user" (id),
sender_id INTEGER REFERENCES "user" (id),
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
read BOOLEAN NOT NULL DEFAULT FALSE,
details JSONB
);
-- Create indexes for faster queries
CREATE INDEX IF NOT EXISTS idx_notifs_notif_type ON notifs(notif_type);
CREATE INDEX IF NOT EXISTS idx_notifs_timestamp ON notifs(timestamp);
CREATE INDEX IF NOT EXISTS idx_notifs_user_id ON notifs(user_id);
CREATE INDEX IF NOT EXISTS idx_notifs_sender_id ON notifs(sender_id);
CREATE INDEX IF NOT EXISTS idx_notifs_read ON notifs(read);
'''))
conn.commit()
def downgrade():
# Drop notifs table and its indexes
with db.engine.connect() as conn:
conn.execute(text('''
DROP INDEX IF EXISTS idx_notifs_notif_type;
DROP INDEX IF EXISTS idx_notifs_timestamp;
DROP INDEX IF EXISTS idx_notifs_user_id;
DROP INDEX IF EXISTS idx_notifs_sender_id;
DROP INDEX IF EXISTS idx_notifs_read;
DROP TABLE IF EXISTS notifs;
'''))
conn.commit()
if __name__ == '__main__':
app = Flask(__name__)
# Use the same database configuration as in app.py
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL', 'postgresql://postgres:1253@localhost:5432/docupulse')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
print("Connecting to database...")
db.init_app(app)
with app.app_context():
upgrade()

View File

@@ -0,0 +1,41 @@
"""add key value settings table
Revision ID: 0a8006bd1732
Revises: 20519a2437c2
Create Date: 2025-06-02 14:10:54.033943
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = '0a8006bd1732'
down_revision = '20519a2437c2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'key_value_settings' not in tables:
op.create_table('key_value_settings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('key', sa.String(length=100), nullable=False),
sa.Column('value', sa.Text(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('key')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('key_value_settings')
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-26 14:00:05.521776
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,23 +19,41 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('message')]
with op.batch_alter_table('message', schema=None) as batch_op:
batch_op.add_column(sa.Column('has_attachment', sa.Boolean(), nullable=True))
batch_op.add_column(sa.Column('attachment_name', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('attachment_path', sa.String(length=512), nullable=True))
batch_op.add_column(sa.Column('attachment_type', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('attachment_size', sa.Integer(), nullable=True))
if 'has_attachment' not in columns:
batch_op.add_column(sa.Column('has_attachment', sa.Boolean(), nullable=True))
if 'attachment_name' not in columns:
batch_op.add_column(sa.Column('attachment_name', sa.String(length=255), nullable=True))
if 'attachment_path' not in columns:
batch_op.add_column(sa.Column('attachment_path', sa.String(length=512), nullable=True))
if 'attachment_type' not in columns:
batch_op.add_column(sa.Column('attachment_type', sa.String(length=100), nullable=True))
if 'attachment_size' not in columns:
batch_op.add_column(sa.Column('attachment_size', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('message')]
with op.batch_alter_table('message', schema=None) as batch_op:
batch_op.drop_column('attachment_size')
batch_op.drop_column('attachment_type')
batch_op.drop_column('attachment_path')
batch_op.drop_column('attachment_name')
batch_op.drop_column('has_attachment')
if 'attachment_size' in columns:
batch_op.drop_column('attachment_size')
if 'attachment_type' in columns:
batch_op.drop_column('attachment_type')
if 'attachment_path' in columns:
batch_op.drop_column('attachment_path')
if 'attachment_name' in columns:
batch_op.drop_column('attachment_name')
if 'has_attachment' in columns:
batch_op.drop_column('has_attachment')
# ### end Alembic commands ###

View File

@@ -2,11 +2,12 @@
Revision ID: 1c297825e3a9
Revises:
Create Date: 2025-05-23 08:39:40.494853
Create Date: 2025-06-02 13:26:30.353000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -17,20 +18,27 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=150), nullable=False),
sa.Column('email', sa.String(length=150), nullable=False),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
# ### end Alembic commands ###
# Check if the table exists before creating it
conn = op.get_bind()
inspector = sa.inspect(conn)
if 'user' not in inspector.get_table_names():
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'user' not in tables:
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=150), nullable=False),
sa.Column('email', sa.String(length=150), nullable=False),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('user')
# ### end Alembic commands ###
# ### end Alembic commands ###

View File

@@ -0,0 +1,47 @@
"""add_mails_table
Revision ID: 20519a2437c2
Revises: 444d76da74ba
Create Date: 2025-06-02 09:04:39.972021
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = '20519a2437c2'
down_revision = '444d76da74ba'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'mails' not in tables:
op.create_table('mails',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('recipient', sa.String(length=150), nullable=False),
sa.Column('subject', sa.String(length=200), nullable=False),
sa.Column('body', sa.Text(), nullable=False),
sa.Column('status', sa.String(length=20), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('sent_at', sa.DateTime(), nullable=True),
sa.Column('template_id', sa.Integer(), nullable=True),
sa.Column('notif_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['notif_id'], ['notifs.id'], ),
sa.ForeignKeyConstraint(['template_id'], ['email_templates.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('mails')
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 16:10:53.731035
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,15 +19,25 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('contact')]
with op.batch_alter_table('contact', schema=None) as batch_op:
batch_op.add_column(sa.Column('is_admin', sa.Boolean(), nullable=True))
if 'is_admin' not in columns:
batch_op.add_column(sa.Column('is_admin', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('contact')]
with op.batch_alter_table('contact', schema=None) as batch_op:
batch_op.drop_column('is_admin')
if 'is_admin' in columns:
batch_op.drop_column('is_admin')
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 21:44:58.832286
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,17 +19,22 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room_member_permissions',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('can_view', sa.Boolean(), nullable=False),
sa.Column('can_upload', sa.Boolean(), nullable=False),
sa.Column('can_delete', sa.Boolean(), nullable=False),
sa.Column('can_share', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room_member_permissions' not in tables:
op.create_table('room_member_permissions',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('can_view', sa.Boolean(), nullable=False),
sa.Column('can_upload', sa.Boolean(), nullable=False),
sa.Column('can_delete', sa.Boolean(), nullable=False),
sa.Column('can_share', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 21:27:17.497481
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,15 +19,24 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room_members',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
with op.batch_alter_table('room', schema=None) as batch_op:
batch_op.drop_column('is_private')
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room_members' not in tables:
op.create_table('room_members',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
# Check if is_private column exists before dropping it
columns = [col['name'] for col in inspector.get_columns('room')]
if 'is_private' in columns:
with op.batch_alter_table('room', schema=None) as batch_op:
batch_op.drop_column('is_private')
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 21:25:27.880150
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,16 +19,21 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.Column('is_private', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['created_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room' not in tables:
op.create_table('room',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.Column('is_private', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['created_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 09:24:23.926302
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,12 +19,21 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('user')]
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.add_column(sa.Column('phone', sa.String(length=20), nullable=True))
batch_op.add_column(sa.Column('company', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('position', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('notes', sa.Text(), nullable=True))
batch_op.add_column(sa.Column('is_active', sa.Boolean(), nullable=True))
if 'phone' not in columns:
batch_op.add_column(sa.Column('phone', sa.String(length=20), nullable=True))
if 'company' not in columns:
batch_op.add_column(sa.Column('company', sa.String(length=100), nullable=True))
if 'position' not in columns:
batch_op.add_column(sa.Column('position', sa.String(length=100), nullable=True))
if 'notes' not in columns:
batch_op.add_column(sa.Column('notes', sa.Text(), nullable=True))
if 'is_active' not in columns:
batch_op.add_column(sa.Column('is_active', sa.Boolean(), nullable=True))
# ### end Alembic commands ###

View File

@@ -0,0 +1,63 @@
"""add_notifications_table
Revision ID: 444d76da74ba
Revises: c770e08966b4
Create Date: 2025-06-02 08:25:48.241102
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '444d76da74ba'
down_revision = 'c770e08966b4'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'template_variables' in tables:
op.drop_table('template_variables')
op.create_table('notification',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=200), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('type', sa.String(length=50), nullable=False),
sa.Column('read', sa.Boolean(), nullable=False, server_default='false'),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'notification' in tables:
op.drop_table('notification')
if 'template_variables' not in tables:
op.create_table('template_variables',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('notification_type', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('variable_name', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('description', sa.VARCHAR(length=200), autoincrement=False, nullable=False),
sa.Column('example_value', sa.VARCHAR(length=200), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('template_variables_pkey'))
)
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-24 10:07:02.159730
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,22 +19,31 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room_file',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('path', sa.String(length=1024), nullable=False),
sa.Column('type', sa.String(length=10), nullable=False),
sa.Column('size', sa.BigInteger(), nullable=True),
sa.Column('modified', sa.Float(), nullable=True),
sa.Column('uploaded_by', sa.Integer(), nullable=False),
sa.Column('uploaded_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['uploaded_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
with op.batch_alter_table('room_member_permissions', schema=None) as batch_op:
batch_op.drop_column('preferred_view')
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room_file' not in tables:
op.create_table('room_file',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('path', sa.String(length=1024), nullable=False),
sa.Column('type', sa.String(length=10), nullable=False),
sa.Column('size', sa.BigInteger(), nullable=True),
sa.Column('modified', sa.Float(), nullable=True),
sa.Column('uploaded_by', sa.Integer(), nullable=False),
sa.Column('uploaded_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['uploaded_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
# Check if preferred_view column exists before trying to drop it
columns = [col['name'] for col in inspector.get_columns('room_member_permissions')]
if 'preferred_view' in columns:
with op.batch_alter_table('room_member_permissions', schema=None) as batch_op:
batch_op.drop_column('preferred_view')
# ### end Alembic commands ###
@@ -44,4 +54,4 @@ def downgrade():
batch_op.add_column(sa.Column('preferred_view', sa.VARCHAR(length=10), autoincrement=False, nullable=False))
op.drop_table('room_file')
# ### end Alembic commands ###
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-24 18:14:38.320999
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
@@ -18,43 +19,63 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('room_file')]
with op.batch_alter_table('room_file', schema=None) as batch_op:
batch_op.add_column(sa.Column('starred', sa.Boolean(), nullable=True))
batch_op.alter_column('path',
existing_type=sa.VARCHAR(length=1024),
type_=sa.String(length=255),
existing_nullable=False)
batch_op.alter_column('size',
existing_type=sa.BIGINT(),
type_=sa.Integer(),
existing_nullable=True)
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=True)
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
if 'starred' not in columns:
batch_op.add_column(sa.Column('starred', sa.Boolean(), nullable=True))
# Only alter columns if they exist
if 'path' in columns:
batch_op.alter_column('path',
existing_type=sa.VARCHAR(length=1024),
type_=sa.String(length=255),
existing_nullable=False)
if 'size' in columns:
batch_op.alter_column('size',
existing_type=sa.BIGINT(),
type_=sa.Integer(),
existing_nullable=True)
if 'uploaded_by' in columns:
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=True)
if 'uploaded_at' in columns:
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('room_file')]
with op.batch_alter_table('room_file', schema=None) as batch_op:
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=False)
batch_op.alter_column('size',
existing_type=sa.Integer(),
type_=sa.BIGINT(),
existing_nullable=True)
batch_op.alter_column('path',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=1024),
existing_nullable=False)
batch_op.drop_column('starred')
if 'uploaded_at' in columns:
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
if 'uploaded_by' in columns:
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=False)
if 'size' in columns:
batch_op.alter_column('size',
existing_type=sa.Integer(),
type_=sa.BIGINT(),
existing_nullable=True)
if 'path' in columns:
batch_op.alter_column('path',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=1024),
existing_nullable=False)
if 'starred' in columns:
batch_op.drop_column('starred')
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2024-03-19 10:05:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.sql import text

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-25 10:03:03.423064
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-26 10:42:17.287566
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,17 +19,31 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('site_settings')]
with op.batch_alter_table('site_settings', schema=None) as batch_op:
batch_op.add_column(sa.Column('company_website', sa.String(length=200), nullable=True))
batch_op.add_column(sa.Column('company_email', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_phone', sa.String(length=20), nullable=True))
batch_op.add_column(sa.Column('company_address', sa.String(length=200), nullable=True))
batch_op.add_column(sa.Column('company_city', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_state', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_zip', sa.String(length=20), nullable=True))
batch_op.add_column(sa.Column('company_country', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_description', sa.Text(), nullable=True))
batch_op.add_column(sa.Column('company_industry', sa.String(length=100), nullable=True))
if 'company_website' not in columns:
batch_op.add_column(sa.Column('company_website', sa.String(length=200), nullable=True))
if 'company_email' not in columns:
batch_op.add_column(sa.Column('company_email', sa.String(length=100), nullable=True))
if 'company_phone' not in columns:
batch_op.add_column(sa.Column('company_phone', sa.String(length=20), nullable=True))
if 'company_address' not in columns:
batch_op.add_column(sa.Column('company_address', sa.String(length=200), nullable=True))
if 'company_city' not in columns:
batch_op.add_column(sa.Column('company_city', sa.String(length=100), nullable=True))
if 'company_state' not in columns:
batch_op.add_column(sa.Column('company_state', sa.String(length=100), nullable=True))
if 'company_zip' not in columns:
batch_op.add_column(sa.Column('company_zip', sa.String(length=20), nullable=True))
if 'company_country' not in columns:
batch_op.add_column(sa.Column('company_country', sa.String(length=100), nullable=True))
if 'company_description' not in columns:
batch_op.add_column(sa.Column('company_description', sa.Text(), nullable=True))
if 'company_industry' not in columns:
batch_op.add_column(sa.Column('company_industry', sa.String(length=100), nullable=True))
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2024-03-19 10:15:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.sql import text

View File

@@ -7,8 +7,10 @@ Create Date: 2025-05-25 21:16:39.683736
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '9faab7ef6036'
down_revision = 'ca9026520dad'
@@ -18,25 +20,35 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('site_settings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('primary_color', sa.String(length=7), nullable=True),
sa.Column('secondary_color', sa.String(length=7), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'site_settings' not in tables:
op.create_table('site_settings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('primary_color', sa.String(length=7), nullable=True),
sa.Column('secondary_color', sa.String(length=7), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.drop_table('color_settings')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('color_settings',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('primary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('secondary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('color_settings_pkey'))
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'color_settings' not in tables:
op.create_table('color_settings',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('primary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('secondary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('color_settings_pkey'))
)
op.drop_table('site_settings')
# ### end Alembic commands ###
# ### end Alembic commands ###

Some files were not shown because too many files have changed in this diff.