302 Commits

Author SHA1 Message Date
4cf9cca116 version display on instances page 2025-06-23 15:46:29 +02:00
af375a2b5c version v3 2025-06-23 15:17:17 +02:00
23a55e025c Update launch_progress.js 2025-06-23 15:05:07 +02:00
40b1a63cf5 much improved launch process 2025-06-23 14:50:37 +02:00
033f82eb2b better 504 handling 2025-06-23 14:24:13 +02:00
1370bef1f1 version v2 2025-06-23 14:11:11 +02:00
1a6741ec10 remove versions for now 2025-06-23 11:06:54 +02:00
0b9005b481 Update launch_progress.js 2025-06-23 10:56:58 +02:00
7ec3027410 Update entrypoint.sh 2025-06-23 09:46:21 +02:00
405cc83ba1 Update launch_progress.js 2025-06-23 09:44:03 +02:00
0bbdf0eaab better timeouts 2025-06-23 09:35:15 +02:00
0da5d9305d version 2025-06-23 09:30:04 +02:00
9fc09be7de Update admin_api.py 2025-06-23 09:09:26 +02:00
3f8517ec7d Update launch_progress.js 2025-06-20 19:44:02 +02:00
e85d91d1f4 improved launch process using cloudflare 2025-06-20 19:34:37 +02:00
bb139a2b95 better features and pricing 2025-06-20 16:05:15 +02:00
c06dd6c578 starting public section 2025-06-20 14:56:57 +02:00
3d4034d6b1 copy smtp to new instance 2025-06-20 14:09:49 +02:00
f825bab894 confimration mail at end of launch process 2025-06-20 13:36:45 +02:00
c9d1d7416b password reset online test 2025-06-20 13:18:13 +02:00
e25c7660b0 launch process completed! 2025-06-20 12:51:37 +02:00
843af814fd apply company info 2025-06-20 12:41:06 +02:00
cb19b8b21c fix company null 2025-06-20 10:24:53 +02:00
95456651a6 Update connections.js 2025-06-20 10:11:10 +02:00
57aebb8c9e More dev wiki 2025-06-20 09:56:30 +02:00
e486b8a83d Start of dev pages 2025-06-19 23:02:34 +02:00
2f6de65e5c add flash messages back 2025-06-19 16:20:49 +02:00
7092167001 password reset 2025-06-19 16:11:42 +02:00
efdb6d50c3 Update launch_progress.js 2025-06-16 15:52:09 +02:00
04448e34c2 Update launch_progress.js 2025-06-16 15:46:19 +02:00
da75b4cd50 Update launch_progress.js 2025-06-16 11:16:00 +02:00
4e9a3fe139 fix launch issue 2025-06-16 10:55:22 +02:00
e469db9ba6 deploying works! 2025-06-16 10:19:28 +02:00
64569c3505 Split files 2025-06-16 08:42:02 +02:00
15f69f533a download docker compose during launch process 2025-06-15 13:37:08 +02:00
5d28bf31dd Delete admin_api.py 2025-06-15 13:18:58 +02:00
3f3dba8759 checking portainer connection 2025-06-15 13:17:44 +02:00
8fde46c157 Update launch_progress.html 2025-06-15 12:57:11 +02:00
d87b3e5b02 Update launch_progress.html 2025-06-15 12:52:40 +02:00
6b87fd6fc1 first 4 steps of launch 2025-06-14 18:18:43 +02:00
68940e87f9 instance launching (fake) 2025-06-14 14:45:41 +02:00
f0115a70f9 Splitting css and JS files on settings pages 2025-06-13 14:18:35 +02:00
a801eb1eeb steps for launching 2025-06-12 15:40:57 +02:00
83c94acbac step one of launch instance 2025-06-12 15:07:01 +02:00
5c2c514825 fixed settings not loading 2025-06-12 14:55:55 +02:00
04689797f7 add connections 2025-06-11 10:29:58 +02:00
468235662b Update logs.html 2025-06-10 13:56:29 +02:00
f5e6076123 Update instance_detail.html 2025-06-10 13:51:59 +02:00
0f1dc51949 user roles 2025-06-10 12:42:48 +02:00
583f1c9d32 Update instance_detail.html 2025-06-10 12:39:46 +02:00
af4ffb8559 Update instance_detail.html 2025-06-10 12:15:18 +02:00
a9130bbe61 Update company_info.html 2025-06-10 12:07:49 +02:00
7015a46f94 company info on instance detail 2025-06-10 12:04:29 +02:00
b96d5e4487 fix settings page 2025-06-10 11:48:34 +02:00
5bb0667060 Details for instances 2025-06-10 09:47:54 +02:00
580289d3a1 Getting instance data 2025-06-10 09:01:15 +02:00
5c3cce1556 Update admin_api.py 2025-06-10 07:45:27 +02:00
f2361b94ba Update admin_api.py 2025-06-09 22:52:06 +02:00
f71b461e29 Update admin_api.py 2025-06-09 22:43:57 +02:00
326bd1bd72 Added JWT token process 2025-06-09 22:34:19 +02:00
8f76832f69 updated response to login API 2025-06-09 22:27:05 +02:00
309b03956f Update instances.html 2025-06-09 15:41:18 +02:00
176ab4a194 updated authentication of instances 2025-06-09 15:07:29 +02:00
e43718894b better admin creation 2025-06-09 13:45:00 +02:00
c31f1bb59d fix migration... again... 2025-06-09 13:01:51 +02:00
2014c326b1 Active status check 2025-06-09 10:24:42 +02:00
112a99ffcb adding instances 2025-06-09 09:54:37 +02:00
7aa96119a9 Master instances 2025-06-08 21:54:29 +02:00
53ac07a9ee Update docker-compose.yml 2025-06-08 14:15:01 +02:00
3288824383 Update docker-compose.yml 2025-06-08 13:55:57 +02:00
9194b48eb8 Update docker-compose.yml 2025-06-08 13:40:14 +02:00
301a83b295 remove internal network check 2025-06-08 13:38:45 +02:00
2521c319a0 Revert "Update docker-compose.yml"
This reverts commit 522ea2d976.
2025-06-08 13:36:16 +02:00
522ea2d976 Update docker-compose.yml 2025-06-06 22:30:49 +02:00
91469735d2 Update entrypoint.sh 2025-06-06 21:22:57 +02:00
7a8005c263 testing 2025-06-06 21:17:39 +02:00
fc05fda666 testing entrypoint 2025-06-06 21:08:19 +02:00
fd34dd20ca create DB 2025-06-06 15:20:18 +02:00
a917822fb8 Update docker-compose.yml 2025-06-06 15:16:40 +02:00
30142f83df Update docker-compose.yml 2025-06-06 14:54:47 +02:00
1cbfab0c2f Master setting 2025-06-06 14:43:08 +02:00
46b56369c2 fix settings migration file 2025-06-06 14:31:29 +02:00
2db476ce09 update master logging 2025-06-06 14:28:47 +02:00
c5c1f35c08 update compose 2025-06-06 12:19:16 +02:00
3e5285225d first part of master_slave relation 2025-06-06 12:12:32 +02:00
e58bec3da0 Update recent_activity.html 2025-06-06 12:00:34 +02:00
cde3cba527 Update docker-compose.yml 2025-06-06 11:54:57 +02:00
c0346efcc7 Update docker-compose.yml 2025-06-06 11:53:12 +02:00
acececf899 Update docker-compose.yml 2025-06-06 11:47:22 +02:00
8509b0567b error pages 2025-06-06 10:35:56 +02:00
d619283d09 docker upload directories fixes 2025-06-06 10:12:46 +02:00
e4238d9fdb Better dashboard cards 2025-06-06 09:53:50 +02:00
d7c1305dae Update recent_activity.html 2025-06-06 09:49:25 +02:00
f6abdb5c63 better usage limit and deletion warnings 2025-06-06 09:44:23 +02:00
51cea567ca usage limits for rooms and convos 2025-06-06 09:37:26 +02:00
85b769f7dd Update admin_api.py 2025-06-06 09:19:33 +02:00
996f7dca16 Update requirements.txt 2025-06-06 09:16:11 +02:00
eb2946510a Update docker-compose.yml 2025-06-06 09:13:44 +02:00
99a76c540f Update docker-compose.yml 2025-06-06 09:08:27 +02:00
56177b2811 admin API with internal network 2025-06-06 09:03:39 +02:00
57fa221d47 Better contact form 2025-06-05 21:05:15 +02:00
f65265b4a5 Update 72ab6c4c6a5f_merge_heads.py 2025-06-05 19:04:39 +02:00
0047cfbcd1 Update entrypoint.sh 2025-06-05 18:52:20 +02:00
33f6e0386b added manager user type 2025-06-05 14:43:06 +02:00
164e8373a4 cool CSS 2025-06-05 12:27:57 +02:00
5834aec885 Update colors.css 2025-06-05 11:59:15 +02:00
ca32ee0de4 Update storage_usage.html 2025-06-05 11:55:26 +02:00
ee5b2d9fd9 try to fix insert 2025-06-05 11:52:39 +02:00
d4ae0fe2d8 Update add_docupulse_settings_table.py 2025-06-05 11:50:08 +02:00
39cbff2234 Update add_docupulse_settings_table.py 2025-06-05 11:47:26 +02:00
6273866324 bigint 2025-06-05 11:44:15 +02:00
a78f3c0786 added usage limit visuals and DB 2025-06-05 11:40:52 +02:00
97fde3388b Update company_info.html 2025-06-05 11:01:41 +02:00
6e5229c8ba Update company_info.html 2025-06-05 11:00:49 +02:00
b9df790d1f fix mail log 2025-06-05 10:51:56 +02:00
71213b87a0 fixed room links 2025-06-05 10:42:43 +02:00
5746600340 Update conversation.html 2025-06-05 10:32:55 +02:00
905a056c87 Better password security for new users 2025-06-04 14:21:12 +02:00
41cdd5ec7f better password management 2025-06-04 13:44:49 +02:00
88c3bc1b5b proper cache busting 2025-06-04 11:47:35 +02:00
0f9f9d1b73 Update file-grid.js 2025-06-04 11:32:47 +02:00
3dc897518e Update trash.js 2025-06-04 11:24:28 +02:00
79fa32d1dd deduplication of trash code 2025-06-04 11:16:43 +02:00
6ae1ee3365 trash empty route fix 2025-06-04 11:11:59 +02:00
add00d488c Update notification_overview.html 2025-06-04 11:06:53 +02:00
0a471792e1 Update notification_overview.html 2025-06-04 11:03:43 +02:00
e948a9e55f notifications on dashboard 2025-06-04 11:01:06 +02:00
7f97d90f04 fix permission issue on docker 2025-06-04 10:45:14 +02:00
b580bb2db3 better migrations 2025-06-04 09:31:12 +02:00
9dd4ac5863 Update app.py 2025-06-04 09:14:11 +02:00
27d4922ce8 Update docker-compose.yml 2025-06-04 09:09:10 +02:00
c1d4fe1c9a better health check 2025-06-04 09:08:39 +02:00
02e7710676 Update Dockerfile 2025-06-04 09:03:39 +02:00
cd16d34fe5 Update docker-compose.yml 2025-06-04 09:01:33 +02:00
4d38c8715e fixed preferred view 2025-06-04 08:55:31 +02:00
8edd96b671 remove celery 2025-06-04 08:33:04 +02:00
ea841e4d54 final migration issues fixed 2025-06-02 21:11:13 +02:00
5c6c3f436e more robust migrations 2025-06-02 18:58:48 +02:00
4dbaa27cba fixed migrations 2025-06-02 16:11:56 +02:00
c95a1c456b sending email async with celery 2025-06-02 14:55:50 +02:00
66ac834ab0 Update recent_activity.html 2025-06-02 14:36:34 +02:00
81ee935150 sending mails using SMTP 2025-06-02 14:33:04 +02:00
765c07316a SMTP Settings 2025-06-02 14:30:20 +02:00
694c8df364 Update email_templates.html 2025-06-02 13:15:35 +02:00
220d892fa0 template fix 2025-06-02 12:07:12 +02:00
75127394c7 fix settings page csrf 2025-06-02 11:46:42 +02:00
11745f2eb8 Better fill codes 2025-06-02 11:03:42 +02:00
fdef0c5f66 better reader for mails 2025-06-02 10:33:26 +02:00
5a9b6be79d email log 2025-06-02 09:30:42 +02:00
38e24a690a Update email_templates.html 2025-06-02 09:19:40 +02:00
7d08a57c85 add mail to table on notif 2025-06-02 09:17:21 +02:00
17e0781b14 add defaults templates 2025-06-02 08:52:45 +02:00
b06a282160 migration fixes 2025-06-02 08:34:46 +02:00
e8d79cca19 email templates page 2025-06-01 22:00:45 +02:00
047ad6ef10 Add email template table 2025-06-01 20:09:42 +02:00
06772ed48c Update storage_usage.html 2025-06-01 14:18:35 +02:00
b9233136a7 fixed some issues 2025-06-01 14:11:19 +02:00
85bfd0f3ae update file-grid 2025-06-01 13:33:55 +02:00
2800da1859 even better preview! 2025-06-01 12:46:06 +02:00
3a768146c1 better preview visuals 2025-06-01 12:43:24 +02:00
ea118a37c5 better preview of files 2025-06-01 12:38:51 +02:00
aeefd17b10 File preview 2025-06-01 12:31:10 +02:00
c0a97a1714 Update app.py 2025-05-31 23:22:42 +02:00
b55a919944 Update app.py 2025-05-31 23:20:24 +02:00
3e7f7ff636 Update app.py 2025-05-31 23:19:11 +02:00
e1390a8adc fix event filtering 2025-05-31 23:15:46 +02:00
1c74706736 Update entrypoint.sh 2025-05-31 23:12:35 +02:00
58c23a6380 notif changes 2025-05-31 23:10:00 +02:00
779e81346b unread notifs 2025-05-31 23:08:38 +02:00
08a11c240d room notifications 2025-05-31 22:58:31 +02:00
c452a920b1 fixed editing members 2025-05-31 22:53:52 +02:00
fda5655533 added extra log 2025-05-31 22:19:59 +02:00
ac49c842b8 Update base.html 2025-05-31 19:17:58 +02:00
a9c0debd6c unified dashboard cards view all buttons 2025-05-31 19:10:41 +02:00
c2f06a8e15 Update contacts.html 2025-05-31 19:09:41 +02:00
2c9b302a69 added events to event system 2025-05-31 19:07:29 +02:00
224d4d400e Add notif page 2025-05-31 18:28:53 +02:00
5e5d1beb5e fix csrf token on profile 2025-05-31 18:15:50 +02:00
4e6bf7b03c Btter version for convos 2025-05-31 13:02:56 +02:00
4bd5180b87 storage cards on dash 2025-05-31 13:00:06 +02:00
90bca4c93b dashboard improvement 2025-05-31 12:53:41 +02:00
36695c1398 fix some style issues 2025-05-31 12:34:16 +02:00
fb2837e523 change checkbox colors 2025-05-31 12:28:32 +02:00
45a1bc07c6 search inside folders 2025-05-31 12:24:48 +02:00
4494ebdeb3 centered modal 2025-05-31 12:20:35 +02:00
4bb776f801 fix paging on events 2025-05-31 12:18:52 +02:00
e0be56a7f4 fix file rename 2025-05-31 12:11:06 +02:00
821330eba5 Fix name not in rename modal 2025-05-31 12:09:24 +02:00
f13f5a1e08 fix upload multi 2025-05-30 23:31:14 +02:00
0d5fd83e01 fixed filters 2025-05-30 21:31:25 +02:00
50f7e115d6 fox more event logging 2025-05-30 21:19:48 +02:00
f7853f96ed fixing logs on multiple rooms 2025-05-30 21:08:58 +02:00
a08345e676 added a lot of logging 2025-05-30 20:43:14 +02:00
c09a5c758e fix a bunch is settings 2025-05-30 20:32:40 +02:00
43f29f9a46 update settings logs 2025-05-30 13:53:18 +02:00
24612879a1 Update conversations.html 2025-05-30 13:52:13 +02:00
7723cd0d70 logging auth, conversations, and contacts 2025-05-30 13:48:07 +02:00
9159817947 fix all csfr token issues 2025-05-30 13:22:51 +02:00
fee79c6ec7 Documentation in room files py 2025-05-30 12:57:25 +02:00
986db28494 user update logs 2025-05-29 23:02:30 +02:00
37fcc5f34c fixed some issues with profile and events 2025-05-29 22:40:59 +02:00
8f24e21d5d user logging 2025-05-29 22:33:05 +02:00
5dbdd43785 utils and event logging 2025-05-29 15:19:42 +02:00
6d959ac253 Revert "Added events system"
This reverts commit f00d569db3.
2025-05-29 14:45:52 +02:00
f00d569db3 Added events system 2025-05-29 14:27:15 +02:00
3174f8fa5b add events table 2025-05-29 13:57:28 +02:00
5ecb8c956c add notification page 2025-05-29 10:16:58 +02:00
096a70bb5d better mobile dash 2025-05-28 22:45:43 +02:00
4f8261bda9 cache busting JS files 2025-05-28 21:55:26 +02:00
c8dd4ac165 cache busting on CSS files 2025-05-28 21:50:18 +02:00
b70e4624cb mobile settings for base template 2025-05-28 21:36:03 +02:00
5c5829c487 documentation for all JS files 2025-05-28 16:01:18 +02:00
1134f5b099 restore starring 2025-05-28 14:24:53 +02:00
6272f71355 better creator badges 2025-05-28 14:10:33 +02:00
082924a3ba fixed messaging! 2025-05-28 14:06:36 +02:00
2a1b6f8a22 fix trashing in rooms 2025-05-28 13:45:32 +02:00
d77dcec068 fix move 2025-05-28 12:32:40 +02:00
ef4b4ab39f fix downloads 2025-05-28 12:13:56 +02:00
552d1feb2e fix file batch deletion 2025-05-28 12:09:56 +02:00
9b98370989 Started room separation 2025-05-28 11:37:25 +02:00
11446e00db debugger cleanup 2025-05-28 10:06:30 +02:00
d4465c20a8 rooms list separation 2025-05-28 10:05:00 +02:00
92bf70974f room member separation 2025-05-28 10:03:47 +02:00
71072994b5 Update profile.html 2025-05-28 10:02:38 +02:00
b091f1bb4e convo manager separation 2025-05-28 10:01:04 +02:00
c9c0eba15b convo-list separation 2025-05-28 09:59:40 +02:00
5c5d03e60c conversation scripts and style separation 2025-05-28 09:58:47 +02:00
56d9b5e95b contact list JS separation 2025-05-28 09:55:16 +02:00
e20af39e83 contact form JS separation 2025-05-28 09:53:57 +02:00
437a054d3b color-logger script 2025-05-28 09:51:52 +02:00
669a96174c Revert "Update conversation.html"
This reverts commit d76bee84f9.
2025-05-27 16:52:30 +02:00
d76bee84f9 Update conversation.html 2025-05-27 16:43:40 +02:00
348a1dd601 better spinner icon 2025-05-27 16:25:40 +02:00
c0d93fe6ac Update conversation.html 2025-05-27 16:21:08 +02:00
c12ccaab53 Update conversation.html 2025-05-27 16:15:39 +02:00
45b3fb0cd6 implement socket heartbeat 2025-05-27 16:12:15 +02:00
e9b1fb6577 socketio changes 2025-05-27 16:06:59 +02:00
26572b740e Update conversation.html 2025-05-27 16:02:21 +02:00
ca0c3ef4bd Update conversation.html 2025-05-27 15:58:00 +02:00
37cc454804 Update conversation.html 2025-05-27 15:52:14 +02:00
586337ceec Update conversation.html 2025-05-27 15:49:49 +02:00
5bb37da909 Update room.html 2025-05-27 15:22:43 +02:00
737da6c8d9 Update base.css 2025-05-27 15:17:00 +02:00
149487195b enforce password change if password is changeme 2025-05-27 15:13:36 +02:00
071b8ca2aa Fix new user creation 2025-05-27 15:01:12 +02:00
95407cc3d7 remove flash messages 2025-05-27 14:53:45 +02:00
160794404e fix info navigation to room 2025-05-27 14:52:02 +02:00
91a70a720c fixed star colours 2025-05-27 14:49:55 +02:00
17e1c4181f Revert "add logging for cache busting colors"
This reverts commit 43e1ea37d5.
2025-05-27 14:43:32 +02:00
43e1ea37d5 add logging for cache busting colors 2025-05-27 14:40:22 +02:00
1a9d848459 fix more colors 2025-05-27 14:37:23 +02:00
0c745e7544 Fix colour system 2025-05-27 14:36:40 +02:00
60582d4520 Fix xolours and dashboard 2025-05-27 13:30:44 +02:00
9dc5fbdebc fix sync issue in debug tab 2025-05-27 13:25:19 +02:00
fe66775dc8 Add database cleanup buttons 2025-05-27 12:07:43 +02:00
3e3451adda permanent delete folders fix 2025-05-27 11:55:54 +02:00
f0a2f28f8e fix folder rename not retaining files 2025-05-27 11:52:51 +02:00
dca23787e4 added file sync button 2025-05-27 11:52:02 +02:00
a67470d616 Fix permanent delete 2025-05-27 11:43:10 +02:00
e5d54c499b fixed eventlistener setups based on permissions 2025-05-27 11:31:58 +02:00
65c71ced4d fix permission on room 2025-05-27 11:12:48 +02:00
5cd470bf5a fix broken dash 2025-05-27 11:09:53 +02:00
9860a9a36c fix some errors 2025-05-27 10:53:06 +02:00
a987a551a1 Update conversations.html 2025-05-27 10:40:48 +02:00
7ecb3ae400 fix dashboard loading issue on docker 2025-05-27 10:38:35 +02:00
674596782d fix migrations 2025-05-26 22:30:03 +02:00
a71fb202bb Update docker-compose.yml 2025-05-26 22:28:20 +02:00
12f73c46a7 Update docker-compose.yml 2025-05-26 22:27:22 +02:00
35f6dd4827 fix for last name and company 2025-05-26 22:23:51 +02:00
791b232c0a add default admin user 2025-05-26 22:20:52 +02:00
0e03681d82 Update entrypoint.sh 2025-05-26 22:08:20 +02:00
fd356fbd1c add default user for testing 2025-05-26 22:06:13 +02:00
a198fa3e0e login improvements 2025-05-26 21:50:49 +02:00
029815c218 fix search on rooms 2025-05-26 21:41:47 +02:00
81a97dafae fix csfr token on contact delete 2025-05-26 21:38:33 +02:00
0aadd1f5e9 Fix trash 2025-05-26 21:36:27 +02:00
c00fe16b94 fix issues with login 2025-05-26 21:27:29 +02:00
8e55893abb Update docker-compose.yml 2025-05-26 21:19:42 +02:00
084f5526a4 Update docker-compose.yml 2025-05-26 20:54:41 +02:00
b7a3059426 Update docker-compose.yml 2025-05-26 20:52:29 +02:00
cac35a53c6 Update docker-compose.yml 2025-05-26 20:49:31 +02:00
661534692d Update docker-compose.yml 2025-05-26 20:48:28 +02:00
517a063747 Update docker-compose.yml 2025-05-26 20:47:22 +02:00
ab4dcaa199 Update docker-compose.yml 2025-05-26 20:46:59 +02:00
710964fe72 Update docker-compose.yml 2025-05-26 20:46:08 +02:00
e26615e7c8 Update docker-compose.yml 2025-05-26 20:45:05 +02:00
7602a2a930 Delete stack.env 2025-05-26 20:43:43 +02:00
fa76e7044c Update docker-compose.yml 2025-05-26 20:42:54 +02:00
20d834b1dc no more env 2025-05-26 20:10:55 +02:00
47df04b078 delete trailing space 2025-05-26 20:08:13 +02:00
df0ace6e31 Update docker-compose.yml 2025-05-26 17:34:37 +02:00
9e0e17a8cd env 2025-05-26 17:11:52 +02:00
f546430daa env fixes again 2025-05-26 16:13:44 +02:00
6e62e21b0c try fix launch order 2025-05-26 16:12:06 +02:00
b0ed651abe Update app.py 2025-05-26 16:10:32 +02:00
1814c6669a other method to do env 2025-05-26 16:09:06 +02:00
4b8396a3bc set up docker env 2025-05-26 15:58:45 +02:00
76e542485a Update entrypoint.sh 2025-05-26 15:50:37 +02:00
7b60927941 fix migrations hopefully 2025-05-26 15:49:22 +02:00
0bf66d4430 enable docker DB migrations 2025-05-26 15:45:23 +02:00
8832fe3061 Update docker-compose.yml 2025-05-26 15:44:15 +02:00
aee26682db fix db connection 2025-05-26 15:44:06 +02:00
331 changed files with 36955 additions and 3782 deletions

5
.gitignore vendored
View File

@@ -26,4 +26,7 @@ logs/
*.log
# Testing
coverage/
coverage/
# Python cache
__pycache__/

View File

@@ -1,26 +1,38 @@
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
build-essential \
libpq-dev \
postgresql-client \
curl \
netcat-traditional \
dos2unix \
&& rm -rf /var/lib/apt/lists/*
# Create a non-root user
RUN useradd -m -u 1000 celery
# Set working directory
WORKDIR /app
# Copy requirements first to leverage Docker cache
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy the rest of the application
# Copy application code
COPY . .
# Set environment variables
ENV FLASK_APP=app.py
ENV FLASK_ENV=development
# Convert line endings and set permissions
RUN dos2unix /app/entrypoint.sh && \
chmod +x /app/entrypoint.sh && \
mkdir -p /app/uploads/rooms /app/uploads/profile_pics /app/static/uploads && \
chown -R celery:celery /app && \
chmod -R 755 /app/uploads
# Expose the port the app runs on
EXPOSE 5000
# Switch to non-root user
USER celery
# Command to run the application
# Set entrypoint
ENTRYPOINT ["/app/entrypoint.sh"]
CMD ["gunicorn", "--bind", "0.0.0.0:5000", "app:app"]

274
NGINX_swagger.json Normal file
View File

@@ -0,0 +1,274 @@
{
"openapi": "3.1.0",
"info": {
"title": "Nginx Proxy Manager API",
"version": "2.x.x"
},
"servers": [
{
"url": "http://127.0.0.1:81/api"
}
],
"components": {
"securitySchemes": {
"bearerAuth": {
"type": "http",
"scheme": "bearer",
"bearerFormat": "JWT"
}
}
},
"paths": {
"/": {
"get": {
"$ref": "./paths/get.json"
}
},
"/audit-log": {
"get": {
"$ref": "./paths/audit-log/get.json"
}
},
"/nginx/access-lists": {
"get": {
"$ref": "./paths/nginx/access-lists/get.json"
},
"post": {
"$ref": "./paths/nginx/access-lists/post.json"
}
},
"/nginx/access-lists/{listID}": {
"get": {
"$ref": "./paths/nginx/access-lists/listID/get.json"
},
"put": {
"$ref": "./paths/nginx/access-lists/listID/put.json"
},
"delete": {
"$ref": "./paths/nginx/access-lists/listID/delete.json"
}
},
"/nginx/certificates": {
"get": {
"$ref": "./paths/nginx/certificates/get.json"
},
"post": {
"$ref": "./paths/nginx/certificates/post.json"
}
},
"/nginx/certificates/validate": {
"post": {
"$ref": "./paths/nginx/certificates/validate/post.json"
}
},
"/nginx/certificates/test-http": {
"get": {
"$ref": "./paths/nginx/certificates/test-http/get.json"
}
},
"/nginx/certificates/{certID}": {
"get": {
"$ref": "./paths/nginx/certificates/certID/get.json"
},
"delete": {
"$ref": "./paths/nginx/certificates/certID/delete.json"
}
},
"/nginx/certificates/{certID}/download": {
"get": {
"$ref": "./paths/nginx/certificates/certID/download/get.json"
}
},
"/nginx/certificates/{certID}/renew": {
"post": {
"$ref": "./paths/nginx/certificates/certID/renew/post.json"
}
},
"/nginx/certificates/{certID}/upload": {
"post": {
"$ref": "./paths/nginx/certificates/certID/upload/post.json"
}
},
"/nginx/proxy-hosts": {
"get": {
"$ref": "./paths/nginx/proxy-hosts/get.json"
},
"post": {
"$ref": "./paths/nginx/proxy-hosts/post.json"
}
},
"/nginx/proxy-hosts/{hostID}": {
"get": {
"$ref": "./paths/nginx/proxy-hosts/hostID/get.json"
},
"put": {
"$ref": "./paths/nginx/proxy-hosts/hostID/put.json"
},
"delete": {
"$ref": "./paths/nginx/proxy-hosts/hostID/delete.json"
}
},
"/nginx/proxy-hosts/{hostID}/enable": {
"post": {
"$ref": "./paths/nginx/proxy-hosts/hostID/enable/post.json"
}
},
"/nginx/proxy-hosts/{hostID}/disable": {
"post": {
"$ref": "./paths/nginx/proxy-hosts/hostID/disable/post.json"
}
},
"/nginx/redirection-hosts": {
"get": {
"$ref": "./paths/nginx/redirection-hosts/get.json"
},
"post": {
"$ref": "./paths/nginx/redirection-hosts/post.json"
}
},
"/nginx/redirection-hosts/{hostID}": {
"get": {
"$ref": "./paths/nginx/redirection-hosts/hostID/get.json"
},
"put": {
"$ref": "./paths/nginx/redirection-hosts/hostID/put.json"
},
"delete": {
"$ref": "./paths/nginx/redirection-hosts/hostID/delete.json"
}
},
"/nginx/redirection-hosts/{hostID}/enable": {
"post": {
"$ref": "./paths/nginx/redirection-hosts/hostID/enable/post.json"
}
},
"/nginx/redirection-hosts/{hostID}/disable": {
"post": {
"$ref": "./paths/nginx/redirection-hosts/hostID/disable/post.json"
}
},
"/nginx/dead-hosts": {
"get": {
"$ref": "./paths/nginx/dead-hosts/get.json"
},
"post": {
"$ref": "./paths/nginx/dead-hosts/post.json"
}
},
"/nginx/dead-hosts/{hostID}": {
"get": {
"$ref": "./paths/nginx/dead-hosts/hostID/get.json"
},
"put": {
"$ref": "./paths/nginx/dead-hosts/hostID/put.json"
},
"delete": {
"$ref": "./paths/nginx/dead-hosts/hostID/delete.json"
}
},
"/nginx/dead-hosts/{hostID}/enable": {
"post": {
"$ref": "./paths/nginx/dead-hosts/hostID/enable/post.json"
}
},
"/nginx/dead-hosts/{hostID}/disable": {
"post": {
"$ref": "./paths/nginx/dead-hosts/hostID/disable/post.json"
}
},
"/nginx/streams": {
"get": {
"$ref": "./paths/nginx/streams/get.json"
},
"post": {
"$ref": "./paths/nginx/streams/post.json"
}
},
"/nginx/streams/{streamID}": {
"get": {
"$ref": "./paths/nginx/streams/streamID/get.json"
},
"put": {
"$ref": "./paths/nginx/streams/streamID/put.json"
},
"delete": {
"$ref": "./paths/nginx/streams/streamID/delete.json"
}
},
"/nginx/streams/{streamID}/enable": {
"post": {
"$ref": "./paths/nginx/streams/streamID/enable/post.json"
}
},
"/nginx/streams/{streamID}/disable": {
"post": {
"$ref": "./paths/nginx/streams/streamID/disable/post.json"
}
},
"/reports/hosts": {
"get": {
"$ref": "./paths/reports/hosts/get.json"
}
},
"/schema": {
"get": {
"$ref": "./paths/schema/get.json"
}
},
"/settings": {
"get": {
"$ref": "./paths/settings/get.json"
}
},
"/settings/{settingID}": {
"get": {
"$ref": "./paths/settings/settingID/get.json"
},
"put": {
"$ref": "./paths/settings/settingID/put.json"
}
},
"/tokens": {
"get": {
"$ref": "./paths/tokens/get.json"
},
"post": {
"$ref": "./paths/tokens/post.json"
}
},
"/users": {
"get": {
"$ref": "./paths/users/get.json"
},
"post": {
"$ref": "./paths/users/post.json"
}
},
"/users/{userID}": {
"get": {
"$ref": "./paths/users/userID/get.json"
},
"put": {
"$ref": "./paths/users/userID/put.json"
},
"delete": {
"$ref": "./paths/users/userID/delete.json"
}
},
"/users/{userID}/auth": {
"put": {
"$ref": "./paths/users/userID/auth/put.json"
}
},
"/users/{userID}/permissions": {
"put": {
"$ref": "./paths/users/userID/permissions/put.json"
}
},
"/users/{userID}/login": {
"post": {
"$ref": "./paths/users/userID/login/post.json"
}
}
}
}

View File

@@ -10,8 +10,9 @@ DocuPulse is a powerful document management system designed to streamline docume
### Prerequisites
- Node.js (version 18 or higher)
- npm or yarn
- Python 3.11 or higher
- PostgreSQL 13 or higher
- Docker and Docker Compose (for containerized deployment)
### Installation
@@ -23,18 +24,50 @@ cd docupulse
2. Install dependencies:
```bash
npm install
# or
yarn install
pip install -r requirements.txt
```
3. Start the development server:
3. Set up environment variables:
```bash
npm run dev
# or
yarn dev
# Copy example environment file
cp .env.example .env
# Set version information for local development
python set_version.py
```
4. Initialize the database:
```bash
flask db upgrade
flask create-admin
```
5. Start the development server:
```bash
python app.py
```
## Version Tracking
DocuPulse uses a database-only approach for version tracking:
- **Environment Variables**: Version information is passed via environment variables (`APP_VERSION`, `GIT_COMMIT`, `GIT_BRANCH`, `DEPLOYED_AT`)
- **Database Storage**: Instance version information is stored in the `instances` table
- **API Endpoint**: Version information is available via `/api/version`
### Setting Version Information
For local development:
```bash
python set_version.py
```
For production deployments, set the following environment variables:
- `APP_VERSION`: Application version/tag
- `GIT_COMMIT`: Git commit hash
- `GIT_BRANCH`: Git branch name
- `DEPLOYED_AT`: Deployment timestamp
## Features
- Document upload and management
@@ -42,6 +75,8 @@ yarn dev
- Secure document storage
- User authentication and authorization
- Document version control
- Multi-tenant instance management
- RESTful API
## Contributing

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

175
app.py
View File

@@ -1,29 +1,45 @@
from flask import Flask, send_from_directory
import random
from flask import Flask, send_from_directory, jsonify
from flask_migrate import Migrate
from dotenv import load_dotenv
import os
from models import User
from models import User, SiteSettings
from flask_wtf.csrf import generate_csrf
from routes.room_files import room_files_bp
from routes.user import user_bp
from routes.room_members import room_members_bp
from routes.trash import trash_bp
from routes.admin_api import admin_api
from routes.launch_api import launch_api
from tasks import cleanup_trash
import click
from utils import timeago
from extensions import db, login_manager, csrf, socketio
from extensions import db, login_manager, csrf
from utils.email_templates import create_default_templates
from datetime import datetime
from sqlalchemy import text
from utils.asset_utils import get_asset_version
# Load environment variables
load_dotenv()
print("Environment variables after loading .env:")
print(f"MASTER: {os.getenv('MASTER')}")
print(f"ISMASTER: {os.getenv('ISMASTER')}")
def create_app():
app = Flask(__name__)
# Configure the database
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:1253@localhost:5432/docupulse'
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL', 'postgresql://postgres:1253@localhost:5432/docupulse')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = 'your-secret-key-here'
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', 'your-secure-secret-key-here')
app.config['UPLOAD_FOLDER'] = os.path.join(app.root_path, 'static', 'uploads')
app.config['CSS_VERSION'] = os.getenv('CSS_VERSION', '1.0.3') # Add CSS version for cache busting
app.config['SERVER_NAME'] = os.getenv('SERVER_NAME', '127.0.0.1:5000')
app.config['PREFERRED_URL_SCHEME'] = os.getenv('PREFERRED_URL_SCHEME', 'http')
# Configure request timeouts for long-running operations
app.config['REQUEST_TIMEOUT'] = int(os.getenv('REQUEST_TIMEOUT', '300')) # 5 minutes default
app.config['STACK_DEPLOYMENT_TIMEOUT'] = int(os.getenv('STACK_DEPLOYMENT_TIMEOUT', '300')) # 5 minutes for stack deployment
# Initialize extensions
db.init_app(app)
@@ -31,24 +47,68 @@ def create_app():
login_manager.init_app(app)
login_manager.login_view = 'auth.login'
csrf.init_app(app)
socketio.init_app(app)
@app.context_processor
def inject_csrf_token():
return dict(csrf_token=generate_csrf())
@app.context_processor
def inject_config():
site_settings = SiteSettings.query.first()
if not site_settings:
site_settings = SiteSettings()
db.session.add(site_settings)
db.session.commit()
return dict(config=app.config, site_settings=site_settings)
@app.context_processor
def inject_unread_notifications():
from flask_login import current_user
from utils import get_unread_count
if current_user.is_authenticated:
unread_count = get_unread_count(current_user.id)
return {'unread_notifications': unread_count}
return {'unread_notifications': 0}
@app.template_filter('asset_version')
def asset_version_filter(filename):
"""Template filter to get version hash for static assets"""
return get_asset_version(filename) or ''
# User loader for Flask-Login
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
# Health check endpoint
@app.route('/health')
def health_check():
try:
# Check database connection with a timeout
db.session.execute(text('SELECT 1'))
db.session.commit()
return jsonify({
'status': 'healthy',
'database': 'connected',
'timestamp': datetime.utcnow().isoformat()
}), 200
except Exception as e:
app.logger.error(f"Health check failed: {str(e)}")
return jsonify({
'status': 'unhealthy',
'error': str(e),
'timestamp': datetime.utcnow().isoformat()
}), 500
# Initialize routes
from routes import init_app
init_app(app)
app.register_blueprint(room_files_bp, url_prefix='/api/rooms')
app.register_blueprint(room_members_bp, url_prefix='/api/rooms')
app.register_blueprint(trash_bp, url_prefix='/api/rooms')
app.register_blueprint(user_bp)
app.register_blueprint(trash_bp, url_prefix='/api/trash')
app.register_blueprint(admin_api, url_prefix='/api/admin')
app.register_blueprint(launch_api, url_prefix='/api/admin')
@app.cli.command("cleanup-trash")
def cleanup_trash_command():
@@ -56,16 +116,109 @@ def create_app():
cleanup_trash()
click.echo("Trash cleanup completed.")
@app.cli.command("cleanup-tokens")
def cleanup_tokens_command():
"""Clean up expired password reset and setup tokens."""
from tasks import cleanup_expired_tokens
cleanup_expired_tokens()
click.echo("Token cleanup completed.")
@app.cli.command("create-admin")
def create_admin():
    """Create the default administrator user."""
    existing = User.query.filter_by(email='administrator@docupulse.com').first()
    if existing is not None:
        click.echo("Admin user already exists.")
        return

    admin = User(
        username='administrator',
        email='administrator@docupulse.com',
        last_name='Administrator',
        company='DocuPulse',
        position='System Administrator',
        is_admin=True,
        is_active=True,
        preferred_view='grid'
    )
    # Well-known default password; operators are expected to change it.
    admin.set_password('changeme')
    db.session.add(admin)
    db.session.commit()

    for line in (
        "Default administrator user created successfully.",
        "Admin credentials:",
        "Email: administrator@docupulse.com",
        "Password: changeme",
    ):
        click.echo(line)
# Register custom filters
app.jinja_env.filters['timeago'] = timeago
# Create default email templates if they don't exist
# NOTE(review): this bootstrap runs at import/app-creation time; with
# multiple gunicorn workers it may race — the entrypoint script appears
# to handle that case separately. Nesting below is reconstructed from
# control flow (the source paste had its indentation stripped).
with app.app_context():
    try:
        # Ensure database tables exist
        db.create_all()
        # Create admin user first
        admin = User.query.filter_by(email='administrator@docupulse.com').first()
        if not admin:
            admin = User(
                username='administrator',
                email='administrator@docupulse.com',
                last_name='Administrator',
                company='DocuPulse',
                position='System Administrator',
                is_admin=True,
                is_active=True,
                preferred_view='grid'
            )
            admin.set_password('changeme')
            db.session.add(admin)
            db.session.commit()
            print("Default administrator user created successfully.")
            print("Admin credentials:")
            print("Email: administrator@docupulse.com")
            print("Password: changeme")
        # Then create default templates
        create_default_templates()
    except Exception as e:
        # Best-effort bootstrap: app start-up continues even if seeding fails.
        print(f"Warning: Could not create default templates: {e}")
return app
app = create_app()
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page for unknown URLs."""
    from flask import render_template
    body = render_template('common/404.html')
    return body, 404
@app.errorhandler(403)
def forbidden(e):
    """Render the custom 403 (access denied) page."""
    from flask import render_template
    body = render_template('common/403.html')
    return body, 403
@app.errorhandler(401)
def unauthorized(e):
    """Render the custom 401 (login required) page."""
    from flask import render_template
    body = render_template('common/401.html')
    return body, 401
@app.errorhandler(400)
def bad_request(e):
    """Render the custom 400 (malformed request) page."""
    from flask import render_template
    body = render_template('common/400.html')
    return body, 400
@app.errorhandler(500)
def internal_server_error(e):
    """Log the full traceback and render the 500 page with diagnostics.

    NOTE(review): the raw traceback is handed to the template; if
    500.html echoes it, internal details leak to end users — confirm
    this is intended for production.
    """
    from flask import render_template
    import traceback
    error_details = f"{str(e)}\n\n{traceback.format_exc()}"
    app.logger.error(f"500 error: {error_details}")
    return render_template('common/500.html', error=error_details), 500
@app.route('/uploads/profile_pics/<filename>')
def profile_pic(filename):
    """Serve a stored profile picture from the uploads directory.

    The original body contained a second, unreachable ``return`` pointing
    at the hard-coded path ``/app/uploads/profile_pics`` — dead code left
    behind by a merge. Only the first path ever executed, so the dead line
    is removed; runtime behavior is unchanged. (send_from_directory also
    protects against path traversal in *filename*.)
    """
    return send_from_directory(os.path.join(os.getcwd(), 'uploads', 'profile_pics'), filename)
if __name__ == '__main__':
    socketio.run(app, debug=True)
    # NOTE(review): unreachable in practice — socketio.run() blocks until
    # shutdown, so app.run() never executes; looks like leftover from a
    # merge (the extensions diff elsewhere removes socketio). Confirm
    # which runner is intended and delete the other.
    app.run(debug=True)

11
create_notifs_table.py Normal file
View File

@@ -0,0 +1,11 @@
from app import app, db
from models import Notif


def create_notifs_table():
    """One-off helper: create the notifications table from the Notif model.

    Uses the model's own table metadata rather than a migration; fails if
    the table already exists (no existence guard here).
    """
    with app.app_context():
        # Create the table
        Notif.__table__.create(db.engine)
        print("Notifications table created successfully!")


if __name__ == '__main__':
    create_notifs_table()

View File

@@ -1,28 +1,68 @@
version: '3.8'
networks:
docupulse_network:
driver: bridge
services:
web:
build: .
build:
# context: .
# dockerfile: Dockerfile
context: https://git.kobeamerijckx.com/Kobe/docupulse.git
dockerfile: Dockerfile
ports:
- "10335:5000"
volumes:
- ./uploads:/app/uploads
- "${PORT:-10335}:5000"
environment:
- FLASK_APP=app.py
- FLASK_ENV=development
- UPLOAD_FOLDER=/app/uploads
- FLASK_ENV=production
- DATABASE_URL=postgresql://docupulse_${PORT:-10335}:docupulse_${PORT:-10335}@db:5432/docupulse_${PORT:-10335}
- POSTGRES_USER=docupulse_${PORT:-10335}
- POSTGRES_PASSWORD=docupulse_${PORT:-10335}
- POSTGRES_DB=docupulse_${PORT:-10335}
- MASTER=${ISMASTER:-false}
- APP_VERSION=${APP_VERSION:-unknown}
- GIT_COMMIT=${GIT_COMMIT:-unknown}
- GIT_BRANCH=${GIT_BRANCH:-unknown}
- DEPLOYED_AT=${DEPLOYED_AT:-unknown}
volumes:
- docupulse_uploads:/app/uploads
depends_on:
- db
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5000/health"]
interval: 60s
timeout: 30s
retries: 3
start_period: 120s
deploy:
resources:
limits:
cpus: '1'
memory: 1G
networks:
- docupulse_network
db:
image: postgres:13
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
- POSTGRES_DB=docupulse
- POSTGRES_USER=docupulse_${PORT:-10335}
- POSTGRES_PASSWORD=docupulse_${PORT:-10335}
- POSTGRES_DB=docupulse_${PORT:-10335}
volumes:
- postgres_data:/var/lib/postgresql/data
- docupulse_postgres_data:/var/lib/postgresql/data
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -U docupulse_${PORT:-10335}"]
interval: 30s
timeout: 10s
retries: 3
networks:
- docupulse_network
volumes:
postgres_data:
uploads:
docupulse_postgres_data:
name: docupulse_${PORT:-10335}_postgres_data
docupulse_uploads:
name: docupulse_${PORT:-10335}_uploads

151
entrypoint.sh Normal file
View File

@@ -0,0 +1,151 @@
#!/bin/bash
# Print environment variables for debugging
echo "Environment variables:"
echo "POSTGRES_USER: $POSTGRES_USER"
echo "POSTGRES_PASSWORD: $POSTGRES_PASSWORD"
echo "POSTGRES_DB: $POSTGRES_DB"
echo "DATABASE_URL: $DATABASE_URL"
# Function to wait for database
wait_for_db() {
echo "Waiting for database..."
while ! nc -z db 5432; do
sleep 1
done
echo "Database is ready!"
}
# Function to create database if it doesn't exist
create_database() {
echo "Creating database if it doesn't exist..."
PGPASSWORD=$POSTGRES_PASSWORD psql -h db -U $POSTGRES_USER -tc "SELECT 1 FROM pg_database WHERE datname = '$POSTGRES_DB'" | grep -q 1 || \
PGPASSWORD=$POSTGRES_PASSWORD psql -h db -U $POSTGRES_USER -c "CREATE DATABASE $POSTGRES_DB"
echo "Database check/creation complete!"
}
# Wait for database to be ready
wait_for_db
# Create database if it doesn't exist
create_database
# Wait for PostgreSQL to be ready to accept connections
echo "Waiting for PostgreSQL to accept connections..."
until PGPASSWORD=$POSTGRES_PASSWORD psql -h db -U $POSTGRES_USER -d $POSTGRES_DB -c '\q'; do
echo "PostgreSQL is unavailable - sleeping"
sleep 1
done
echo "PostgreSQL is up - executing command"
# Run all initialization in a single Python script to avoid multiple Flask instances
echo "Running initialization..."
python3 -c "
import sys
from app import create_app
from models import SiteSettings, db, User
from utils.email_templates import create_default_templates
def log_error(message, error=None):
print(f'ERROR: {message}', file=sys.stderr)
if error:
print(f'Error details: {str(error)}', file=sys.stderr)
app = create_app()
with app.app_context():
try:
# Run migrations
print('Running database migrations...')
from flask_migrate import upgrade
upgrade()
print('Database migrations completed successfully')
# Create default site settings
print('Creating default site settings...')
try:
settings = SiteSettings.get_settings()
print('Default site settings created successfully')
except Exception as e:
log_error('Error creating site settings', e)
# Create admin user if it doesn't exist
print('Creating admin user...')
try:
# Check for admin user by both username and email to avoid constraint violations
admin_by_username = User.query.filter_by(username='administrator').first()
admin_by_email = User.query.filter_by(email='administrator@docupulse.com').first()
if admin_by_username and admin_by_email and admin_by_username.id == admin_by_email.id:
print('Admin user already exists (found by both username and email).')
print('Admin credentials:')
print('Email: administrator@docupulse.com')
print('Password: changeme')
elif admin_by_username or admin_by_email:
print('WARNING: Found partial admin user data:')
if admin_by_username:
print(f' - Found user with username "administrator" (ID: {admin_by_username.id})')
if admin_by_email:
print(f' - Found user with email "administrator@docupulse.com" (ID: {admin_by_email.id})')
print('Admin credentials:')
print('Email: administrator@docupulse.com')
print('Password: changeme')
else:
print('Admin user not found, creating new admin user...')
admin = User(
username='administrator',
email='administrator@docupulse.com',
last_name='Administrator',
company='DocuPulse',
position='System Administrator',
is_admin=True,
is_active=True,
preferred_view='grid'
)
admin.set_password('changeme')
print('Admin user object created, attempting to add to database...')
db.session.add(admin)
try:
db.session.commit()
print('Default administrator user created successfully.')
print('Admin credentials:')
print('Email: administrator@docupulse.com')
print('Password: changeme')
except Exception as commit_error:
db.session.rollback()
if 'duplicate key value violates unique constraint' in str(commit_error):
print('WARNING: Admin user creation failed due to duplicate key constraint.')
print('This might indicate a race condition or the user was created by another process.')
print('Checking for existing admin user again...')
# Check again after the failed commit
admin_by_username = User.query.filter_by(username='administrator').first()
admin_by_email = User.query.filter_by(email='administrator@docupulse.com').first()
if admin_by_username or admin_by_email:
print('Admin user now exists (likely created by another process).')
print('Admin credentials:')
print('Email: administrator@docupulse.com')
print('Password: changeme')
else:
log_error('Admin user creation failed and user still not found', commit_error)
raise
else:
log_error('Failed to commit admin user creation', commit_error)
raise
except Exception as e:
log_error('Error during admin user creation/check', e)
raise
# Create default templates
print('Creating default templates...')
try:
create_default_templates()
print('Default templates created successfully')
except Exception as e:
log_error('Error creating default templates', e)
except Exception as e:
log_error('Fatal error during initialization', e)
sys.exit(1)
"
# Start the application
echo "Starting application..."
exec gunicorn --bind 0.0.0.0:5000 app:app

View File

@@ -1,4 +1,3 @@
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_wtf.csrf import CSRFProtect
@@ -6,5 +5,4 @@ from flask_wtf.csrf import CSRFProtect
# Initialize extensions
db = SQLAlchemy()
login_manager = LoginManager()
csrf = CSRFProtect()
socketio = SocketIO(cors_allowed_origins="*")
csrf = CSRFProtect()

View File

@@ -1,5 +1,5 @@
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, BooleanField, SubmitField, PasswordField, SelectMultipleField
from wtforms import StringField, TextAreaField, BooleanField, SubmitField, PasswordField, SelectMultipleField, SelectField
from wtforms.validators import DataRequired, Email, Length, Optional, ValidationError
from models import User
from flask_login import current_user
@@ -13,8 +13,11 @@ class UserForm(FlaskForm):
company = StringField('Company (Optional)', validators=[Optional(), Length(max=100)])
position = StringField('Position (Optional)', validators=[Optional(), Length(max=100)])
notes = TextAreaField('Notes (Optional)', validators=[Optional()])
is_active = BooleanField('Active', default=True)
is_admin = BooleanField('Admin Role', default=False)
role = SelectField('Role', choices=[
('user', 'Standard User'),
('manager', 'Manager'),
('admin', 'Administrator')
], validators=[DataRequired()])
new_password = PasswordField('New Password (Optional)')
confirm_password = PasswordField('Confirm Password (Optional)')
profile_picture = FileField('Profile Picture (Optional)', validators=[FileAllowed(['jpg', 'jpeg', 'png', 'gif'], 'Images only!')])
@@ -31,6 +34,11 @@ class UserForm(FlaskForm):
if total_admins <= 1:
raise ValidationError('There must be at least one admin user in the system.')
def validate_is_manager(self, field):
# Prevent setting both admin and manager roles
if field.data and self.is_admin.data:
raise ValidationError('A user cannot be both an admin and a manager.')
def validate(self, extra_validators=None):
rv = super().validate(extra_validators=extra_validators)
if not rv:
@@ -57,4 +65,18 @@ class ConversationForm(FlaskForm):
def __init__(self, *args, **kwargs):
super(ConversationForm, self).__init__(*args, **kwargs)
self.members.choices = [(u.id, f"{u.username} {u.last_name}") for u in User.query.filter_by(is_active=True).all()]
self.members.choices = [(u.id, f"{u.username} {u.last_name}") for u in User.query.filter_by(is_active=True).all()]
class CompanySettingsForm(FlaskForm):
company_name = StringField('Company Name', validators=[Optional(), Length(max=100)])
company_website = StringField('Website', validators=[Optional(), Length(max=200)])
company_email = StringField('Email', validators=[Optional(), Email(), Length(max=100)])
company_phone = StringField('Phone', validators=[Optional(), Length(max=20)])
company_address = StringField('Address', validators=[Optional(), Length(max=200)])
company_city = StringField('City', validators=[Optional(), Length(max=100)])
company_state = StringField('State', validators=[Optional(), Length(max=100)])
company_zip = StringField('ZIP Code', validators=[Optional(), Length(max=20)])
company_country = StringField('Country', validators=[Optional(), Length(max=100)])
company_description = TextAreaField('Description', validators=[Optional()])
company_industry = StringField('Industry', validators=[Optional(), Length(max=100)])
company_logo = FileField('Company Logo', validators=[FileAllowed(['jpg', 'jpeg', 'png', 'gif'], 'Images only!')])

24
init_admin.py Normal file
View File

@@ -0,0 +1,24 @@
from app import app, db
from models import User


def init_admin():
    """Seed the default administrator account if it does not exist yet."""
    with app.app_context():
        admin = User.query.filter_by(email='administrator@docupulse.com').first()
        if not admin:
            admin = User(
                username='administrator',
                email='administrator@docupulse.com',
                last_name='None',
                company='docupulse',
                is_admin=True,
                is_active=True
            )
            # NOTE(review): hard-coded credential committed to the repo;
            # prefer reading the initial password from an environment
            # variable or generating one at deploy time.
            admin.set_password('q]H488h[8?.A')
            db.session.add(admin)
            db.session.commit()
            print("Default administrator user created successfully.")
        else:
            print("Admin user already exists.")


if __name__ == '__main__':
    init_admin()

Binary file not shown.

View File

@@ -0,0 +1,41 @@
"""add key value settings table
Revision ID: 0a8006bd1732
Revises: 20519a2437c2
Create Date: 2025-06-02 14:10:54.033943
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = '0a8006bd1732'
down_revision = '20519a2437c2'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Existence check keeps the migration idempotent on databases where
    # the table was already created outside Alembic (e.g. db.create_all()).
    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()
    if 'key_value_settings' not in tables:
        op.create_table('key_value_settings',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('key', sa.String(length=100), nullable=False),
            sa.Column('value', sa.Text(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('key')
        )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Mirror the guarded upgrade(): only drop the table when it exists,
    # so downgrading a database where upgrade() was a no-op doesn't fail.
    conn = op.get_bind()
    inspector = inspect(conn)
    if 'key_value_settings' in inspector.get_table_names():
        op.drop_table('key_value_settings')
    # ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-26 14:00:05.521776
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,23 +19,41 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('message')]
with op.batch_alter_table('message', schema=None) as batch_op:
batch_op.add_column(sa.Column('has_attachment', sa.Boolean(), nullable=True))
batch_op.add_column(sa.Column('attachment_name', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('attachment_path', sa.String(length=512), nullable=True))
batch_op.add_column(sa.Column('attachment_type', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('attachment_size', sa.Integer(), nullable=True))
if 'has_attachment' not in columns:
batch_op.add_column(sa.Column('has_attachment', sa.Boolean(), nullable=True))
if 'attachment_name' not in columns:
batch_op.add_column(sa.Column('attachment_name', sa.String(length=255), nullable=True))
if 'attachment_path' not in columns:
batch_op.add_column(sa.Column('attachment_path', sa.String(length=512), nullable=True))
if 'attachment_type' not in columns:
batch_op.add_column(sa.Column('attachment_type', sa.String(length=100), nullable=True))
if 'attachment_size' not in columns:
batch_op.add_column(sa.Column('attachment_size', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('message')]
with op.batch_alter_table('message', schema=None) as batch_op:
batch_op.drop_column('attachment_size')
batch_op.drop_column('attachment_type')
batch_op.drop_column('attachment_path')
batch_op.drop_column('attachment_name')
batch_op.drop_column('has_attachment')
if 'attachment_size' in columns:
batch_op.drop_column('attachment_size')
if 'attachment_type' in columns:
batch_op.drop_column('attachment_type')
if 'attachment_path' in columns:
batch_op.drop_column('attachment_path')
if 'attachment_name' in columns:
batch_op.drop_column('attachment_name')
if 'has_attachment' in columns:
batch_op.drop_column('has_attachment')
# ### end Alembic commands ###

View File

@@ -2,11 +2,12 @@
Revision ID: 1c297825e3a9
Revises:
Create Date: 2025-05-23 08:39:40.494853
Create Date: 2025-06-02 13:26:30.353000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -17,20 +18,27 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=150), nullable=False),
sa.Column('email', sa.String(length=150), nullable=False),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
# ### end Alembic commands ###
# Check if the table exists before creating it
conn = op.get_bind()
inspector = sa.inspect(conn)
if 'user' not in inspector.get_table_names():
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'user' not in tables:
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=150), nullable=False),
sa.Column('email', sa.String(length=150), nullable=False),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('user')
# ### end Alembic commands ###
# ### end Alembic commands ###

View File

@@ -0,0 +1,47 @@
"""add_mails_table
Revision ID: 20519a2437c2
Revises: 444d76da74ba
Create Date: 2025-06-02 09:04:39.972021
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = '20519a2437c2'
down_revision = '444d76da74ba'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Existence check keeps the migration idempotent when the table was
    # already created outside Alembic (e.g. via db.create_all()).
    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()
    if 'mails' not in tables:
        op.create_table('mails',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('recipient', sa.String(length=150), nullable=False),
            sa.Column('subject', sa.String(length=200), nullable=False),
            sa.Column('body', sa.Text(), nullable=False),
            sa.Column('status', sa.String(length=20), nullable=False),
            sa.Column('created_at', sa.DateTime(), nullable=True),
            sa.Column('sent_at', sa.DateTime(), nullable=True),
            sa.Column('template_id', sa.Integer(), nullable=True),
            sa.Column('notif_id', sa.Integer(), nullable=True),
            sa.ForeignKeyConstraint(['notif_id'], ['notifs.id'], ),
            sa.ForeignKeyConstraint(['template_id'], ['email_templates.id'], ),
            sa.PrimaryKeyConstraint('id')
        )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Guarded to mirror upgrade(): dropping only when present keeps this
    # migration reversible on databases where the table was never created.
    conn = op.get_bind()
    inspector = inspect(conn)
    if 'mails' in inspector.get_table_names():
        op.drop_table('mails')
    # ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 16:10:53.731035
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,15 +19,25 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('contact')]
with op.batch_alter_table('contact', schema=None) as batch_op:
batch_op.add_column(sa.Column('is_admin', sa.Boolean(), nullable=True))
if 'is_admin' not in columns:
batch_op.add_column(sa.Column('is_admin', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('contact')]
with op.batch_alter_table('contact', schema=None) as batch_op:
batch_op.drop_column('is_admin')
if 'is_admin' in columns:
batch_op.drop_column('is_admin')
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 21:44:58.832286
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,17 +19,22 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room_member_permissions',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('can_view', sa.Boolean(), nullable=False),
sa.Column('can_upload', sa.Boolean(), nullable=False),
sa.Column('can_delete', sa.Boolean(), nullable=False),
sa.Column('can_share', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room_member_permissions' not in tables:
op.create_table('room_member_permissions',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('can_view', sa.Boolean(), nullable=False),
sa.Column('can_upload', sa.Boolean(), nullable=False),
sa.Column('can_delete', sa.Boolean(), nullable=False),
sa.Column('can_share', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 21:27:17.497481
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,15 +19,24 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room_members',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
with op.batch_alter_table('room', schema=None) as batch_op:
batch_op.drop_column('is_private')
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room_members' not in tables:
op.create_table('room_members',
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('room_id', 'user_id')
)
# Check if is_private column exists before dropping it
columns = [col['name'] for col in inspector.get_columns('room')]
if 'is_private' in columns:
with op.batch_alter_table('room', schema=None) as batch_op:
batch_op.drop_column('is_private')
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 21:25:27.880150
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,16 +19,21 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.Column('is_private', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['created_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room' not in tables:
op.create_table('room',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.Column('is_private', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['created_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-23 09:24:23.926302
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,12 +19,21 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('user')]
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.add_column(sa.Column('phone', sa.String(length=20), nullable=True))
batch_op.add_column(sa.Column('company', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('position', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('notes', sa.Text(), nullable=True))
batch_op.add_column(sa.Column('is_active', sa.Boolean(), nullable=True))
if 'phone' not in columns:
batch_op.add_column(sa.Column('phone', sa.String(length=20), nullable=True))
if 'company' not in columns:
batch_op.add_column(sa.Column('company', sa.String(length=100), nullable=True))
if 'position' not in columns:
batch_op.add_column(sa.Column('position', sa.String(length=100), nullable=True))
if 'notes' not in columns:
batch_op.add_column(sa.Column('notes', sa.Text(), nullable=True))
if 'is_active' not in columns:
batch_op.add_column(sa.Column('is_active', sa.Boolean(), nullable=True))
# ### end Alembic commands ###

View File

@@ -0,0 +1,63 @@
"""add_notifications_table
Revision ID: 444d76da74ba
Revises: c770e08966b4
Create Date: 2025-06-02 08:25:48.241102
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '444d76da74ba'
down_revision = 'c770e08966b4'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()
    # Remove the obsolete template_variables table if it is still around.
    if 'template_variables' in tables:
        op.drop_table('template_variables')
    # Guard table creation like every other migration in this repo, so
    # re-running against a database that already has 'notification'
    # (e.g. created by db.create_all()) is a no-op instead of an error.
    if 'notification' not in tables:
        op.create_table('notification',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('user_id', sa.Integer(), nullable=False),
            sa.Column('title', sa.String(length=200), nullable=False),
            sa.Column('message', sa.Text(), nullable=False),
            sa.Column('type', sa.String(length=50), nullable=False),
            sa.Column('read', sa.Boolean(), nullable=False, server_default='false'),
            sa.Column('created_at', sa.DateTime(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), nullable=True),
            sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
            sa.PrimaryKeyConstraint('id')
        )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Both operations are guarded so the downgrade is safe regardless of
    # which parts of the upgrade actually applied.
    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()
    if 'notification' in tables:
        op.drop_table('notification')
    # Restore the legacy template_variables table dropped by upgrade().
    if 'template_variables' not in tables:
        op.create_table('template_variables',
            sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
            sa.Column('notification_type', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
            sa.Column('variable_name', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
            sa.Column('description', sa.VARCHAR(length=200), autoincrement=False, nullable=False),
            sa.Column('example_value', sa.VARCHAR(length=200), autoincrement=False, nullable=True),
            sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
            sa.PrimaryKeyConstraint('id', name=op.f('template_variables_pkey'))
        )
    # ### end Alembic commands ###

View File

@@ -0,0 +1,24 @@
"""merge heads
Revision ID: 4ee23cb29001
Revises: 72ab6c4c6a5f, add_status_details
Create Date: 2025-06-09 10:04:48.708415
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4ee23cb29001'
down_revision = ('72ab6c4c6a5f', 'add_status_details')
branch_labels = None
depends_on = None
def upgrade():
    # Merge revision only — unifies two migration heads, no schema changes.
    pass
def downgrade():
    # Merge revision only — nothing to revert.
    pass

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-24 10:07:02.159730
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,22 +19,31 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('room_file',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('path', sa.String(length=1024), nullable=False),
sa.Column('type', sa.String(length=10), nullable=False),
sa.Column('size', sa.BigInteger(), nullable=True),
sa.Column('modified', sa.Float(), nullable=True),
sa.Column('uploaded_by', sa.Integer(), nullable=False),
sa.Column('uploaded_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['uploaded_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
with op.batch_alter_table('room_member_permissions', schema=None) as batch_op:
batch_op.drop_column('preferred_view')
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'room_file' not in tables:
op.create_table('room_file',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('room_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('path', sa.String(length=1024), nullable=False),
sa.Column('type', sa.String(length=10), nullable=False),
sa.Column('size', sa.BigInteger(), nullable=True),
sa.Column('modified', sa.Float(), nullable=True),
sa.Column('uploaded_by', sa.Integer(), nullable=False),
sa.Column('uploaded_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['room_id'], ['room.id'], ),
sa.ForeignKeyConstraint(['uploaded_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
# Check if preferred_view column exists before trying to drop it
columns = [col['name'] for col in inspector.get_columns('room_member_permissions')]
if 'preferred_view' in columns:
with op.batch_alter_table('room_member_permissions', schema=None) as batch_op:
batch_op.drop_column('preferred_view')
# ### end Alembic commands ###
@@ -44,4 +54,4 @@ def downgrade():
batch_op.add_column(sa.Column('preferred_view', sa.VARCHAR(length=10), autoincrement=False, nullable=False))
op.drop_table('room_file')
# ### end Alembic commands ###
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-24 18:14:38.320999
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
@@ -18,43 +19,63 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('room_file')]
with op.batch_alter_table('room_file', schema=None) as batch_op:
batch_op.add_column(sa.Column('starred', sa.Boolean(), nullable=True))
batch_op.alter_column('path',
existing_type=sa.VARCHAR(length=1024),
type_=sa.String(length=255),
existing_nullable=False)
batch_op.alter_column('size',
existing_type=sa.BIGINT(),
type_=sa.Integer(),
existing_nullable=True)
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=True)
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
if 'starred' not in columns:
batch_op.add_column(sa.Column('starred', sa.Boolean(), nullable=True))
# Only alter columns if they exist
if 'path' in columns:
batch_op.alter_column('path',
existing_type=sa.VARCHAR(length=1024),
type_=sa.String(length=255),
existing_nullable=False)
if 'size' in columns:
batch_op.alter_column('size',
existing_type=sa.BIGINT(),
type_=sa.Integer(),
existing_nullable=True)
if 'uploaded_by' in columns:
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=True)
if 'uploaded_at' in columns:
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('room_file')]
with op.batch_alter_table('room_file', schema=None) as batch_op:
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=False)
batch_op.alter_column('size',
existing_type=sa.Integer(),
type_=sa.BIGINT(),
existing_nullable=True)
batch_op.alter_column('path',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=1024),
existing_nullable=False)
batch_op.drop_column('starred')
if 'uploaded_at' in columns:
batch_op.alter_column('uploaded_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
if 'uploaded_by' in columns:
batch_op.alter_column('uploaded_by',
existing_type=sa.INTEGER(),
nullable=False)
if 'size' in columns:
batch_op.alter_column('size',
existing_type=sa.Integer(),
type_=sa.BIGINT(),
existing_nullable=True)
if 'path' in columns:
batch_op.alter_column('path',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=1024),
existing_nullable=False)
if 'starred' in columns:
batch_op.drop_column('starred')
# ### end Alembic commands ###

View File

@@ -0,0 +1,32 @@
"""merge heads
Revision ID: 72ab6c4c6a5f
Revises: 0a8006bd1732, add_docupulse_settings, add_manager_role, make_events_user_id_nullable
Create Date: 2025-06-05 14:21:46.046125
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = '72ab6c4c6a5f'
down_revision = ('0a8006bd1732', 'add_docupulse_settings', 'add_manager_role', 'make_events_user_id_nullable')
branch_labels = None
depends_on = None
def upgrade():
# Ensure is_manager column exists
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('user')]
with op.batch_alter_table('user', schema=None) as batch_op:
if 'is_manager' not in columns:
batch_op.add_column(sa.Column('is_manager', sa.Boolean(), nullable=True, server_default='false'))
def downgrade():
pass

View File

@@ -7,6 +7,7 @@ Create Date: 2024-03-19 10:05:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.sql import text

View File

@@ -0,0 +1,24 @@
"""merge heads
Revision ID: 761908f0cacf
Revises: 4ee23cb29001, add_connection_token
Create Date: 2025-06-09 13:57:17.650231
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '761908f0cacf'
down_revision = ('4ee23cb29001', 'add_connection_token')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-25 10:03:03.423064
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.

View File

@@ -7,6 +7,7 @@ Create Date: 2025-05-26 10:42:17.287566
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
@@ -18,17 +19,31 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = inspect(conn)
columns = [col['name'] for col in inspector.get_columns('site_settings')]
with op.batch_alter_table('site_settings', schema=None) as batch_op:
batch_op.add_column(sa.Column('company_website', sa.String(length=200), nullable=True))
batch_op.add_column(sa.Column('company_email', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_phone', sa.String(length=20), nullable=True))
batch_op.add_column(sa.Column('company_address', sa.String(length=200), nullable=True))
batch_op.add_column(sa.Column('company_city', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_state', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_zip', sa.String(length=20), nullable=True))
batch_op.add_column(sa.Column('company_country', sa.String(length=100), nullable=True))
batch_op.add_column(sa.Column('company_description', sa.Text(), nullable=True))
batch_op.add_column(sa.Column('company_industry', sa.String(length=100), nullable=True))
if 'company_website' not in columns:
batch_op.add_column(sa.Column('company_website', sa.String(length=200), nullable=True))
if 'company_email' not in columns:
batch_op.add_column(sa.Column('company_email', sa.String(length=100), nullable=True))
if 'company_phone' not in columns:
batch_op.add_column(sa.Column('company_phone', sa.String(length=20), nullable=True))
if 'company_address' not in columns:
batch_op.add_column(sa.Column('company_address', sa.String(length=200), nullable=True))
if 'company_city' not in columns:
batch_op.add_column(sa.Column('company_city', sa.String(length=100), nullable=True))
if 'company_state' not in columns:
batch_op.add_column(sa.Column('company_state', sa.String(length=100), nullable=True))
if 'company_zip' not in columns:
batch_op.add_column(sa.Column('company_zip', sa.String(length=20), nullable=True))
if 'company_country' not in columns:
batch_op.add_column(sa.Column('company_country', sa.String(length=100), nullable=True))
if 'company_description' not in columns:
batch_op.add_column(sa.Column('company_description', sa.Text(), nullable=True))
if 'company_industry' not in columns:
batch_op.add_column(sa.Column('company_industry', sa.String(length=100), nullable=True))
# ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Create Date: 2024-03-19 10:15:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.sql import text

View File

@@ -7,8 +7,10 @@ Create Date: 2025-05-25 21:16:39.683736
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '9faab7ef6036'
down_revision = 'ca9026520dad'
@@ -18,25 +20,35 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('site_settings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('primary_color', sa.String(length=7), nullable=True),
sa.Column('secondary_color', sa.String(length=7), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'site_settings' not in tables:
op.create_table('site_settings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('primary_color', sa.String(length=7), nullable=True),
sa.Column('secondary_color', sa.String(length=7), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.drop_table('color_settings')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('color_settings',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('primary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('secondary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('color_settings_pkey'))
)
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if 'color_settings' not in tables:
op.create_table('color_settings',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('primary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('secondary_color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('color_settings_pkey'))
)
op.drop_table('site_settings')
# ### end Alembic commands ###
# ### end Alembic commands ###

Some files were not shown because too many files have changed in this diff Show More