2 Commits

SHA1        Message                           Date
d0d436d116  Create timespent.py               2025-08-26 17:57:33 +02:00
36da0717a2  better launch management caching  2025-08-26 09:02:29 +02:00
3 changed files with 69 additions and 2 deletions


@@ -2326,6 +2326,13 @@ def init_routes(main_bp):
     @login_required
     @require_password_change
     def create_dns_records():
+        """
+        Create or update DNS A records in Cloudflare.
+
+        Important: DNS records are created with proxied=False to avoid conflicts
+        with NGINX Proxy Manager. This ensures direct DNS resolution without
+        Cloudflare's proxy layer interfering with the NGINX configuration.
+        """
         if not os.environ.get('MASTER', 'false').lower() == 'true':
             return jsonify({'error': 'Unauthorized'}), 403
@@ -2372,7 +2379,7 @@ def init_routes(main_bp):
                 'name': domain,
                 'content': cloudflare_settings['server_ip'],
                 'ttl': 1,  # Auto TTL
-                'proxied': True
+                'proxied': False  # DNS only - no Cloudflare proxy to avoid conflicts with NGINX
             }
             update_response = requests.put(
@@ -2393,7 +2400,7 @@ def init_routes(main_bp):
                 'name': domain,
                 'content': cloudflare_settings['server_ip'],
                 'ttl': 1,  # Auto TTL
-                'proxied': True
+                'proxied': False  # DNS only - no Cloudflare proxy to avoid conflicts with NGINX
             }
             create_response = requests.post(
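
For reference, a minimal standalone sketch of the same create-or-update flow against Cloudflare's v4 DNS records API. The zone ID, API token, domain, and server IP below are placeholders rather than values from this repo, and error handling is omitted:

    import requests

    # Placeholders - none of these values come from the diff above
    API = "https://api.cloudflare.com/client/v4"
    ZONE_ID = "<zone_id>"
    headers = {"Authorization": "Bearer <api_token>"}

    record = {
        "type": "A",
        "name": "example.com",
        "content": "203.0.113.10",
        "ttl": 1,           # 1 = automatic TTL
        "proxied": False,   # DNS only - keep Cloudflare's proxy out of NGINX's way
    }

    # Look for an existing A record for this name
    existing = requests.get(
        f"{API}/zones/{ZONE_ID}/dns_records",
        headers=headers,
        params={"type": "A", "name": record["name"]},
    ).json().get("result", [])

    if existing:
        # Update in place (the requests.put branch above)
        requests.put(f"{API}/zones/{ZONE_ID}/dns_records/{existing[0]['id']}",
                     headers=headers, json=record)
    else:
        # Create a new record (the requests.post branch above)
        requests.post(f"{API}/zones/{ZONE_ID}/dns_records",
                      headers=headers, json=record)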


@@ -1830,6 +1830,13 @@ function updateStatus(step, message, type = 'info', details = '') {
     }
 }
 
+/**
+ * Create an NGINX proxy host for the specified domains.
+ *
+ * Important: Caching is disabled (caching_enabled: false) to ensure real-time
+ * content delivery and avoid potential issues with cached responses interfering
+ * with dynamic content or authentication.
+ */
 async function createProxyHost(domains, port, sslCertificateId) {
     try {
         // Get NGINX settings from the template
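
Assuming the app is talking to NGINX Proxy Manager's REST API, the body this function builds would land on the /api/nginx/proxy-hosts endpoint. A hedged Python sketch of an equivalent call follows; the NPM URL, bearer token, and forward host/port are placeholders:

    import requests

    # Placeholders - adjust to the actual NPM instance and upstream service
    NPM_URL = "http://localhost:81"
    TOKEN = "<bearer token from POST /api/tokens>"

    payload = {
        "domain_names": ["example.com"],
        "forward_scheme": "http",
        "forward_host": "127.0.0.1",
        "forward_port": 8080,
        "certificate_id": 1,        # the sslCertificateId argument above
        "ssl_forced": True,
        "block_exploits": True,
        "allow_websocket_upgrade": True,
        "caching_enabled": False,   # real-time content; avoid stale cached responses
    }

    resp = requests.post(
        f"{NPM_URL}/api/nginx/proxy-hosts",
        headers={"Authorization": f"Bearer {TOKEN}"},
        json=payload,
    )
    resp.raise_for_status()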

timespent.py (new file, 53 lines)

@@ -0,0 +1,53 @@
import subprocess
from datetime import datetime, timedelta

# Run git log command
log_output = subprocess.check_output(
    ['git', 'log', '--pretty=format:%h %an %ad', '--date=iso'],
    text=True
)

# Parse commit dates
commit_times = []
for line in log_output.splitlines():
    parts = line.strip().split()
    if len(parts) < 5:
        continue
    # Commit hash, author, datetime string; author names may contain
    # spaces, so take the date and time from the end of the line
    dt_str = " ".join(parts[-3:-1])  # "YYYY-MM-DD HH:MM:SS"
    try:
        dt = datetime.strptime(dt_str, "%Y-%m-%d %H:%M:%S")
        commit_times.append(dt)
    except ValueError:
        continue

# Sort commits chronologically
commit_times.sort()

# Session grouping (commits < 1 hour apart are same session)
SESSION_GAP = timedelta(hours=1)
sessions = []
if commit_times:
    start = commit_times[0]
    prev = commit_times[0]
    for t in commit_times[1:]:
        if t - prev > SESSION_GAP:
            # Close previous session
            sessions.append((start, prev))
            start = t
        prev = t
    sessions.append((start, prev))  # last session

# Estimate durations
total_time = timedelta()
for start, end in sessions:
    duration = end - start
    # Add a minimum session length (e.g. 30 min) so single commits aren't near-zero
    if duration < timedelta(minutes=30):
        duration = timedelta(minutes=30)
    total_time += duration

print(f"Number of commits: {len(commit_times)}")
print(f"Number of sessions: {len(sessions)}")
print(f"Estimated total coding time: {total_time} (~{total_time.total_seconds()/3600:.1f} hours)")