Added statistics cache

This commit is contained in:
Ultradesu
2025-07-21 13:23:10 +03:00
parent 243a6734fd
commit f5e5298461
10 changed files with 422 additions and 65 deletions

View File

@@ -12,7 +12,7 @@ from django.urls import path, reverse
from django.http import HttpResponseRedirect
from django.contrib.auth.admin import UserAdmin
from .models import User, AccessLog, TaskExecutionLog
from .models import User, AccessLog, TaskExecutionLog, UserStatistics
from django.utils.timezone import localtime
from vpn.models import User, ACL, ACLLink
from vpn.forms import UserForm
@@ -25,6 +25,103 @@ from .server_plugins import (
OutlineServer,
OutlineServerAdmin)
@admin.register(UserStatistics)
class UserStatisticsAdmin(admin.ModelAdmin):
    """Read-only admin for the cached per-user / per-link connection statistics.

    Rows are produced by the ``update_user_statistics`` task; the admin only
    displays them (and allows deletion, which clears the cache).
    """

    list_display = (
        'user_display', 'server_name', 'link_display', 'total_connections',
        'recent_connections', 'max_daily', 'updated_at_display',
    )
    list_filter = ('server_name', 'updated_at', 'user__username')
    search_fields = ('user__username', 'server_name', 'acl_link_id')
    readonly_fields = (
        'user', 'server_name', 'acl_link_id', 'total_connections',
        'recent_connections', 'daily_usage_chart', 'max_daily', 'updated_at',
    )
    ordering = ('-updated_at', 'user__username', 'server_name')
    list_per_page = 100
    fieldsets = (
        ('Basic Information', {'fields': ('user', 'server_name', 'acl_link_id')}),
        ('Statistics', {'fields': ('total_connections', 'recent_connections', 'max_daily')}),
        ('Usage Chart', {'fields': ('daily_usage_chart',)}),
        ('Metadata', {'fields': ('updated_at',)}),
    )

    @admin.display(description='User', ordering='user__username')
    def user_display(self, obj):
        """Username of the owning user."""
        return obj.user.username

    @admin.display(description='Link', ordering='acl_link_id')
    def link_display(self, obj):
        """Clickable, truncated access-link id, or '-' for rows without one."""
        link_id = obj.acl_link_id
        if not link_id:
            return '-'
        link_url = f"{EXTERNAL_ADDRESS}/ss/{link_id}#{obj.server_name}"
        shown = link_id[:12] + '...' if len(link_id) > 12 else link_id
        return format_html(
            '<a href="{}" target="_blank" style="color: #2563eb; text-decoration: none; font-family: monospace;">{}</a>',
            link_url, shown
        )

    @admin.display(description='Last Updated', ordering='updated_at')
    def updated_at_display(self, obj):
        """Local timestamp plus a colour-coded freshness hint."""
        from django.utils import timezone
        formatted_date = localtime(obj.updated_at).strftime('%Y-%m-%d %H:%M')
        age = timezone.now() - obj.updated_at
        seconds = age.total_seconds()
        # Colour coding based on freshness: green < 1h, yellow < 2h, red otherwise.
        if seconds < 3600:
            color, relative = '#16a34a', 'Fresh'
        elif seconds < 7200:
            color, relative = '#eab308', f'{int(seconds // 3600)}h ago'
        elif age.days > 0:
            color, relative = '#dc2626', f'{age.days}d ago'
        else:
            color, relative = '#dc2626', f'{int(seconds // 3600)}h ago'
        return mark_safe(
            f'<span style="color: {color}; font-weight: bold;">{formatted_date}</span>'
            f'<br><small style="color: {color};">{relative}</small>'
        )

    @admin.display(description='Daily Usage Chart')
    def daily_usage_chart(self, obj):
        """Tiny inline HTML bar chart of the last 30 days of connections."""
        usage = obj.daily_usage
        if not usage:
            return mark_safe('<span style="color: #9ca3af;">No data</span>')
        max_val = max(usage) if usage else 1
        parts = [
            '<div style="font-family: monospace; background: #f9fafb; padding: 10px; border-radius: 4px;">',
            f'<div style="margin-bottom: 5px; font-size: 12px; color: #6b7280;">Last 30 days (max: {max_val})</div>',
            '<div style="display: flex; align-items: end; gap: 1px; height: 40px;">',
        ]
        for day_count in usage:
            height_percent = (day_count / max_val) * 100 if max_val > 0 else 0
            color = '#4ade80' if day_count > 0 else '#e5e7eb'
            parts.append(
                f'<div style="background: {color}; width: 3px; height: {height_percent}%; min-height: 2px;" title="{day_count} connections"></div>'
            )
        parts.append('</div></div>')
        return mark_safe(''.join(parts))

    def has_add_permission(self, request):
        # Rows are created only by the statistics task.
        return False

    def has_change_permission(self, request, obj=None):
        return False

    def has_delete_permission(self, request, obj=None):
        return True  # Allow deletion to clear cache
@admin.register(TaskExecutionLog)
class TaskExecutionLogAdmin(admin.ModelAdmin):
list_display = ('task_name_display', 'action', 'status_display', 'server', 'user', 'execution_time_display', 'created_at')
@@ -34,7 +131,7 @@ class TaskExecutionLogAdmin(admin.ModelAdmin):
ordering = ('-created_at',)
list_per_page = 100
date_hierarchy = 'created_at'
actions = ['trigger_full_sync']
actions = ['trigger_full_sync', 'trigger_statistics_update']
fieldsets = (
('Task Information', {
@@ -72,6 +169,30 @@ class TaskExecutionLogAdmin(admin.ModelAdmin):
trigger_full_sync.short_description = "🔄 Trigger full sync of all servers"
def trigger_statistics_update(self, request, queryset):
    """Admin action: queue the Celery task that rebuilds the statistics cache.

    The selected queryset is ignored — the update always runs globally.
    """
    try:
        from vpn.tasks import update_user_statistics
        task = update_user_statistics.delay()
        note = f'User statistics update started successfully. Task ID: {task.id}. Check logs below for progress.'
        level = messages.SUCCESS
    except Exception as e:
        note = f'Failed to start statistics update: {e}'
        level = messages.ERROR
    self.message_user(request, note, level=level)
trigger_statistics_update.short_description = "📊 Update user statistics cache"
def get_actions(self, request):
"""Remove default delete action for logs"""
actions = super().get_actions(request)
@@ -87,6 +208,7 @@ class TaskExecutionLogAdmin(admin.ModelAdmin):
'sync_server_info': '⚙️ Server Info',
'sync_user_on_server': '👤 User Sync',
'cleanup_task_logs': '🧹 Cleanup',
'update_user_statistics': '📊 Statistics',
}
return task_names.get(obj.task_name, obj.task_name)
@@ -131,6 +253,11 @@ class TaskExecutionLogAdmin(admin.ModelAdmin):
self.trigger_full_sync(request, None)
# Return redirect to prevent AttributeError
return redirect(request.get_full_path())
elif action == 'trigger_statistics_update':
# Call the statistics update action
self.trigger_statistics_update(request, None)
# Return redirect to prevent AttributeError
return redirect(request.get_full_path())
return super().changelist_view(request, extra_context)
@@ -896,6 +1023,7 @@ try:
'sync_server_info': '⚙️ Sync Server Info',
'sync_user_on_server': '👤 Sync User on Server',
'cleanup_task_logs': '🧹 Cleanup Old Logs',
'update_user_statistics': '📊 Update Statistics',
}
return task_names.get(obj.task_name, obj.task_name)

View File

@@ -4,4 +4,3 @@ from django.contrib.auth import get_user_model
class VPN(AppConfig):
    """Django application config for the ``vpn`` app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'vpn'

View File

@@ -0,0 +1 @@
# Django management commands package

View File

@@ -0,0 +1 @@
from django.core.management.base import BaseCommand

from vpn.models import User, ACLLink, UserStatistics
from vpn.tasks import update_user_statistics


class Command(BaseCommand):
    """Initialize the user-statistics cache by running the update task.

    By default the update runs synchronously in this process; with ``--async``
    it is queued as a Celery task instead. If the cache already has entries
    the command refuses to run unless ``--force`` is given.
    """

    help = 'Initialize user statistics cache by running the update task'

    def add_arguments(self, parser):
        parser.add_argument(
            '--async',
            action='store_true',
            help='Run statistics update as async Celery task (default: sync)',
        )
        parser.add_argument(
            '--force',
            action='store_true',
            help='Force update even if statistics already exist',
        )

    def handle(self, *args, **options):
        # Skip the (expensive) rebuild when a cache already exists, unless forced.
        existing_stats = UserStatistics.objects.count()
        if existing_stats > 0 and not options['force']:
            self.stdout.write(
                self.style.WARNING(
                    f'Statistics cache already contains {existing_stats} entries. '
                    'Use --force to update anyway.'
                )
            )
            return

        # Report how much work there is; bail out early when there is none.
        users_with_links = User.objects.filter(acl__isnull=False).distinct().count()
        total_links = ACLLink.objects.count()
        self.stdout.write(
            f'Found {users_with_links} users with {total_links} ACL links total'
        )
        if total_links == 0:
            self.stdout.write(
                self.style.WARNING('No ACL links found. Nothing to process.')
            )
            return

        if options['async']:
            self._run_async()
        else:
            self._run_sync()

    def _run_async(self):
        """Queue the update as a Celery task and return immediately."""
        try:
            task = update_user_statistics.delay()
        except Exception as e:
            self.stdout.write(self.style.ERROR(f'Failed to start async task: {e}'))
            return
        self.stdout.write(
            self.style.SUCCESS(f'Statistics update task started. Task ID: {task.id}')
        )
        self.stdout.write('Check admin panel Task Execution Logs for progress.')

    def _run_sync(self):
        """Run the update task eagerly in this process and report the result."""
        self.stdout.write('Starting synchronous statistics update...')
        try:
            # BUGFIX: the previous code hand-built a mock task instance and
            # called the bound task function directly, which passes a second
            # "self" to a bind=True Celery task. Task.apply() executes the task
            # eagerly with a real request object; .get() returns its result
            # (or re-raises its exception).
            result = update_user_statistics.apply().get()
            self.stdout.write(
                self.style.SUCCESS(f'Statistics update completed: {result}')
            )
            final_stats = UserStatistics.objects.count()
            self.stdout.write(
                self.style.SUCCESS(
                    f'Statistics cache now contains {final_stats} entries'
                )
            )
        except Exception as e:
            self.stdout.write(self.style.ERROR(f'Statistics update failed: {e}'))
            import traceback
            self.stdout.write(traceback.format_exc())

View File

@@ -0,0 +1,45 @@
# Generated migration for UserStatistics model
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Create the UserStatistics cache table with its indexes and uniqueness."""

    # Applies on top of the 0004 merge migration.
    dependencies = [
        ('vpn', '0004_merge_20250721_1223'),
    ]

    operations = [
        # Cache table: connection statistics per (user, server, link).
        migrations.CreateModel(
            name='UserStatistics',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('server_name', models.CharField(max_length=256)),
                ('acl_link_id', models.CharField(blank=True, help_text='None for server-level stats', max_length=1024, null=True)),
                ('total_connections', models.IntegerField(default=0)),
                ('recent_connections', models.IntegerField(default=0)),
                ('daily_usage', models.JSONField(default=list, help_text='Daily connection counts for last 30 days')),
                ('max_daily', models.IntegerField(default=0)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'User Statistics',
                'verbose_name_plural': 'User Statistics',
            },
        ),
        # Lookup index used when reading a user's stats for one server.
        migrations.AddIndex(
            model_name='userstatistics',
            index=models.Index(fields=['user', 'server_name'], name='vpn_usersta_user_id_1c7cd0_idx'),
        ),
        # Index supporting ordering/filtering by cache freshness.
        migrations.AddIndex(
            model_name='userstatistics',
            index=models.Index(fields=['updated_at'], name='vpn_usersta_updated_8e6e9b_idx'),
        ),
        # One cache row per (user, server, link) combination.
        migrations.AlterUniqueTogether(
            name='userstatistics',
            unique_together={('user', 'server_name', 'acl_link_id')},
        ),
    ]

View File

@@ -11,6 +11,31 @@ from django.contrib.auth.models import AbstractUser
logger = logging.getLogger(__name__)
class UserStatistics(models.Model):
    """Cached connection statistics for one user on one server/link.

    Rows are (re)built by the ``update_user_statistics`` task; per the field
    ``help_text``, ``acl_link_id`` is None for server-level rows.
    """
    user = models.ForeignKey('User', on_delete=models.CASCADE)
    # Denormalized server name (plain string, not a FK to the server model).
    server_name = models.CharField(max_length=256)
    acl_link_id = models.CharField(max_length=1024, null=True, blank=True, help_text="None for server-level stats")
    # All-time count of successful connections.
    total_connections = models.IntegerField(default=0)
    # Successful connections within the last 30 days.
    recent_connections = models.IntegerField(default=0)
    daily_usage = models.JSONField(default=list, help_text="Daily connection counts for last 30 days")
    # Largest single-day value in daily_usage.
    max_daily = models.IntegerField(default=0)
    # Refreshed automatically on every save (auto_now).
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        unique_together = ['user', 'server_name', 'acl_link_id']
        verbose_name = 'User Statistics'
        verbose_name_plural = 'User Statistics'
        indexes = [
            models.Index(fields=['user', 'server_name']),
            models.Index(fields=['updated_at']),
        ]

    def __str__(self):
        link_part = f" (link: {self.acl_link_id})" if self.acl_link_id else " (server total)"
        return f"{self.user.username} - {self.server_name}{link_part}"
class TaskExecutionLog(models.Model):
task_id = models.CharField(max_length=255, help_text="Celery task ID")
task_name = models.CharField(max_length=100, help_text="Task name")

View File

@@ -215,6 +215,131 @@ def sync_server(self, id):
create_task_log(task_id, "sync_server_info", "Server sync failed after retries", 'FAILURE', server=server, message=error_message, execution_time=time.time() - start_time)
return {"error": error_message}
@shared_task(name="update_user_statistics", bind=True, autoretry_for=(Exception,), retry_kwargs={'max_retries': 3, 'countdown': 60})
def update_user_statistics(self):
    """Update cached user statistics (UserStatistics rows) from AccessLog data.

    For every ACL link of every user that has at least one ACL, recompute:
      * total successful connections on that link's server (all time),
      * successful connections within the last 30 days,
      * a 30-entry list of daily connection counts plus its maximum.

    NOTE: AccessLog records only (user, server), not the individual link, so
    every link of the same user/server pair receives identical numbers.

    Returns a human-readable summary string; raises (and retries) on failure.
    """
    from .models import User, AccessLog, UserStatistics, ACLLink
    from django.utils import timezone
    from datetime import timedelta
    from django.db.models import Count, Q
    from django.db import transaction

    start_time = time.time()
    task_id = self.request.id
    create_task_log(task_id, "update_user_statistics", "Starting statistics update", 'STARTED')
    try:
        now = timezone.now()
        thirty_days_ago = now - timedelta(days=30)

        # The 30 day boundaries for the usage chart are identical for every
        # user and link, so compute them once here (hoisted out of the loops;
        # previously recomputed per link).
        day_bounds = []
        for i in range(30):
            day_start = (now - timedelta(days=29 - i)).replace(hour=0, minute=0, second=0, microsecond=0)
            day_bounds.append((day_start, day_start + timedelta(days=1)))

        # Get all users with ACL links
        users_with_links = User.objects.filter(acl__isnull=False).distinct()
        total_users = users_with_links.count()
        create_task_log(task_id, "update_user_statistics", f"Found {total_users} users to process", 'STARTED')
        logger.info(f"Updating statistics for {total_users} users")

        updated_count = 0
        with transaction.atomic():
            for user in users_with_links:
                logger.debug(f"Processing user {user.username}")
                # Get all ACL links for this user
                acl_links = ACLLink.objects.filter(acl__user=user).select_related('acl__server')
                for link in acl_links:
                    server_name = link.acl.server.name
                    base_qs = AccessLog.objects.filter(
                        user=user.username,
                        server=server_name,
                        action='Success'
                    )
                    # One aggregate query for the all-time and 30-day totals
                    # (previously two separate COUNT queries per link).
                    counts = base_qs.aggregate(
                        total=Count('id'),
                        recent=Count('id', filter=Q(timestamp__gte=thirty_days_ago)),
                    )
                    total_connections = counts['total']
                    recent_connections = counts['recent']

                    # Generate daily usage data for the last 30 days
                    daily_usage = []
                    max_daily = 0
                    for day_start, day_end in day_bounds:
                        day_connections = base_qs.filter(
                            timestamp__gte=day_start,
                            timestamp__lt=day_end
                        ).count()
                        daily_usage.append(day_connections)
                        max_daily = max(max_daily, day_connections)

                    # Update or create statistics for this link
                    stats, created = UserStatistics.objects.update_or_create(
                        user=user,
                        server_name=server_name,
                        acl_link_id=link.link,
                        defaults={
                            'total_connections': total_connections,
                            'recent_connections': recent_connections,
                            'daily_usage': daily_usage,
                            'max_daily': max_daily,
                        }
                    )
                    action = "created" if created else "updated"
                    logger.debug(f"{action} stats for {user.username} on {server_name} (link: {link.link})")
                    updated_count += 1
                logger.debug(f"Completed processing user {user.username}")

        success_message = f"Successfully updated statistics for {updated_count} user-server-link combinations"
        logger.info(success_message)
        create_task_log(
            task_id,
            "update_user_statistics",
            "Statistics update completed",
            'SUCCESS',
            message=success_message,
            execution_time=time.time() - start_time
        )
        return success_message
    except Exception as e:
        error_message = f"Error updating user statistics: {e}"
        logger.error(error_message, exc_info=True)
        # NOTE(review): autoretry_for=(Exception,) already retries this task;
        # the manual retry below mainly exists to emit the RETRY log entry.
        # Confirm both mechanisms are intended — they can double-count retries.
        if self.request.retries < 3:
            retry_message = f"Retrying statistics update (attempt {self.request.retries + 1})"
            logger.info(retry_message)
            create_task_log(task_id, "update_user_statistics", "Retrying statistics update", 'RETRY', message=retry_message)
            raise self.retry(countdown=60)
        create_task_log(
            task_id,
            "update_user_statistics",
            "Statistics update failed after retries",
            'FAILURE',
            message=error_message,
            execution_time=time.time() - start_time
        )
        raise
@shared_task(name="sync_user_on_server", bind=True, autoretry_for=(Exception,), retry_kwargs={'max_retries': 5, 'countdown': 30})
def sync_user(self, user_id, server_id):
from .models import User, ACL

View File

@@ -236,6 +236,22 @@
flex-wrap: wrap;
}
.last-used {
color: #9ca3af;
font-size: 0.8rem;
background: rgba(156, 163, 175, 0.1);
padding: 3px 8px;
border-radius: 10px;
border: 1px solid rgba(156, 163, 175, 0.2);
transition: all 0.3s ease;
}
.last-used:hover {
background: rgba(156, 163, 175, 0.2);
border-color: rgba(156, 163, 175, 0.4);
transform: scale(1.05);
}
.usage-count {
color: #9ca3af;
font-size: 0.8rem;
@@ -451,7 +467,11 @@
</div>
</div>
<div class="stats-info">
<p>📊 Statistics are updated in real-time and show your connection history</p>
{% if total_connections == 0 and total_links > 0 %}
<p>📊 Statistics cache is empty. Run update in Admin → Task Execution Logs</p>
{% else %}
<p>📊 Statistics are updated every 3 hours and show your connection history</p>
{% endif %}
</div>
</div>
@@ -492,6 +512,7 @@
<div class="link-stats">
<span class="usage-count">✨ {{ link_data.connections }} uses</span>
<span class="recent-count">📅 {{ link_data.recent_connections }} last 30 days</span>
<span class="last-used">🕒 {{ link_data.last_access_display }}</span>
</div>
</div>
<div class="usage-chart" data-usage="{{ link_data.daily_usage|join:',' }}" data-max="{{ link_data.max_daily }}">

View File

@@ -1,8 +1,8 @@
def userPortal(request, user_hash):
"""HTML portal for user to view their VPN access links and server information"""
from .models import User, ACLLink, AccessLog
from .models import User, ACLLink, UserStatistics, AccessLog
from django.utils import timezone
from datetime import datetime, timedelta
from datetime import timedelta
import logging
logger = logging.getLogger(__name__)
@@ -22,24 +22,17 @@ def userPortal(request, user_hash):
acl_links = ACLLink.objects.filter(acl__user=user).select_related('acl__server', 'acl')
logger.info(f"Found {acl_links.count()} ACL links for user {user.username}")
# Calculate date ranges for statistics
now = timezone.now()
thirty_days_ago = now - timedelta(days=30)
logger.debug(f"Calculating stats from {thirty_days_ago} to {now}")
# Calculate total connection statistics
total_connections = AccessLog.objects.filter(
user=user.username,
action='Success'
).count()
recent_connections = AccessLog.objects.filter(
user=user.username,
action='Success',
timestamp__gte=thirty_days_ago
).count()
logger.info(f"User {user.username} stats: total_connections={total_connections}, recent_connections={recent_connections}")
# Calculate overall statistics from cached data (only where cache exists)
user_stats = UserStatistics.objects.filter(user=user)
if user_stats.exists():
total_connections = sum(stat.total_connections for stat in user_stats)
recent_connections = sum(stat.recent_connections for stat in user_stats)
logger.info(f"User {user.username} cached stats: total_connections={total_connections}, recent_connections={recent_connections}")
else:
# No cache available, set to zero and suggest cache update
total_connections = 0
recent_connections = 0
logger.warning(f"No cached statistics found for user {user.username}. Run statistics update task.")
# Group links by server
servers_data = {}
@@ -49,7 +42,6 @@ def userPortal(request, user_hash):
server = link.acl.server
server_name = server.name
logger.debug(f"Processing link {link.link} for server {server_name}")
logger.debug(f"Link last_access_time: {link.last_access_time}")
if server_name not in servers_data:
# Get server status and info
@@ -64,12 +56,12 @@ def userPortal(request, user_hash):
server_accessible = False
server_error = str(e)
# Calculate server-specific connection stats
server_total_connections = AccessLog.objects.filter(
user=user.username,
server=server_name,
action='Success'
).count()
# Calculate server-level totals from cached stats (only where cache exists)
server_stats = user_stats.filter(server_name=server_name)
if server_stats.exists():
server_total_connections = sum(stat.total_connections for stat in server_stats)
else:
server_total_connections = 0
servers_data[server_name] = {
'server': server,
@@ -80,45 +72,52 @@ def userPortal(request, user_hash):
'server_type': server.server_type,
'total_connections': server_total_connections,
}
logger.debug(f"Created server data for {server_name} with {server_total_connections} connections")
logger.debug(f"Created server data for {server_name} with {server_total_connections} cached connections")
# Calculate link-specific statistics
# Note: AccessLog doesn't have link-specific tracking, so we'll use server-based stats
link_connections = AccessLog.objects.filter(
user=user.username,
server=server_name,
action='Success'
).count()
# Calculate time since last access
last_access_display = "Never used"
if link.last_access_time:
time_diff = timezone.now() - link.last_access_time
if time_diff.days > 0:
last_access_display = f"{time_diff.days} days ago"
elif time_diff.seconds > 3600:
hours = time_diff.seconds // 3600
last_access_display = f"{hours} hours ago"
elif time_diff.seconds > 60:
minutes = time_diff.seconds // 60
last_access_display = f"{minutes} minutes ago"
else:
last_access_display = "Just now"
link_recent_connections = AccessLog.objects.filter(
user=user.username,
server=server_name,
action='Success',
timestamp__gte=thirty_days_ago
).count()
# Generate daily usage data for the last 30 days
daily_usage = []
max_daily = 0
for i in range(30):
day_start = (now - timedelta(days=29-i)).replace(hour=0, minute=0, second=0, microsecond=0)
day_end = day_start + timedelta(days=1)
# Get cached statistics for this specific link
try:
link_stats = UserStatistics.objects.get(
user=user,
server_name=server_name,
acl_link_id=link.link
)
logger.debug(f"Found cached stats for link {link.link}: {link_stats.total_connections} connections, max_daily={link_stats.max_daily}")
day_connections = AccessLog.objects.filter(
user=user.username,
server=server_name,
action='Success',
timestamp__gte=day_start,
timestamp__lt=day_end
).count()
link_connections = link_stats.total_connections
link_recent_connections = link_stats.recent_connections
daily_usage = link_stats.daily_usage or []
max_daily = link_stats.max_daily
daily_usage.append(day_connections)
max_daily = max(max_daily, day_connections)
except UserStatistics.DoesNotExist:
logger.warning(f"No cached stats found for link {link.link} on server {server_name}, using fallback")
# Fallback: Since AccessLog doesn't track specific links, show zero for link-specific stats
# but keep server-level stats for context
link_connections = 0
link_recent_connections = 0
daily_usage = [0] * 30 # Empty 30-day chart
max_daily = 0
logger.warning(f"Using zero stats for uncached link {link.link} - AccessLog doesn't track individual links")
logger.debug(f"Link {link.link} stats: connections={link_connections}, recent={link_recent_connections}, max_daily={max_daily}")
# Add link information with comprehensive statistics
# Add link information with statistics
link_url = f"{EXTERNAL_ADDRESS}/ss/{link.link}#{server_name}"
link_data = {
@@ -126,6 +125,7 @@ def userPortal(request, user_hash):
'url': link_url,
'comment': link.comment or 'Default',
'last_access': link.last_access_time,
'last_access_display': last_access_display,
'connections': link_connections,
'recent_connections': link_recent_connections,
'daily_usage': daily_usage,
@@ -152,13 +152,12 @@ def userPortal(request, user_hash):
logger.debug(f"Context prepared with keys: {list(context.keys())}")
logger.debug(f"Servers in context: {list(servers_data.keys())}")
logger.debug(f"Final context values: total_connections={context['total_connections']}, recent_connections={context['recent_connections']}")
# Log sample server data for debugging
for server_name, server_data in servers_data.items():
logger.debug(f"Server {server_name}: total_connections={server_data['total_connections']}, links_count={len(server_data['links'])}")
for i, link_data in enumerate(server_data['links']):
logger.debug(f" Link {i}: connections={link_data['connections']}, recent={link_data['recent_connections']}, daily_usage_len={len(link_data['daily_usage'])}")
logger.debug(f" Link {i}: connections={link_data['connections']}, recent={link_data['recent_connections']}, last_access='{link_data['last_access_display']}'")
return render(request, 'vpn/user_portal.html', context)