mirror of
https://github.com/house-of-vanity/OutFleet.git
synced 2025-12-16 17:37:51 +00:00
init rust. WIP: tls for inbounds
This commit is contained in:
31
.env.example
Normal file
31
.env.example
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Environment Variables Example for Xray Admin Panel
|
||||||
|
# Copy this file to .env and modify the values as needed
|
||||||
|
|
||||||
|
# Database Configuration
|
||||||
|
DATABASE_URL=postgresql://xray_admin:password@localhost:5432/xray_admin
|
||||||
|
XRAY_ADMIN__DATABASE__MAX_CONNECTIONS=20
|
||||||
|
XRAY_ADMIN__DATABASE__CONNECTION_TIMEOUT=30
|
||||||
|
XRAY_ADMIN__DATABASE__AUTO_MIGRATE=true
|
||||||
|
|
||||||
|
# Web Server Configuration
|
||||||
|
XRAY_ADMIN__WEB__HOST=0.0.0.0
|
||||||
|
XRAY_ADMIN__WEB__PORT=8080
|
||||||
|
XRAY_ADMIN__WEB__JWT_SECRET=your-super-secret-jwt-key-change-this
|
||||||
|
XRAY_ADMIN__WEB__JWT_EXPIRY=86400
|
||||||
|
|
||||||
|
# Telegram Bot Configuration
|
||||||
|
TELEGRAM_BOT_TOKEN=1234567890:ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghi
|
||||||
|
XRAY_ADMIN__TELEGRAM__WEBHOOK_URL=https://your-domain.com/telegram/webhook
|
||||||
|
|
||||||
|
# Xray Configuration
|
||||||
|
XRAY_ADMIN__XRAY__DEFAULT_API_PORT=62789
|
||||||
|
XRAY_ADMIN__XRAY__HEALTH_CHECK_INTERVAL=30
|
||||||
|
|
||||||
|
# Logging Configuration
|
||||||
|
XRAY_ADMIN__LOGGING__LEVEL=info
|
||||||
|
XRAY_ADMIN__LOGGING__FILE_PATH=./logs/xray-admin.log
|
||||||
|
XRAY_ADMIN__LOGGING__JSON_FORMAT=false
|
||||||
|
|
||||||
|
# Runtime Environment
|
||||||
|
RUST_ENV=development
|
||||||
|
ENVIRONMENT=development
|
||||||
51
.github/workflows/main.yml
vendored
51
.github/workflows/main.yml
vendored
@@ -1,51 +0,0 @@
|
|||||||
name: Docker hub build
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- 'django'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
docker:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
-
|
|
||||||
name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
-
|
|
||||||
name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v3
|
|
||||||
-
|
|
||||||
name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
- name: Set outputs
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
|
|
||||||
echo "sha_full=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
|
||||||
echo "build_date=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_OUTPUT
|
|
||||||
echo "branch_name=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
|
|
||||||
- name: Check outputs
|
|
||||||
run: |
|
|
||||||
echo "Short SHA: ${{ steps.vars.outputs.sha_short }}"
|
|
||||||
echo "Full SHA: ${{ steps.vars.outputs.sha_full }}"
|
|
||||||
echo "Build Date: ${{ steps.vars.outputs.build_date }}"
|
|
||||||
echo "Branch: ${{ steps.vars.outputs.branch_name }}"
|
|
||||||
-
|
|
||||||
name: Build and push
|
|
||||||
uses: docker/build-push-action@v5
|
|
||||||
with:
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
cache-from: type=registry,ref=ultradesu/outfleet:buildcache
|
|
||||||
cache-to: type=registry,ref=ultradesu/outfleet:buildcache,mode=max
|
|
||||||
build-args: |
|
|
||||||
GIT_COMMIT=${{ steps.vars.outputs.sha_full }}
|
|
||||||
GIT_COMMIT_SHORT=${{ steps.vars.outputs.sha_short }}
|
|
||||||
BUILD_DATE=${{ steps.vars.outputs.build_date }}
|
|
||||||
BRANCH_NAME=${{ steps.vars.outputs.branch_name }}
|
|
||||||
tags: ultradesu/outfleet:v2,ultradesu/outfleet:${{ steps.vars.outputs.sha_short }}
|
|
||||||
17
.gitignore
vendored
17
.gitignore
vendored
@@ -1,21 +1,10 @@
|
|||||||
db.sqlite3
|
|
||||||
debug.log
|
|
||||||
*.swp
|
*.swp
|
||||||
*.swo
|
*.swo
|
||||||
*.pyc
|
|
||||||
staticfiles/
|
/target/
|
||||||
*.__pycache__.*
|
config.toml
|
||||||
celerybeat-schedule*
|
|
||||||
|
|
||||||
# macOS system files
|
# macOS system files
|
||||||
._*
|
._*
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
|
||||||
# Virtual environments
|
|
||||||
venv/
|
|
||||||
.venv/
|
|
||||||
env/
|
|
||||||
|
|
||||||
# Temporary files
|
|
||||||
/tmp/
|
|
||||||
*.tmp
|
|
||||||
|
|||||||
64
.vscode/launch.json
vendored
64
.vscode/launch.json
vendored
@@ -1,64 +0,0 @@
|
|||||||
{
|
|
||||||
"version": "0.2.0",
|
|
||||||
"configurations": [
|
|
||||||
{
|
|
||||||
"name": "Django VPN app",
|
|
||||||
"type": "debugpy",
|
|
||||||
"request": "launch",
|
|
||||||
"env": {
|
|
||||||
"POSTGRES_PORT": "5433",
|
|
||||||
"DJANGO_SETTINGS_MODULE": "mysite.settings",
|
|
||||||
"EXTERNAL_ADDRESS": "http://localhost:8000"
|
|
||||||
},
|
|
||||||
"args": [
|
|
||||||
"runserver",
|
|
||||||
"0.0.0.0:8000"
|
|
||||||
],
|
|
||||||
"django": true,
|
|
||||||
"autoStartBrowser": false,
|
|
||||||
"program": "${workspaceFolder}/manage.py"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Celery Worker",
|
|
||||||
"type": "debugpy",
|
|
||||||
"request": "launch",
|
|
||||||
"module": "celery",
|
|
||||||
"args": [
|
|
||||||
"-A", "mysite",
|
|
||||||
"worker",
|
|
||||||
"--loglevel=info"
|
|
||||||
],
|
|
||||||
"env": {
|
|
||||||
"POSTGRES_PORT": "5433",
|
|
||||||
"DJANGO_SETTINGS_MODULE": "mysite.settings"
|
|
||||||
},
|
|
||||||
"console": "integratedTerminal"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Celery Beat",
|
|
||||||
"type": "debugpy",
|
|
||||||
"request": "launch",
|
|
||||||
"module": "celery",
|
|
||||||
"args": [
|
|
||||||
"-A", "mysite",
|
|
||||||
"beat",
|
|
||||||
"--loglevel=info"
|
|
||||||
],
|
|
||||||
"env": {
|
|
||||||
"POSTGRES_PORT": "5433",
|
|
||||||
"DJANGO_SETTINGS_MODULE": "mysite.settings"
|
|
||||||
},
|
|
||||||
"console": "integratedTerminal"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"compounds": [
|
|
||||||
{
|
|
||||||
"name": "Run Django, Celery Worker, and Celery Beat",
|
|
||||||
"configurations": [
|
|
||||||
"Django VPN app",
|
|
||||||
"Celery Worker",
|
|
||||||
"Celery Beat"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
73
API.md
Normal file
73
API.md
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
# User Management API
|
||||||
|
|
||||||
|
Base URL: `http://localhost:8080/api`
|
||||||
|
|
||||||
|
## Endpoints
|
||||||
|
|
||||||
|
### Health Check
|
||||||
|
- `GET /` - Service health check
|
||||||
|
|
||||||
|
### Users
|
||||||
|
|
||||||
|
#### List Users
|
||||||
|
- `GET /users?page=1&per_page=20` - Get paginated list of users
|
||||||
|
|
||||||
|
#### Search Users
|
||||||
|
- `GET /users/search?q=john&page=1&per_page=20` - Search users by name
|
||||||
|
|
||||||
|
#### Get User
|
||||||
|
- `GET /users/{id}` - Get user by ID
|
||||||
|
|
||||||
|
#### Create User
|
||||||
|
- `POST /users` - Create new user
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "John Doe",
|
||||||
|
"comment": "Admin user",
|
||||||
|
"telegram_id": 123456789
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Update User
|
||||||
|
- `PUT /users/{id}` - Update user by ID
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "Jane Doe",
|
||||||
|
"comment": null,
|
||||||
|
"telegram_id": 987654321
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Delete User
|
||||||
|
- `DELETE /users/{id}` - Delete user by ID
|
||||||
|
|
||||||
|
## Response Format
|
||||||
|
|
||||||
|
### User Object
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "uuid",
|
||||||
|
"name": "string",
|
||||||
|
"comment": "string|null",
|
||||||
|
"telegram_id": "number|null",
|
||||||
|
"created_at": "timestamp",
|
||||||
|
"updated_at": "timestamp"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Users List Response
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"users": [UserObject],
|
||||||
|
"total": 100,
|
||||||
|
"page": 1,
|
||||||
|
"per_page": 20
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Status Codes
|
||||||
|
- `200` - Success
|
||||||
|
- `201` - Created
|
||||||
|
- `404` - Not Found
|
||||||
|
- `409` - Conflict (duplicate telegram_id)
|
||||||
|
- `500` - Internal Server Error
|
||||||
4252
Cargo.lock
generated
Normal file
4252
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
59
Cargo.toml
Normal file
59
Cargo.toml
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
[package]
|
||||||
|
name = "xray-admin"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
# Async runtime
|
||||||
|
tokio = { version = "1.0", features = ["full"] }
|
||||||
|
tokio-cron-scheduler = "0.10"
|
||||||
|
|
||||||
|
# Serialization/deserialization
|
||||||
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
serde_json = "1.0"
|
||||||
|
serde_yaml = "0.9"
|
||||||
|
toml = "0.8"
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
config = "0.14"
|
||||||
|
clap = { version = "4.0", features = ["derive", "env"] }
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
tracing = "0.1"
|
||||||
|
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||||
|
|
||||||
|
# Utilities
|
||||||
|
anyhow = "1.0"
|
||||||
|
thiserror = "1.0"
|
||||||
|
|
||||||
|
# Validation
|
||||||
|
validator = { version = "0.18", features = ["derive"] }
|
||||||
|
|
||||||
|
# URL parsing
|
||||||
|
url = "2.5"
|
||||||
|
|
||||||
|
# Database and ORM
|
||||||
|
sea-orm = { version = "1.0", features = ["sqlx-postgres", "runtime-tokio-rustls", "macros", "with-chrono", "with-uuid"] }
|
||||||
|
sea-orm-migration = "1.0"
|
||||||
|
|
||||||
|
# Additional utilities
|
||||||
|
uuid = { version = "1.0", features = ["v4", "serde"] }
|
||||||
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
async-trait = "0.1"
|
||||||
|
log = "0.4"
|
||||||
|
urlencoding = "2.1"
|
||||||
|
|
||||||
|
# Web server
|
||||||
|
axum = { version = "0.7", features = ["macros", "json"] }
|
||||||
|
tower = "0.4"
|
||||||
|
tower-http = { version = "0.5", features = ["cors", "fs"] }
|
||||||
|
hyper = { version = "1.0", features = ["full"] }
|
||||||
|
|
||||||
|
# Xray integration
|
||||||
|
xray-core = "0.2.1" # gRPC client for Xray
|
||||||
|
tonic = "0.12" # gRPC client/server framework
|
||||||
|
prost = "0.13" # Protocol Buffers implementation
|
||||||
|
rcgen = "0.12" # For self-signed certificates
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
tempfile = "3.0"
|
||||||
40
Dockerfile
40
Dockerfile
@@ -1,40 +0,0 @@
|
|||||||
FROM python:3-alpine
|
|
||||||
|
|
||||||
# Build arguments
|
|
||||||
ARG GIT_COMMIT="development"
|
|
||||||
ARG GIT_COMMIT_SHORT="dev"
|
|
||||||
ARG BUILD_DATE="unknown"
|
|
||||||
ARG BRANCH_NAME="unknown"
|
|
||||||
|
|
||||||
# Environment variables from build args
|
|
||||||
ENV GIT_COMMIT=${GIT_COMMIT}
|
|
||||||
ENV GIT_COMMIT_SHORT=${GIT_COMMIT_SHORT}
|
|
||||||
ENV BUILD_DATE=${BUILD_DATE}
|
|
||||||
ENV BRANCH_NAME=${BRANCH_NAME}
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# Install system dependencies first (this layer will be cached)
|
|
||||||
RUN apk update && apk add git curl unzip
|
|
||||||
|
|
||||||
# Copy and install Python dependencies (this layer will be cached when requirements.txt doesn't change)
|
|
||||||
COPY ./requirements.txt .
|
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
|
||||||
|
|
||||||
# Install Xray-core
|
|
||||||
RUN XRAY_VERSION=$(curl -s https://api.github.com/repos/XTLS/Xray-core/releases/latest | sed -n 's/.*"tag_name": "\([^"]*\)".*/\1/p') && \
|
|
||||||
curl -L -o /tmp/xray.zip "https://github.com/XTLS/Xray-core/releases/download/${XRAY_VERSION}/Xray-linux-64.zip" && \
|
|
||||||
cd /tmp && unzip xray.zip && \
|
|
||||||
ls -la /tmp/ && \
|
|
||||||
find /tmp -name "xray" -type f && \
|
|
||||||
cp xray /usr/local/bin/xray && \
|
|
||||||
chmod +x /usr/local/bin/xray && \
|
|
||||||
rm -rf /tmp/xray.zip /tmp/xray
|
|
||||||
|
|
||||||
# Copy the rest of the application code (this layer will change frequently)
|
|
||||||
COPY . .
|
|
||||||
|
|
||||||
# Run collectstatic
|
|
||||||
RUN python manage.py collectstatic --noinput
|
|
||||||
|
|
||||||
CMD [ "python", "./manage.py", "runserver", "0.0.0.0:8000" ]
|
|
||||||
13
LICENSE
13
LICENSE
@@ -1,13 +0,0 @@
|
|||||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
|
||||||
Version 2, December 2004
|
|
||||||
|
|
||||||
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
|
|
||||||
|
|
||||||
Everyone is permitted to copy and distribute verbatim or modified
|
|
||||||
copies of this license document, and changing it is allowed as long
|
|
||||||
as the name is changed.
|
|
||||||
|
|
||||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
|
||||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
|
||||||
|
|
||||||
0. You just DO WHAT THE FUCK YOU WANT TO.
|
|
||||||
132
LLM_PROJECT_CONTEXT.md
Normal file
132
LLM_PROJECT_CONTEXT.md
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
# LLM Project Context - Xray Admin Panel
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
Rust-based administration panel for managing xray-core VPN proxy servers. Uses real gRPC integration with xray-core library for server communication.
|
||||||
|
|
||||||
|
## Current Architecture
|
||||||
|
|
||||||
|
### Core Technologies
|
||||||
|
- **Language**: Rust (edition 2021)
|
||||||
|
- **Web Framework**: Axum with tower-http
|
||||||
|
- **Database**: PostgreSQL with Sea-ORM
|
||||||
|
- **Xray Integration**: xray-core 0.2.1 library with real gRPC communication
|
||||||
|
- **Frontend**: Vanilla HTML/CSS/JS with toast notifications
|
||||||
|
|
||||||
|
### Module Structure
|
||||||
|
```
|
||||||
|
src/
|
||||||
|
├── config/ # Configuration management (args, env, file)
|
||||||
|
├── database/ # Sea-ORM entities, repositories, migrations
|
||||||
|
├── services/ # Business logic (xray gRPC client, certificates)
|
||||||
|
├── web/ # Axum handlers and routes
|
||||||
|
└── main.rs # Application entry point
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Features Implemented
|
||||||
|
|
||||||
|
### 1. Database Entities
|
||||||
|
- **Users**: Basic user management
|
||||||
|
- **Servers**: Xray server definitions with gRPC endpoints
|
||||||
|
- **Certificates**: TLS certificates with PEM storage (binary format)
|
||||||
|
- **InboundTemplates**: Reusable inbound configurations
|
||||||
|
- **ServerInbounds**: Template bindings to servers with ports/certificates
|
||||||
|
|
||||||
|
### 2. Xray gRPC Integration
|
||||||
|
**Location**: `src/services/xray/client.rs`
|
||||||
|
- Real xray-core library integration (NOT mock/CLI)
|
||||||
|
- Methods: `add_inbound_with_certificate()`, `remove_inbound()`, `get_stats()`
|
||||||
|
- **CRITICAL**: TLS certificate configuration via streamSettings with proper protobuf messages
|
||||||
|
- Supports VLESS, VMess, Trojan, Shadowsocks protocols
|
||||||
|
|
||||||
|
### 3. Certificate Management
|
||||||
|
**Location**: `src/database/entities/certificate.rs`
|
||||||
|
- Self-signed certificate generation using rcgen
|
||||||
|
- Binary storage (cert_data, key_data as Vec<u8>)
|
||||||
|
- PEM conversion methods: `certificate_pem()`, `private_key_pem()`
|
||||||
|
- Separate endpoints: `/certificates/{id}` (basic) and `/certificates/{id}/details` (with PEM)
|
||||||
|
|
||||||
|
### 4. Template-Based Architecture
|
||||||
|
Templates define reusable inbound configurations that can be bound to servers with:
|
||||||
|
- Port overrides
|
||||||
|
- Certificate assignments
|
||||||
|
- Active/inactive states
|
||||||
|
|
||||||
|
## Current Status & Issues
|
||||||
|
|
||||||
|
### ✅ Working Features
|
||||||
|
- Complete CRUD for all entities
|
||||||
|
- Real xray gRPC communication with TLS certificate support
|
||||||
|
- Toast notification system (absolute positioning)
|
||||||
|
- Modal-based editing interface
|
||||||
|
- Password masking in database URL logging
|
||||||
|
- Certificate details display with PEM content
|
||||||
|
|
||||||
|
### 🔧 Recent Fixes
|
||||||
|
- **StreamConfig Integration**: Fixed TLS certificate configuration in xray gRPC calls
|
||||||
|
- **Certificate Display**: Added `/certificates/{id}/details` endpoint for PEM viewing
|
||||||
|
- **Active/Inactive Management**: Inbounds automatically added/removed from xray when toggled
|
||||||
|
|
||||||
|
### ⚠️ Current Issue
|
||||||
|
User reported certificate details still showing "Not available" - this was just fixed with the new `/certificates/{id}/details` endpoint.
|
||||||
|
|
||||||
|
## API Structure
|
||||||
|
|
||||||
|
### Endpoints
|
||||||
|
```
|
||||||
|
/api/users/* # User management
|
||||||
|
/api/servers/* # Server management
|
||||||
|
/api/servers/{id}/inbounds/* # Server inbound management
|
||||||
|
/api/certificates/* # Certificate management (basic)
|
||||||
|
/api/certificates/{id}/details # Certificate details with PEM
|
||||||
|
/api/templates/* # Template management
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
- **Default port**: 8080 (user tested on 8082)
|
||||||
|
- **Database**: PostgreSQL with auto-migration
|
||||||
|
- **Environment variables**: XRAY_ADMIN__* prefix
|
||||||
|
- **Config file**: config.toml support
|
||||||
|
|
||||||
|
## Testing Commands
|
||||||
|
```bash
|
||||||
|
# Run application
|
||||||
|
cargo run -- --host 0.0.0.0 --port 8082
|
||||||
|
|
||||||
|
# Test xray integration
|
||||||
|
xray api lsi --server 100.91.97.36:10085
|
||||||
|
|
||||||
|
# Check compilation
|
||||||
|
cargo check
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Implementation Details
|
||||||
|
|
||||||
|
### Xray TLS Configuration
|
||||||
|
**Location**: `src/services/xray/client.rs:185-194`
|
||||||
|
```rust
|
||||||
|
let stream_config = StreamConfig {
|
||||||
|
protocol_name: "tcp".to_string(),
|
||||||
|
security_type: "tls".to_string(),
|
||||||
|
security_settings: vec![tls_message],
|
||||||
|
// ... other fields
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Certificate Data Flow
|
||||||
|
1. User creates certificate via web interface
|
||||||
|
2. PEM data stored as binary in database (cert_data, key_data)
|
||||||
|
3. When creating inbound, certificate fetched and converted back to PEM
|
||||||
|
4. PEM passed to xray gRPC client for TLS configuration
|
||||||
|
|
||||||
|
### Database Migrations
|
||||||
|
Auto-migration enabled by default. All entities use UUID primary keys with timestamps.
|
||||||
|
|
||||||
|
## Development Notes
|
||||||
|
- **User prefers English in code/comments**
|
||||||
|
- **No emoji usage unless explicitly requested**
|
||||||
|
- **Prefer editing existing files over creating new ones**
|
||||||
|
- **Real xray-core integration required** (user specifically asked not to abandon it)
|
||||||
|
- **Application tested with actual xray server at 100.91.97.36:10085**
|
||||||
|
|
||||||
|
## Last Working State
|
||||||
|
All features implemented and compiling. StreamConfig properly configured for TLS certificate transmission to xray servers. Certificate viewing endpoint fixed for PEM display.
|
||||||
58
README.md
58
README.md
@@ -1,58 +0,0 @@
|
|||||||
<p align="center">
|
|
||||||
<h1 align="center">OutFleet: Master Your OutLine VPN</h1>
|
|
||||||
|
|
||||||
<p align="center">
|
|
||||||
Streamline OutLine VPN experience. OutFleet offers centralized key control for many servers, users and always-updated Dynamic Access Keys instead of ss:// links
|
|
||||||
<br/>
|
|
||||||
<br/>
|
|
||||||
<a href="https://github.com/house-of-vanity/outfleet/issues">Request Feature</a>
|
|
||||||
</p>
|
|
||||||
</p>
|
|
||||||
|
|
||||||
  
|
|
||||||
|
|
||||||
<img width="1282" height="840" alt="image" src="https://github.com/user-attachments/assets/3b66f928-853b-4af0-8968-1eacb2c16a1c" />
|
|
||||||
|
|
||||||
## About The Project
|
|
||||||
|
|
||||||
### Key Features
|
|
||||||
|
|
||||||
* Centralized Key Management
|
|
||||||
Administer user keys from one unified dashboard. Add, delete, and allocate users to specific servers effortlessly.
|
|
||||||
|
|
||||||
* 
|
|
||||||
Distribute ssconf:// links that are always up-to-date with your current server configurations. Eliminate the need for manual link updates.
|
|
||||||
|
|
||||||
### Why OutFleet?
|
|
||||||
Tired of juggling multiple home servers and the headache of individually managing users on each? OutFleet was born out of the frustration of not finding a suitable tool for efficiently managing a bunch of home servers.
|
|
||||||
|
|
||||||
## Built With
|
|
||||||
|
|
||||||
Django, Postgres SQL and hassle-free deployment using Kubernetes or docker-compose
|
|
||||||
|
|
||||||
### Installation
|
|
||||||
|
|
||||||
#### Docker compose
|
|
||||||
Docker deploy is easy:
|
|
||||||
```
|
|
||||||
docker-compose up -d
|
|
||||||
```
|
|
||||||
#### Kubernetes
|
|
||||||
I use ArgoCD for deployment. [Take a look](https://gt.hexor.cy/ab/homelab/src/branch/main/k8s/apps/vpn) to `outfleet.yaml` file for manifests.
|
|
||||||
|
|
||||||
|
|
||||||
#### Setup sslocal service on Windows
|
|
||||||
Shadowsocks servers can be used directly with **sslocal**. For automatic and regular password updates, you can create a Task Scheduler job to rotate the passwords when they change, as OutFleet manages the passwords automatically.
|
|
||||||
You may run script in Admin PowerShell to create Task for autorun **sslocal** and update connection details automatically using Outfleet API
|
|
||||||
```PowerShell
|
|
||||||
Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass -Force; Invoke-Expression (Invoke-WebRequest -Uri "https://raw.githubusercontent.com/house-of-vanity/OutFleet/refs/heads/master/tools/windows-helper.ps1" -UseBasicParsing).Content
|
|
||||||
```
|
|
||||||
[Firefox PluginProxy Switcher and Manager](https://addons.mozilla.org/en-US/firefox/addon/proxy-switcher-and-manager/) && [Chrome plugin Proxy Switcher and Manager](https://chromewebstore.google.com/detail/proxy-switcher-and-manage/onnfghpihccifgojkpnnncpagjcdbjod)
|
|
||||||
|
|
||||||
Keep in mind that all user keys are stored in a single **config.yaml** file. If this file is lost, user keys will remain on the servers, but OutFleet will lose the ability to manage them. Handle with extreme caution and use backups.
|
|
||||||
|
|
||||||
## Authors
|
|
||||||
|
|
||||||
* **UltraDesu** - *Humble amateur developer* - [UltraDesu](https://github.com/house-of-vanity) - *Author*
|
|
||||||
* **Contributors**
|
|
||||||
* * @Sanapach
|
|
||||||
21
SECURITY.md
21
SECURITY.md
@@ -1,21 +0,0 @@
|
|||||||
# Security Policy
|
|
||||||
|
|
||||||
## Supported Versions
|
|
||||||
|
|
||||||
Use this section to tell people about which versions of your project are
|
|
||||||
currently being supported with security updates.
|
|
||||||
|
|
||||||
| Version | Supported |
|
|
||||||
| ------- | ------------------ |
|
|
||||||
| 5.1.x | :white_check_mark: |
|
|
||||||
| 5.0.x | :x: |
|
|
||||||
| 4.0.x | :white_check_mark: |
|
|
||||||
| < 4.0 | :x: |
|
|
||||||
|
|
||||||
## Reporting a Vulnerability
|
|
||||||
|
|
||||||
Use this section to tell people how to report a vulnerability.
|
|
||||||
|
|
||||||
Tell them where to go, how often they can expect to get an update on a
|
|
||||||
reported vulnerability, what to expect if the vulnerability is accepted or
|
|
||||||
declined, etc.
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
platforms:
|
|
||||||
- name: amd64
|
|
||||||
architecture: amd64
|
|
||||||
- name: arm64
|
|
||||||
architecture: arm64
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
-- Проверить количество записей без acl_link_id
|
|
||||||
SELECT COUNT(*) as total_without_link
|
|
||||||
FROM vpn_accesslog
|
|
||||||
WHERE acl_link_id IS NULL OR acl_link_id = '';
|
|
||||||
|
|
||||||
-- Проверить общее количество записей
|
|
||||||
SELECT COUNT(*) as total_records FROM vpn_accesslog;
|
|
||||||
|
|
||||||
-- Показать распределение по датам (последние записи без ссылок)
|
|
||||||
SELECT DATE(timestamp) as date, COUNT(*) as count
|
|
||||||
FROM vpn_accesslog
|
|
||||||
WHERE acl_link_id IS NULL OR acl_link_id = ''
|
|
||||||
GROUP BY DATE(timestamp)
|
|
||||||
ORDER BY date DESC
|
|
||||||
LIMIT 10;
|
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
-- ВАРИАНТ 1: Удалить ВСЕ записи без acl_link_id
|
|
||||||
-- ОСТОРОЖНО! Это удалит все старые логи
|
|
||||||
DELETE FROM vpn_accesslog
|
|
||||||
WHERE acl_link_id IS NULL OR acl_link_id = '';
|
|
||||||
|
|
||||||
-- ВАРИАНТ 2: Удалить записи без acl_link_id старше 30 дней
|
|
||||||
-- Более безопасный вариант
|
|
||||||
DELETE FROM vpn_accesslog
|
|
||||||
WHERE (acl_link_id IS NULL OR acl_link_id = '')
|
|
||||||
AND timestamp < NOW() - INTERVAL 30 DAY;
|
|
||||||
|
|
||||||
-- ВАРИАНТ 3: Удалить записи без acl_link_id старше 7 дней
|
|
||||||
-- Еще более консервативный подход
|
|
||||||
DELETE FROM vpn_accesslog
|
|
||||||
WHERE (acl_link_id IS NULL OR acl_link_id = '')
|
|
||||||
AND timestamp < NOW() - INTERVAL 7 DAY;
|
|
||||||
|
|
||||||
-- ВАРИАНТ 4: Оставить только последние 1000 записей без ссылок (для истории)
|
|
||||||
DELETE FROM vpn_accesslog
|
|
||||||
WHERE (acl_link_id IS NULL OR acl_link_id = '')
|
|
||||||
AND id NOT IN (
|
|
||||||
SELECT id FROM (
|
|
||||||
SELECT id FROM vpn_accesslog
|
|
||||||
WHERE acl_link_id IS NULL OR acl_link_id = ''
|
|
||||||
ORDER BY timestamp DESC
|
|
||||||
LIMIT 1000
|
|
||||||
) AS recent_logs
|
|
||||||
);
|
|
||||||
|
|
||||||
-- ВАРИАНТ 5: Поэтапное удаление (для больших БД)
|
|
||||||
-- Удаляем по 10000 записей за раз
|
|
||||||
DELETE FROM vpn_accesslog
|
|
||||||
WHERE (acl_link_id IS NULL OR acl_link_id = '')
|
|
||||||
AND timestamp < NOW() - INTERVAL 30 DAY
|
|
||||||
LIMIT 10000;
|
|
||||||
@@ -1,102 +0,0 @@
|
|||||||
services:
|
|
||||||
web_ui:
|
|
||||||
image: outfleet:local
|
|
||||||
container_name: outfleet-web
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
ports:
|
|
||||||
- "8000:8000"
|
|
||||||
environment:
|
|
||||||
- POSTGRES_HOST=postgres
|
|
||||||
- POSTGRES_USER=postgres
|
|
||||||
- POSTGRES_PASSWORD=postgres
|
|
||||||
- EXTERNAL_ADDRESS=http://127.0.0.1:8000
|
|
||||||
- CELERY_BROKER_URL=redis://redis:6379/0
|
|
||||||
depends_on:
|
|
||||||
postgres:
|
|
||||||
condition: service_healthy
|
|
||||||
redis:
|
|
||||||
condition: service_healthy
|
|
||||||
volumes:
|
|
||||||
- .:/app
|
|
||||||
working_dir: /app
|
|
||||||
command: >
|
|
||||||
sh -c "sleep 1 &&
|
|
||||||
python manage.py makemigrations &&
|
|
||||||
python manage.py migrate &&
|
|
||||||
python manage.py create_admin &&
|
|
||||||
python manage.py runserver 0.0.0.0:8000"
|
|
||||||
|
|
||||||
worker:
|
|
||||||
image: outfleet:local
|
|
||||||
container_name: outfleet-worker
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
environment:
|
|
||||||
- POSTGRES_HOST=postgres
|
|
||||||
- POSTGRES_USER=postgres
|
|
||||||
- POSTGRES_PASSWORD=postgres
|
|
||||||
- CELERY_BROKER_URL=redis://redis:6379/0
|
|
||||||
depends_on:
|
|
||||||
postgres:
|
|
||||||
condition: service_healthy
|
|
||||||
redis:
|
|
||||||
condition: service_healthy
|
|
||||||
volumes:
|
|
||||||
- .:/app
|
|
||||||
working_dir: /app
|
|
||||||
command: >
|
|
||||||
sh -c "sleep 3 && celery -A mysite worker"
|
|
||||||
|
|
||||||
beat:
|
|
||||||
image: outfleet:local
|
|
||||||
container_name: outfleet-beat
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
environment:
|
|
||||||
- POSTGRES_HOST=postgres
|
|
||||||
- POSTGRES_USER=postgres
|
|
||||||
- POSTGRES_PASSWORD=postgres
|
|
||||||
- CELERY_BROKER_URL=redis://redis:6379/0
|
|
||||||
depends_on:
|
|
||||||
postgres:
|
|
||||||
condition: service_healthy
|
|
||||||
redis:
|
|
||||||
condition: service_healthy
|
|
||||||
volumes:
|
|
||||||
- .:/app
|
|
||||||
working_dir: /app
|
|
||||||
command: >
|
|
||||||
sh -c "sleep 3 && celery -A mysite beat"
|
|
||||||
|
|
||||||
postgres:
|
|
||||||
image: postgres:15
|
|
||||||
container_name: postgres
|
|
||||||
environment:
|
|
||||||
POSTGRES_USER: postgres
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
POSTGRES_DB: outfleet
|
|
||||||
ports:
|
|
||||||
- "5432:5432"
|
|
||||||
volumes:
|
|
||||||
- postgres_data:/var/lib/postgresql/data
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "pg_isready -U postgres"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 5
|
|
||||||
|
|
||||||
redis:
|
|
||||||
image: redis:7
|
|
||||||
container_name: redis
|
|
||||||
ports:
|
|
||||||
- "6379:6379"
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "redis-cli", "ping"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 3
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
postgres_data:
|
|
||||||
|
|
||||||
22
manage.py
22
manage.py
@@ -1,22 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
"""Django's command-line utility for administrative tasks."""
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""Run administrative tasks."""
|
|
||||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
|
|
||||||
try:
|
|
||||||
from django.core.management import execute_from_command_line
|
|
||||||
except ImportError as exc:
|
|
||||||
raise ImportError(
|
|
||||||
"Couldn't import Django. Are you sure it's installed and "
|
|
||||||
"available on your PYTHONPATH environment variable? Did you "
|
|
||||||
"forget to activate a virtual environment?"
|
|
||||||
) from exc
|
|
||||||
execute_from_command_line(sys.argv)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
from .celery import app as celery_app
|
|
||||||
|
|
||||||
__all__ = ('celery_app',)
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
ASGI config for mysite project.
|
|
||||||
|
|
||||||
It exposes the ASGI callable as a module-level variable named ``application``.
|
|
||||||
|
|
||||||
For more information on this file, see
|
|
||||||
https://docs.djangoproject.com/en/5.1/howto/deployment/asgi/
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from django.core.asgi import get_asgi_application
|
|
||||||
|
|
||||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
|
|
||||||
|
|
||||||
application = get_asgi_application()
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
import logging
|
|
||||||
import os
|
|
||||||
|
|
||||||
from celery import Celery
|
|
||||||
from celery import shared_task
|
|
||||||
from celery.schedules import crontab
|
|
||||||
|
|
||||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
app = Celery('mysite')
|
|
||||||
|
|
||||||
app.conf.beat_schedule = {
|
|
||||||
'periodical_servers_sync': {
|
|
||||||
'task': 'sync_all_servers',
|
|
||||||
'schedule': crontab(minute=0, hour='*/3'), # Every 3 hours
|
|
||||||
},
|
|
||||||
'cleanup_old_task_logs': {
|
|
||||||
'task': 'cleanup_task_logs',
|
|
||||||
'schedule': crontab(hour=2, minute=0), # Daily at 2 AM
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
app.config_from_object('django.conf:settings', namespace='CELERY')
|
|
||||||
|
|
||||||
# Additional celery settings for better logging and performance
|
|
||||||
app.conf.update(
|
|
||||||
# Keep detailed results for debugging
|
|
||||||
result_expires=3600, # 1 hour
|
|
||||||
task_always_eager=False,
|
|
||||||
task_eager_propagates=True,
|
|
||||||
# Improve task tracking
|
|
||||||
task_track_started=True,
|
|
||||||
task_send_sent_event=True,
|
|
||||||
# Clean up settings
|
|
||||||
result_backend_cleanup_interval=300, # Clean up every 5 minutes
|
|
||||||
)
|
|
||||||
|
|
||||||
app.autodiscover_tasks()
|
|
||||||
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
from django.conf import settings
|
|
||||||
import subprocess
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
def version_info(request):
|
|
||||||
"""Add version information to template context"""
|
|
||||||
|
|
||||||
git_commit = getattr(settings, 'GIT_COMMIT', None)
|
|
||||||
git_commit_short = getattr(settings, 'GIT_COMMIT_SHORT', None)
|
|
||||||
build_date = getattr(settings, 'BUILD_DATE', None)
|
|
||||||
|
|
||||||
if not git_commit or git_commit == 'development':
|
|
||||||
try:
|
|
||||||
base_dir = getattr(settings, 'BASE_DIR', Path(__file__).resolve().parent.parent)
|
|
||||||
result = subprocess.run(['git', 'rev-parse', 'HEAD'],
|
|
||||||
capture_output=True, text=True, cwd=base_dir, timeout=5)
|
|
||||||
if result.returncode == 0:
|
|
||||||
git_commit = result.stdout.strip()
|
|
||||||
git_commit_short = git_commit[:7]
|
|
||||||
|
|
||||||
date_result = subprocess.run(['git', 'log', '-1', '--format=%ci'],
|
|
||||||
capture_output=True, text=True, cwd=base_dir, timeout=5)
|
|
||||||
if date_result.returncode == 0:
|
|
||||||
build_date = date_result.stdout.strip()
|
|
||||||
except (subprocess.TimeoutExpired, subprocess.SubprocessError, FileNotFoundError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
if not git_commit:
|
|
||||||
git_commit = 'development'
|
|
||||||
if not git_commit_short:
|
|
||||||
git_commit_short = 'dev'
|
|
||||||
if not build_date:
|
|
||||||
build_date = 'unknown'
|
|
||||||
|
|
||||||
return {
|
|
||||||
'VERSION_INFO': {
|
|
||||||
'git_commit': git_commit,
|
|
||||||
'git_commit_short': git_commit_short,
|
|
||||||
'build_date': build_date,
|
|
||||||
'is_development': git_commit_short == 'dev'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
from django.contrib.auth import authenticate, login
|
|
||||||
from django.utils.deprecation import MiddlewareMixin
|
|
||||||
|
|
||||||
class RequestLogger:
|
|
||||||
def __init__(self, get_response):
|
|
||||||
self.get_response = get_response
|
|
||||||
|
|
||||||
def __call__(self, request):
|
|
||||||
print(f"Original: {request.build_absolute_uri()}")
|
|
||||||
print(f"Path : {request.path}")
|
|
||||||
|
|
||||||
response = self.get_response(request)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
class AutoLoginMiddleware(MiddlewareMixin):
|
|
||||||
def process_request(self, request):
|
|
||||||
if not request.user.is_authenticated:
|
|
||||||
user = authenticate(username='admin', password='admin')
|
|
||||||
if user:
|
|
||||||
login(request, user)
|
|
||||||
@@ -1,233 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
import os
|
|
||||||
import environ
|
|
||||||
from django.core.management.utils import get_random_secret_key
|
|
||||||
|
|
||||||
|
|
||||||
ENV = environ.Env(
|
|
||||||
DEBUG=(bool, False)
|
|
||||||
)
|
|
||||||
|
|
||||||
environ.Env.read_env()
|
|
||||||
|
|
||||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
|
||||||
SECRET_KEY=ENV('SECRET_KEY', default='django-insecure-change-me-in-production')
|
|
||||||
TIME_ZONE = ENV('TIMEZONE', default='Asia/Nicosia')
|
|
||||||
EXTERNAL_ADDRESS = ENV('EXTERNAL_ADDRESS', default='https://example.org')
|
|
||||||
|
|
||||||
CELERY_BROKER_URL = ENV('CELERY_BROKER_URL', default='redis://localhost:6379/0')
|
|
||||||
CELERY_RESULT_BACKEND = 'django-db'
|
|
||||||
CELERY_TIMEZONE = ENV('TIMEZONE', default='Asia/Nicosia')
|
|
||||||
CELERY_ACCEPT_CONTENT = ['json']
|
|
||||||
CELERY_TASK_SERIALIZER = 'json'
|
|
||||||
CELERY_RESULT_SERIALIZER = 'json'
|
|
||||||
CELERY_RESULT_EXTENDED = True
|
|
||||||
|
|
||||||
# Celery Beat Schedule
|
|
||||||
from celery.schedules import crontab
|
|
||||||
CELERY_BEAT_SCHEDULE = {
|
|
||||||
'update-user-statistics': {
|
|
||||||
'task': 'update_user_statistics',
|
|
||||||
'schedule': crontab(minute='*/5'), # Every 5 minutes
|
|
||||||
},
|
|
||||||
'cleanup-task-logs': {
|
|
||||||
'task': 'cleanup_task_logs',
|
|
||||||
'schedule': crontab(hour=2, minute=0), # Daily at 2 AM
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
AUTH_USER_MODEL = "vpn.User"
|
|
||||||
|
|
||||||
DEBUG = ENV('DEBUG')
|
|
||||||
|
|
||||||
ALLOWED_HOSTS = ENV.list('ALLOWED_HOSTS', default=["*"])
|
|
||||||
|
|
||||||
CORS_ALLOW_ALL_ORIGINS = True
|
|
||||||
CORS_ALLOW_CREDENTIALS = True
|
|
||||||
CSRF_TRUSTED_ORIGINS = ENV.list('CSRF_TRUSTED_ORIGINS', default=[])
|
|
||||||
|
|
||||||
STATIC_ROOT = BASE_DIR / "staticfiles"
|
|
||||||
|
|
||||||
LOGIN_REDIRECT_URL = '/'
|
|
||||||
|
|
||||||
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
|
||||||
|
|
||||||
LOGGING = {
|
|
||||||
'version': 1,
|
|
||||||
'disable_existing_loggers': False,
|
|
||||||
'formatters': {
|
|
||||||
'verbose': {
|
|
||||||
'format': '[{asctime}] {levelname} {name} {message}',
|
|
||||||
'style': '{',
|
|
||||||
},
|
|
||||||
'simple': {
|
|
||||||
'format': '{levelname} {message}',
|
|
||||||
'style': '{',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'handlers': {
|
|
||||||
'console': {
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'class': 'logging.StreamHandler',
|
|
||||||
'formatter': 'verbose',
|
|
||||||
},
|
|
||||||
'file': {
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'class': 'logging.FileHandler',
|
|
||||||
'filename': os.path.join(BASE_DIR, 'debug.log'),
|
|
||||||
'formatter': 'verbose',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'loggers': {
|
|
||||||
'django': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'INFO',
|
|
||||||
'propagate': True,
|
|
||||||
},
|
|
||||||
'vpn': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
'telegram_bot': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
'requests': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'INFO',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
'urllib3': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'INFO',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
INSTALLED_APPS = [
|
|
||||||
'jazzmin',
|
|
||||||
'django.contrib.admin',
|
|
||||||
'django.contrib.auth',
|
|
||||||
'django.contrib.contenttypes',
|
|
||||||
'django.contrib.sessions',
|
|
||||||
'django.contrib.messages',
|
|
||||||
'django.contrib.staticfiles',
|
|
||||||
'polymorphic',
|
|
||||||
'corsheaders',
|
|
||||||
'django_celery_results',
|
|
||||||
'django_celery_beat',
|
|
||||||
'vpn',
|
|
||||||
'telegram_bot',
|
|
||||||
]
|
|
||||||
|
|
||||||
MIDDLEWARE = [
|
|
||||||
'django.middleware.security.SecurityMiddleware',
|
|
||||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
|
||||||
'django.contrib.sessions.middleware.SessionMiddleware',
|
|
||||||
'django.middleware.common.CommonMiddleware',
|
|
||||||
'django.middleware.csrf.CsrfViewMiddleware',
|
|
||||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
|
||||||
'mysite.middleware.AutoLoginMiddleware',
|
|
||||||
'django.contrib.messages.middleware.MessageMiddleware',
|
|
||||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
|
||||||
'corsheaders.middleware.CorsMiddleware',
|
|
||||||
|
|
||||||
]
|
|
||||||
|
|
||||||
ROOT_URLCONF = 'mysite.urls'
|
|
||||||
|
|
||||||
GIT_COMMIT = ENV('GIT_COMMIT', default='development')
|
|
||||||
GIT_COMMIT_SHORT = ENV('GIT_COMMIT_SHORT', default='dev')
|
|
||||||
BUILD_DATE = ENV('BUILD_DATE', default='unknown')
|
|
||||||
|
|
||||||
TEMPLATES = [
|
|
||||||
{
|
|
||||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
|
||||||
'DIRS': [
|
|
||||||
os.path.join(BASE_DIR, 'templates'),
|
|
||||||
os.path.join(BASE_DIR, 'vpn', 'templates')
|
|
||||||
],
|
|
||||||
'APP_DIRS': True,
|
|
||||||
'OPTIONS': {
|
|
||||||
'context_processors': [
|
|
||||||
'django.template.context_processors.debug',
|
|
||||||
'django.template.context_processors.request',
|
|
||||||
'django.contrib.auth.context_processors.auth',
|
|
||||||
'django.contrib.messages.context_processors.messages',
|
|
||||||
'mysite.context_processors.version_info',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
WSGI_APPLICATION = 'mysite.wsgi.application'
|
|
||||||
|
|
||||||
|
|
||||||
# Database
|
|
||||||
# https://docs.djangoproject.com/en/5.1/ref/settings/#databases
|
|
||||||
|
|
||||||
# CREATE USER outfleet WITH PASSWORD 'password';
|
|
||||||
# GRANT ALL PRIVILEGES ON DATABASE outfleet TO outfleet;
|
|
||||||
# ALTER DATABASE outfleet OWNER TO outfleet;
|
|
||||||
|
|
||||||
DATABASES = {
|
|
||||||
'sqlite': {
|
|
||||||
'ENGINE': 'django.db.backends.sqlite3',
|
|
||||||
'NAME': BASE_DIR / 'db.sqlite3',
|
|
||||||
},
|
|
||||||
'default': {
|
|
||||||
'ENGINE': 'django.db.backends.postgresql',
|
|
||||||
'NAME': ENV('POSTGRES_DB', default="outfleet"),
|
|
||||||
'USER': ENV('POSTGRES_USER', default="outfleet"),
|
|
||||||
'PASSWORD': ENV('POSTGRES_PASSWORD', default="outfleet"),
|
|
||||||
'HOST': ENV('POSTGRES_HOST', default='localhost'),
|
|
||||||
'PORT': ENV('POSTGRES_PORT', default='5432'),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Password validation
|
|
||||||
# https://docs.djangoproject.com/en/5.1/ref/settings/#auth-password-validators
|
|
||||||
|
|
||||||
AUTH_PASSWORD_VALIDATORS = [
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
# Internationalization
|
|
||||||
# https://docs.djangoproject.com/en/5.1/topics/i18n/
|
|
||||||
|
|
||||||
LANGUAGE_CODE = 'en-us'
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
USE_I18N = True
|
|
||||||
|
|
||||||
USE_TZ = True
|
|
||||||
|
|
||||||
|
|
||||||
# Static files (CSS, JavaScript, Images)
|
|
||||||
# https://docs.djangoproject.com/en/5.1/howto/static-files/
|
|
||||||
|
|
||||||
STATIC_URL = '/static/'
|
|
||||||
STATICFILES_DIRS = [
|
|
||||||
BASE_DIR / 'static',
|
|
||||||
]
|
|
||||||
# Default primary key field type
|
|
||||||
# https://docs.djangoproject.com/en/5.1/ref/settings/#default-auto-field
|
|
||||||
|
|
||||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
"""
|
|
||||||
URL configuration for mysite project.
|
|
||||||
|
|
||||||
The `urlpatterns` list routes URLs to views. For more information please see:
|
|
||||||
https://docs.djangoproject.com/en/5.1/topics/http/urls/
|
|
||||||
Examples:
|
|
||||||
Function views
|
|
||||||
1. Add an import: from my_app import views
|
|
||||||
2. Add a URL to urlpatterns: path('', views.home, name='home')
|
|
||||||
Class-based views
|
|
||||||
1. Add an import: from other_app.views import Home
|
|
||||||
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
|
|
||||||
Including another URLconf
|
|
||||||
1. Import the include() function: from django.urls import include, path
|
|
||||||
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
|
|
||||||
"""
|
|
||||||
from django.contrib import admin
|
|
||||||
from django.urls import path, include
|
|
||||||
from django.views.generic import RedirectView
|
|
||||||
from vpn.views import shadowsocks, userFrontend, userPortal, xray_subscription
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path('admin/', admin.site.urls),
|
|
||||||
path('ss/<path:link>', shadowsocks, name='shadowsocks'),
|
|
||||||
path('dynamic/<path:link>', shadowsocks, name='shadowsocks'),
|
|
||||||
path('xray/<str:user_hash>', xray_subscription, name='xray_subscription'),
|
|
||||||
path('stat/<path:user_hash>', userFrontend, name='userFrontend'),
|
|
||||||
path('u/<path:user_hash>', userPortal, name='userPortal'),
|
|
||||||
path('', RedirectView.as_view(url='/admin/', permanent=False)),
|
|
||||||
]
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
WSGI config for mysite project.
|
|
||||||
|
|
||||||
It exposes the WSGI callable as a module-level variable named ``application``.
|
|
||||||
|
|
||||||
For more information on this file, see
|
|
||||||
https://docs.djangoproject.com/en/5.1/howto/deployment/wsgi/
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from django.core.wsgi import get_wsgi_application
|
|
||||||
|
|
||||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
|
|
||||||
|
|
||||||
application = get_wsgi_application()
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
django-environ==0.12.0
|
|
||||||
Django==5.1.7
|
|
||||||
celery==5.4.0
|
|
||||||
django-jazzmin==3.0.1
|
|
||||||
django-polymorphic==3.1.0
|
|
||||||
django-cors-headers==4.5.0
|
|
||||||
django-celery-results==2.5.1
|
|
||||||
git+https://github.com/celery/django-celery-beat#egg=django-celery-beat
|
|
||||||
requests==2.32.3
|
|
||||||
PyYaml==6.0.2
|
|
||||||
Markdown==3.7
|
|
||||||
outline-vpn-api==6.3.0
|
|
||||||
Redis==5.2.1
|
|
||||||
whitenoise==6.9.0
|
|
||||||
psycopg2-binary==2.9.10
|
|
||||||
setuptools==75.2.0
|
|
||||||
shortuuid==1.0.13
|
|
||||||
cryptography==45.0.5
|
|
||||||
acme>=2.0.0
|
|
||||||
cloudflare>=4.3.1
|
|
||||||
josepy>=2.0.0
|
|
||||||
python-telegram-bot==21.10
|
|
||||||
60
src/config/args.rs
Normal file
60
src/config/args.rs
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
use clap::Parser;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
#[derive(Parser, Debug)]
|
||||||
|
#[command(name = "xray-admin")]
|
||||||
|
#[command(about = "A web admin panel for managing xray-core VPN proxy servers")]
|
||||||
|
#[command(version)]
|
||||||
|
pub struct Args {
|
||||||
|
/// Configuration file path
|
||||||
|
#[arg(short, long, value_name = "FILE")]
|
||||||
|
pub config: Option<PathBuf>,
|
||||||
|
|
||||||
|
/// Database connection URL
|
||||||
|
#[arg(long, env = "DATABASE_URL")]
|
||||||
|
pub database_url: Option<String>,
|
||||||
|
|
||||||
|
/// Web server host address
|
||||||
|
#[arg(long, default_value = "127.0.0.1")]
|
||||||
|
pub host: Option<String>,
|
||||||
|
|
||||||
|
/// Web server port
|
||||||
|
#[arg(short, long)]
|
||||||
|
pub port: Option<u16>,
|
||||||
|
|
||||||
|
/// Log level (trace, debug, info, warn, error)
|
||||||
|
#[arg(long, default_value = "info")]
|
||||||
|
pub log_level: Option<String>,
|
||||||
|
|
||||||
|
|
||||||
|
/// Validate configuration and exit
|
||||||
|
#[arg(long)]
|
||||||
|
pub validate_config: bool,
|
||||||
|
|
||||||
|
/// Print default configuration and exit
|
||||||
|
#[arg(long)]
|
||||||
|
pub print_default_config: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_args() -> Args {
|
||||||
|
Args::parse()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_args_parsing() {
|
||||||
|
let args = Args::try_parse_from(&[
|
||||||
|
"xray-admin",
|
||||||
|
"--config", "test.toml",
|
||||||
|
"--port", "9090",
|
||||||
|
"--log-level", "debug"
|
||||||
|
]).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(args.config, Some(PathBuf::from("test.toml")));
|
||||||
|
assert_eq!(args.port, Some(9090));
|
||||||
|
assert_eq!(args.log_level, Some("debug".to_string()));
|
||||||
|
}
|
||||||
|
}
|
||||||
104
src/config/env.rs
Normal file
104
src/config/env.rs
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
use std::env;
|
||||||
|
|
||||||
|
/// Environment variable utilities
|
||||||
|
pub struct EnvVars;
|
||||||
|
|
||||||
|
impl EnvVars {
|
||||||
|
/// Get environment variable with fallback
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn get_or_default(key: &str, default: &str) -> String {
|
||||||
|
env::var(key).unwrap_or_else(|_| default.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get required environment variable
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn get_required(key: &str) -> Result<String, env::VarError> {
|
||||||
|
env::var(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if running in development mode
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn is_development() -> bool {
|
||||||
|
matches!(
|
||||||
|
env::var("RUST_ENV").as_deref(),
|
||||||
|
Ok("development") | Ok("dev")
|
||||||
|
) || matches!(
|
||||||
|
env::var("ENVIRONMENT").as_deref(),
|
||||||
|
Ok("development") | Ok("dev")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if running in production mode
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn is_production() -> bool {
|
||||||
|
matches!(
|
||||||
|
env::var("RUST_ENV").as_deref(),
|
||||||
|
Ok("production") | Ok("prod")
|
||||||
|
) || matches!(
|
||||||
|
env::var("ENVIRONMENT").as_deref(),
|
||||||
|
Ok("production") | Ok("prod")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get database URL from environment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn database_url() -> Option<String> {
|
||||||
|
env::var("DATABASE_URL").ok()
|
||||||
|
.or_else(|| env::var("XRAY_ADMIN__DATABASE__URL").ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get telegram bot token from environment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn telegram_token() -> Option<String> {
|
||||||
|
env::var("TELEGRAM_BOT_TOKEN").ok()
|
||||||
|
.or_else(|| env::var("XRAY_ADMIN__TELEGRAM__BOT_TOKEN").ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get JWT secret from environment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn jwt_secret() -> Option<String> {
|
||||||
|
env::var("JWT_SECRET").ok()
|
||||||
|
.or_else(|| env::var("XRAY_ADMIN__WEB__JWT_SECRET").ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print environment info for debugging
|
||||||
|
pub fn print_env_info() {
|
||||||
|
tracing::debug!("Environment information:");
|
||||||
|
tracing::debug!(" RUST_ENV: {:?}", env::var("RUST_ENV"));
|
||||||
|
tracing::debug!(" ENVIRONMENT: {:?}", env::var("ENVIRONMENT"));
|
||||||
|
tracing::debug!(" DATABASE_URL: {}",
|
||||||
|
if env::var("DATABASE_URL").is_ok() { "set" } else { "not set" }
|
||||||
|
);
|
||||||
|
tracing::debug!(" TELEGRAM_BOT_TOKEN: {}",
|
||||||
|
if env::var("TELEGRAM_BOT_TOKEN").is_ok() { "set" } else { "not set" }
|
||||||
|
);
|
||||||
|
tracing::debug!(" JWT_SECRET: {}",
|
||||||
|
if env::var("JWT_SECRET").is_ok() { "set" } else { "not set" }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::env;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_get_or_default() {
|
||||||
|
let result = EnvVars::get_or_default("NON_EXISTENT_VAR", "default_value");
|
||||||
|
assert_eq!(result, "default_value");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_environment_detection() {
|
||||||
|
env::set_var("RUST_ENV", "development");
|
||||||
|
assert!(EnvVars::is_development());
|
||||||
|
assert!(!EnvVars::is_production());
|
||||||
|
|
||||||
|
env::set_var("RUST_ENV", "production");
|
||||||
|
assert!(!EnvVars::is_development());
|
||||||
|
assert!(EnvVars::is_production());
|
||||||
|
|
||||||
|
env::remove_var("RUST_ENV");
|
||||||
|
}
|
||||||
|
}
|
||||||
165
src/config/file.rs
Normal file
165
src/config/file.rs
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
use anyhow::{Context, Result};
|
||||||
|
use std::fs;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use super::AppConfig;
|
||||||
|
|
||||||
|
/// Configuration file utilities
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub struct ConfigFile;
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl ConfigFile {
|
||||||
|
/// Load configuration from TOML file
|
||||||
|
pub fn load_toml<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let content = fs::read_to_string(&path)
|
||||||
|
.with_context(|| format!("Failed to read config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
let config: AppConfig = toml::from_str(&content)
|
||||||
|
.with_context(|| format!("Failed to parse TOML config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load configuration from YAML file
|
||||||
|
pub fn load_yaml<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let content = fs::read_to_string(&path)
|
||||||
|
.with_context(|| format!("Failed to read config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
let config: AppConfig = serde_yaml::from_str(&content)
|
||||||
|
.with_context(|| format!("Failed to parse YAML config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load configuration from JSON file
|
||||||
|
pub fn load_json<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let content = fs::read_to_string(&path)
|
||||||
|
.with_context(|| format!("Failed to read config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
let config: AppConfig = serde_json::from_str(&content)
|
||||||
|
.with_context(|| format!("Failed to parse JSON config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Auto-detect format and load configuration file
|
||||||
|
pub fn load_auto<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let path = path.as_ref();
|
||||||
|
|
||||||
|
match path.extension().and_then(|ext| ext.to_str()) {
|
||||||
|
Some("toml") => Self::load_toml(path),
|
||||||
|
Some("yaml") | Some("yml") => Self::load_yaml(path),
|
||||||
|
Some("json") => Self::load_json(path),
|
||||||
|
_ => {
|
||||||
|
// Try TOML first, then YAML, then JSON
|
||||||
|
Self::load_toml(path)
|
||||||
|
.or_else(|_| Self::load_yaml(path))
|
||||||
|
.or_else(|_| Self::load_json(path))
|
||||||
|
.with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to load config file '{}' - tried TOML, YAML, and JSON formats",
|
||||||
|
path.display()
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save configuration to TOML file
|
||||||
|
pub fn save_toml<P: AsRef<Path>>(config: &AppConfig, path: P) -> Result<()> {
|
||||||
|
let content = toml::to_string_pretty(config)
|
||||||
|
.context("Failed to serialize config to TOML")?;
|
||||||
|
|
||||||
|
fs::write(&path, content)
|
||||||
|
.with_context(|| format!("Failed to write config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save configuration to YAML file
|
||||||
|
pub fn save_yaml<P: AsRef<Path>>(config: &AppConfig, path: P) -> Result<()> {
|
||||||
|
let content = serde_yaml::to_string(config)
|
||||||
|
.context("Failed to serialize config to YAML")?;
|
||||||
|
|
||||||
|
fs::write(&path, content)
|
||||||
|
.with_context(|| format!("Failed to write config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save configuration to JSON file
|
||||||
|
pub fn save_json<P: AsRef<Path>>(config: &AppConfig, path: P) -> Result<()> {
|
||||||
|
let content = serde_json::to_string_pretty(config)
|
||||||
|
.context("Failed to serialize config to JSON")?;
|
||||||
|
|
||||||
|
fs::write(&path, content)
|
||||||
|
.with_context(|| format!("Failed to write config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if config file exists and is readable
|
||||||
|
pub fn exists_and_readable<P: AsRef<Path>>(path: P) -> bool {
|
||||||
|
let path = path.as_ref();
|
||||||
|
path.exists() && path.is_file() && fs::metadata(path).map(|m| !m.permissions().readonly()).unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find default config file in common locations
|
||||||
|
pub fn find_default() -> Option<std::path::PathBuf> {
|
||||||
|
let candidates = [
|
||||||
|
"config.toml",
|
||||||
|
"config.yaml",
|
||||||
|
"config.yml",
|
||||||
|
"config.json",
|
||||||
|
"xray-admin.toml",
|
||||||
|
"xray-admin.yaml",
|
||||||
|
"xray-admin.yml",
|
||||||
|
"/etc/xray-admin/config.toml",
|
||||||
|
"/etc/xray-admin/config.yaml",
|
||||||
|
"~/.config/xray-admin/config.toml",
|
||||||
|
];
|
||||||
|
|
||||||
|
for candidate in &candidates {
|
||||||
|
let path = std::path::Path::new(candidate);
|
||||||
|
if Self::exists_and_readable(path) {
|
||||||
|
return Some(path.to_path_buf());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use tempfile::NamedTempFile;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_save_and_load_toml() -> Result<()> {
|
||||||
|
let config = AppConfig::default();
|
||||||
|
let temp_file = NamedTempFile::new()?;
|
||||||
|
|
||||||
|
ConfigFile::save_toml(&config, temp_file.path())?;
|
||||||
|
let loaded_config = ConfigFile::load_toml(temp_file.path())?;
|
||||||
|
|
||||||
|
assert_eq!(config.web.port, loaded_config.web.port);
|
||||||
|
assert_eq!(config.database.max_connections, loaded_config.database.max_connections);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_auto_detect_format() -> Result<()> {
|
||||||
|
let config = AppConfig::default();
|
||||||
|
|
||||||
|
// Test with .toml extension
|
||||||
|
let temp_file = NamedTempFile::with_suffix(".toml")?;
|
||||||
|
ConfigFile::save_toml(&config, temp_file.path())?;
|
||||||
|
let loaded_config = ConfigFile::load_auto(temp_file.path())?;
|
||||||
|
assert_eq!(config.web.port, loaded_config.web.port);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
244
src/config/mod.rs
Normal file
244
src/config/mod.rs
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
pub mod args;
|
||||||
|
pub mod env;
|
||||||
|
pub mod file;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct AppConfig {
|
||||||
|
pub database: DatabaseConfig,
|
||||||
|
pub web: WebConfig,
|
||||||
|
pub telegram: TelegramConfig,
|
||||||
|
pub xray: XrayConfig,
|
||||||
|
pub logging: LoggingConfig,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct DatabaseConfig {
|
||||||
|
#[validate(url)]
|
||||||
|
pub url: String,
|
||||||
|
#[validate(range(min = 1, max = 100))]
|
||||||
|
pub max_connections: u32,
|
||||||
|
#[validate(range(min = 1))]
|
||||||
|
pub connection_timeout: u64,
|
||||||
|
pub auto_migrate: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct WebConfig {
|
||||||
|
#[validate(ip)]
|
||||||
|
pub host: String,
|
||||||
|
#[validate(range(min = 1024, max = 65535))]
|
||||||
|
pub port: u16,
|
||||||
|
pub cors_origins: Vec<String>,
|
||||||
|
pub jwt_secret: String,
|
||||||
|
#[validate(range(min = 3600))]
|
||||||
|
pub jwt_expiry: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct TelegramConfig {
|
||||||
|
pub bot_token: String,
|
||||||
|
pub webhook_url: Option<String>,
|
||||||
|
pub admin_chat_ids: Vec<i64>,
|
||||||
|
pub allowed_users: Vec<i64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct XrayConfig {
|
||||||
|
pub default_api_port: u16,
|
||||||
|
pub config_template_path: PathBuf,
|
||||||
|
pub certificates_path: PathBuf,
|
||||||
|
#[validate(range(min = 1))]
|
||||||
|
pub health_check_interval: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct LoggingConfig {
|
||||||
|
pub level: String,
|
||||||
|
pub file_path: Option<PathBuf>,
|
||||||
|
pub json_format: bool,
|
||||||
|
pub max_file_size: Option<u64>,
|
||||||
|
pub max_files: Option<u32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for DatabaseConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
url: "postgresql://xray:password@localhost/xray_admin".to_string(),
|
||||||
|
max_connections: 10,
|
||||||
|
connection_timeout: 30,
|
||||||
|
auto_migrate: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for WebConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
host: "127.0.0.1".to_string(),
|
||||||
|
port: 8080,
|
||||||
|
cors_origins: vec!["http://localhost:3000".to_string()],
|
||||||
|
jwt_secret: "your-secret-key-change-in-production".to_string(),
|
||||||
|
jwt_expiry: 86400, // 24 hours
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for TelegramConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
bot_token: "".to_string(),
|
||||||
|
webhook_url: None,
|
||||||
|
admin_chat_ids: vec![],
|
||||||
|
allowed_users: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for XrayConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
default_api_port: 62789,
|
||||||
|
config_template_path: PathBuf::from("./templates"),
|
||||||
|
certificates_path: PathBuf::from("./certs"),
|
||||||
|
health_check_interval: 30,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for LoggingConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
level: "info".to_string(),
|
||||||
|
file_path: None,
|
||||||
|
json_format: false,
|
||||||
|
max_file_size: Some(10 * 1024 * 1024), // 10MB
|
||||||
|
max_files: Some(5),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for AppConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
database: DatabaseConfig::default(),
|
||||||
|
web: WebConfig::default(),
|
||||||
|
telegram: TelegramConfig::default(),
|
||||||
|
xray: XrayConfig::default(),
|
||||||
|
logging: LoggingConfig::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AppConfig {
|
||||||
|
/// Load configuration from multiple sources with priority:
|
||||||
|
/// 1. Command line arguments (highest)
|
||||||
|
/// 2. Environment variables
|
||||||
|
/// 3. Configuration file
|
||||||
|
/// 4. Default values (lowest)
|
||||||
|
pub fn load() -> Result<Self> {
|
||||||
|
let args = args::parse_args();
|
||||||
|
|
||||||
|
let mut builder = config::Config::builder()
|
||||||
|
// Start with defaults
|
||||||
|
.add_source(config::Config::try_from(&AppConfig::default())?);
|
||||||
|
|
||||||
|
// Add configuration file if specified or exists
|
||||||
|
if let Some(config_file) = &args.config {
|
||||||
|
builder = builder.add_source(config::File::from(config_file.as_path()));
|
||||||
|
} else if std::path::Path::new("config.toml").exists() {
|
||||||
|
builder = builder.add_source(config::File::with_name("config"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add environment variables with prefix
|
||||||
|
builder = builder.add_source(
|
||||||
|
config::Environment::with_prefix("XRAY_ADMIN")
|
||||||
|
.separator("__")
|
||||||
|
.try_parsing(true)
|
||||||
|
);
|
||||||
|
|
||||||
|
// Override with command line arguments
|
||||||
|
if let Some(host) = &args.host {
|
||||||
|
builder = builder.set_override("web.host", host.as_str())?;
|
||||||
|
}
|
||||||
|
if let Some(port) = args.port {
|
||||||
|
builder = builder.set_override("web.port", port)?;
|
||||||
|
}
|
||||||
|
if let Some(db_url) = &args.database_url {
|
||||||
|
builder = builder.set_override("database.url", db_url.as_str())?;
|
||||||
|
}
|
||||||
|
if let Some(log_level) = &args.log_level {
|
||||||
|
builder = builder.set_override("logging.level", log_level.as_str())?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let config: AppConfig = builder.build()?.try_deserialize()?;
|
||||||
|
|
||||||
|
// Validate configuration
|
||||||
|
config.validate()?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn display_summary(&self) {
|
||||||
|
tracing::info!("Configuration loaded:");
|
||||||
|
tracing::info!(" Database URL: {}", mask_sensitive(&self.database.url));
|
||||||
|
tracing::info!(" Web server: {}:{}", self.web.host, self.web.port);
|
||||||
|
tracing::info!(" Log level: {}", self.logging.level);
|
||||||
|
tracing::info!(" Telegram bot: {}", if self.telegram.bot_token.is_empty() { "disabled" } else { "enabled" });
|
||||||
|
tracing::info!(" Xray config path: {}", self.xray.config_template_path.display());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mask sensitive information in URLs for logging
|
||||||
|
fn mask_sensitive(url: &str) -> String {
|
||||||
|
// Simple string-based approach to mask passwords
|
||||||
|
if let Some(scheme_end) = url.find("://") {
|
||||||
|
let after_scheme = &url[scheme_end + 3..];
|
||||||
|
if let Some(at_pos) = after_scheme.find('@') {
|
||||||
|
let auth_part = &after_scheme[..at_pos];
|
||||||
|
if let Some(colon_pos) = auth_part.find(':') {
|
||||||
|
// Found user:password@host pattern
|
||||||
|
let user = &auth_part[..colon_pos];
|
||||||
|
let host_part = &after_scheme[at_pos..];
|
||||||
|
return format!("{}://{}:***{}", &url[..scheme_end], user, host_part);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback to URL parsing if simple approach fails
|
||||||
|
if let Ok(parsed) = url::Url::parse(url) {
|
||||||
|
if parsed.password().is_some() {
|
||||||
|
let mut masked = parsed.clone();
|
||||||
|
masked.set_password(Some("***")).unwrap();
|
||||||
|
masked.to_string()
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_default_config_validation() {
|
||||||
|
let config = AppConfig::default();
|
||||||
|
// Default configuration should be valid
|
||||||
|
assert!(config.validate().is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mask_sensitive() {
|
||||||
|
let url = "postgresql://user:password@localhost/db";
|
||||||
|
let masked = mask_sensitive(url);
|
||||||
|
assert!(masked.contains("***"));
|
||||||
|
assert!(!masked.contains("password"));
|
||||||
|
}
|
||||||
|
}
|
||||||
243
src/database/entities/certificate.rs
Normal file
243
src/database/entities/certificate.rs
Normal file
@@ -0,0 +1,243 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{Set, ActiveModelTrait};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "certificates")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
#[sea_orm(column_name = "cert_type")]
|
||||||
|
pub cert_type: String,
|
||||||
|
|
||||||
|
pub domain: String,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub cert_data: Vec<u8>,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub key_data: Vec<u8>,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub chain_data: Option<Vec<u8>>,
|
||||||
|
|
||||||
|
pub expires_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub auto_renew: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(has_many = "super::server::Entity")]
|
||||||
|
Servers,
|
||||||
|
#[sea_orm(has_many = "super::server_inbound::Entity")]
|
||||||
|
ServerInbounds,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::Servers.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbounds.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum CertificateType {
|
||||||
|
SelfSigned,
|
||||||
|
Imported,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CertificateType> for String {
|
||||||
|
fn from(cert_type: CertificateType) -> Self {
|
||||||
|
match cert_type {
|
||||||
|
CertificateType::SelfSigned => "self_signed".to_string(),
|
||||||
|
CertificateType::Imported => "imported".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for CertificateType {
|
||||||
|
fn from(s: String) -> Self {
|
||||||
|
match s.as_str() {
|
||||||
|
"self_signed" => CertificateType::SelfSigned,
|
||||||
|
"imported" => CertificateType::Imported,
|
||||||
|
_ => CertificateType::SelfSigned,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateCertificateDto {
|
||||||
|
pub name: String,
|
||||||
|
pub cert_type: String,
|
||||||
|
pub domain: String,
|
||||||
|
pub auto_renew: bool,
|
||||||
|
#[serde(default)]
|
||||||
|
pub certificate_pem: String,
|
||||||
|
#[serde(default)]
|
||||||
|
pub private_key: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateCertificateDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub auto_renew: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CertificateResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub cert_type: String,
|
||||||
|
pub domain: String,
|
||||||
|
pub expires_at: DateTimeUtc,
|
||||||
|
pub auto_renew: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
pub has_cert_data: bool,
|
||||||
|
pub has_key_data: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CertificateDetailsResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub cert_type: String,
|
||||||
|
pub domain: String,
|
||||||
|
pub expires_at: DateTimeUtc,
|
||||||
|
pub auto_renew: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
pub certificate_pem: String,
|
||||||
|
pub has_private_key: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for CertificateResponse {
|
||||||
|
fn from(cert: Model) -> Self {
|
||||||
|
Self {
|
||||||
|
id: cert.id,
|
||||||
|
name: cert.name,
|
||||||
|
cert_type: cert.cert_type,
|
||||||
|
domain: cert.domain,
|
||||||
|
expires_at: cert.expires_at,
|
||||||
|
auto_renew: cert.auto_renew,
|
||||||
|
created_at: cert.created_at,
|
||||||
|
updated_at: cert.updated_at,
|
||||||
|
has_cert_data: !cert.cert_data.is_empty(),
|
||||||
|
has_key_data: !cert.key_data.is_empty(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for CertificateDetailsResponse {
|
||||||
|
fn from(cert: Model) -> Self {
|
||||||
|
let certificate_pem = cert.certificate_pem();
|
||||||
|
let has_private_key = !cert.key_data.is_empty();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
id: cert.id,
|
||||||
|
name: cert.name,
|
||||||
|
cert_type: cert.cert_type,
|
||||||
|
domain: cert.domain,
|
||||||
|
expires_at: cert.expires_at,
|
||||||
|
auto_renew: cert.auto_renew,
|
||||||
|
created_at: cert.created_at,
|
||||||
|
updated_at: cert.updated_at,
|
||||||
|
certificate_pem,
|
||||||
|
has_private_key,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn is_expired(&self) -> bool {
|
||||||
|
self.expires_at < chrono::Utc::now()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn expires_soon(&self, days: i64) -> bool {
|
||||||
|
let threshold = chrono::Utc::now() + chrono::Duration::days(days);
|
||||||
|
self.expires_at < threshold
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get certificate data as PEM string
|
||||||
|
pub fn certificate_pem(&self) -> String {
|
||||||
|
String::from_utf8_lossy(&self.cert_data).to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get private key data as PEM string
|
||||||
|
pub fn private_key_pem(&self) -> String {
|
||||||
|
String::from_utf8_lossy(&self.key_data).to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_update(self, dto: UpdateCertificateDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(auto_renew) = dto.auto_renew {
|
||||||
|
active_model.auto_renew = Set(auto_renew);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateCertificateDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateCertificateDto) -> Self {
|
||||||
|
Self {
|
||||||
|
name: Set(dto.name),
|
||||||
|
cert_type: Set(dto.cert_type),
|
||||||
|
domain: Set(dto.domain),
|
||||||
|
cert_data: Set(dto.certificate_pem.into_bytes()),
|
||||||
|
key_data: Set(dto.private_key.into_bytes()),
|
||||||
|
chain_data: Set(None),
|
||||||
|
expires_at: Set(chrono::Utc::now() + chrono::Duration::days(90)), // Default 90 days
|
||||||
|
auto_renew: Set(dto.auto_renew),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
278
src/database/entities/inbound_template.rs
Normal file
278
src/database/entities/inbound_template.rs
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{Set, ActiveModelTrait};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "inbound_templates")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
pub description: Option<String>,
|
||||||
|
|
||||||
|
pub protocol: String,
|
||||||
|
|
||||||
|
pub default_port: i32,
|
||||||
|
|
||||||
|
pub base_settings: Value,
|
||||||
|
|
||||||
|
pub stream_settings: Value,
|
||||||
|
|
||||||
|
pub requires_tls: bool,
|
||||||
|
|
||||||
|
pub requires_domain: bool,
|
||||||
|
|
||||||
|
pub variables: Value,
|
||||||
|
|
||||||
|
pub is_active: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(has_many = "super::server_inbound::Entity")]
|
||||||
|
ServerInbounds,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbounds.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum Protocol {
|
||||||
|
Vless,
|
||||||
|
Vmess,
|
||||||
|
Trojan,
|
||||||
|
Shadowsocks,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Protocol> for String {
|
||||||
|
fn from(protocol: Protocol) -> Self {
|
||||||
|
match protocol {
|
||||||
|
Protocol::Vless => "vless".to_string(),
|
||||||
|
Protocol::Vmess => "vmess".to_string(),
|
||||||
|
Protocol::Trojan => "trojan".to_string(),
|
||||||
|
Protocol::Shadowsocks => "shadowsocks".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for Protocol {
|
||||||
|
fn from(s: String) -> Self {
|
||||||
|
match s.as_str() {
|
||||||
|
"vless" => Protocol::Vless,
|
||||||
|
"vmess" => Protocol::Vmess,
|
||||||
|
"trojan" => Protocol::Trojan,
|
||||||
|
"shadowsocks" => Protocol::Shadowsocks,
|
||||||
|
_ => Protocol::Vless,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct TemplateVariable {
|
||||||
|
pub key: String,
|
||||||
|
pub var_type: VariableType,
|
||||||
|
pub required: bool,
|
||||||
|
pub default_value: Option<String>,
|
||||||
|
pub description: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum VariableType {
|
||||||
|
String,
|
||||||
|
Number,
|
||||||
|
Path,
|
||||||
|
Domain,
|
||||||
|
Port,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateInboundTemplateDto {
|
||||||
|
pub name: String,
|
||||||
|
pub protocol: String,
|
||||||
|
pub default_port: i32,
|
||||||
|
pub requires_tls: bool,
|
||||||
|
pub config_template: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateInboundTemplateDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub default_port: Option<i32>,
|
||||||
|
pub base_settings: Option<Value>,
|
||||||
|
pub stream_settings: Option<Value>,
|
||||||
|
pub requires_tls: Option<bool>,
|
||||||
|
pub requires_domain: Option<bool>,
|
||||||
|
pub variables: Option<Vec<TemplateVariable>>,
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct InboundTemplateResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub protocol: String,
|
||||||
|
pub default_port: i32,
|
||||||
|
pub base_settings: Value,
|
||||||
|
pub stream_settings: Value,
|
||||||
|
pub requires_tls: bool,
|
||||||
|
pub requires_domain: bool,
|
||||||
|
pub variables: Vec<TemplateVariable>,
|
||||||
|
pub is_active: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for InboundTemplateResponse {
|
||||||
|
fn from(template: Model) -> Self {
|
||||||
|
let variables = template.get_variables();
|
||||||
|
Self {
|
||||||
|
id: template.id,
|
||||||
|
name: template.name,
|
||||||
|
description: template.description,
|
||||||
|
protocol: template.protocol,
|
||||||
|
default_port: template.default_port,
|
||||||
|
base_settings: template.base_settings,
|
||||||
|
stream_settings: template.stream_settings,
|
||||||
|
requires_tls: template.requires_tls,
|
||||||
|
requires_domain: template.requires_domain,
|
||||||
|
variables,
|
||||||
|
is_active: template.is_active,
|
||||||
|
created_at: template.created_at,
|
||||||
|
updated_at: template.updated_at,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateInboundTemplateDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateInboundTemplateDto) -> Self {
|
||||||
|
// Parse config_template as JSON or use default
|
||||||
|
let config_json: Value = serde_json::from_str(&dto.config_template)
|
||||||
|
.unwrap_or_else(|_| serde_json::json!({}));
|
||||||
|
|
||||||
|
Self {
|
||||||
|
name: Set(dto.name),
|
||||||
|
description: Set(None),
|
||||||
|
protocol: Set(dto.protocol),
|
||||||
|
default_port: Set(dto.default_port),
|
||||||
|
base_settings: Set(config_json.clone()),
|
||||||
|
stream_settings: Set(serde_json::json!({})),
|
||||||
|
requires_tls: Set(dto.requires_tls),
|
||||||
|
requires_domain: Set(false),
|
||||||
|
variables: Set(Value::Array(vec![])),
|
||||||
|
is_active: Set(true),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
pub fn get_variables(&self) -> Vec<TemplateVariable> {
|
||||||
|
serde_json::from_value(self.variables.clone()).unwrap_or_default()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn apply_variables(&self, values: &serde_json::Map<String, Value>) -> Result<(Value, Value), String> {
|
||||||
|
let base_settings = self.base_settings.clone();
|
||||||
|
let stream_settings = self.stream_settings.clone();
|
||||||
|
|
||||||
|
// Replace variables in JSON using simple string replacement
|
||||||
|
let base_str = base_settings.to_string();
|
||||||
|
let stream_str = stream_settings.to_string();
|
||||||
|
|
||||||
|
let mut result_base = base_str;
|
||||||
|
let mut result_stream = stream_str;
|
||||||
|
|
||||||
|
for (key, value) in values {
|
||||||
|
let placeholder = format!("${{{}}}", key);
|
||||||
|
let replacement = match value {
|
||||||
|
Value::String(s) => s.clone(),
|
||||||
|
Value::Number(n) => n.to_string(),
|
||||||
|
_ => value.to_string(),
|
||||||
|
};
|
||||||
|
result_base = result_base.replace(&placeholder, &replacement);
|
||||||
|
result_stream = result_stream.replace(&placeholder, &replacement);
|
||||||
|
}
|
||||||
|
|
||||||
|
let final_base: Value = serde_json::from_str(&result_base)
|
||||||
|
.map_err(|e| format!("Invalid base settings after variable substitution: {}", e))?;
|
||||||
|
let final_stream: Value = serde_json::from_str(&result_stream)
|
||||||
|
.map_err(|e| format!("Invalid stream settings after variable substitution: {}", e))?;
|
||||||
|
|
||||||
|
Ok((final_base, final_stream))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_update(self, dto: UpdateInboundTemplateDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(description) = dto.description {
|
||||||
|
active_model.description = Set(Some(description));
|
||||||
|
}
|
||||||
|
if let Some(default_port) = dto.default_port {
|
||||||
|
active_model.default_port = Set(default_port);
|
||||||
|
}
|
||||||
|
if let Some(base_settings) = dto.base_settings {
|
||||||
|
active_model.base_settings = Set(base_settings);
|
||||||
|
}
|
||||||
|
if let Some(stream_settings) = dto.stream_settings {
|
||||||
|
active_model.stream_settings = Set(stream_settings);
|
||||||
|
}
|
||||||
|
if let Some(requires_tls) = dto.requires_tls {
|
||||||
|
active_model.requires_tls = Set(requires_tls);
|
||||||
|
}
|
||||||
|
if let Some(requires_domain) = dto.requires_domain {
|
||||||
|
active_model.requires_domain = Set(requires_domain);
|
||||||
|
}
|
||||||
|
if let Some(variables) = dto.variables {
|
||||||
|
active_model.variables = Set(serde_json::to_value(variables).unwrap_or(Value::Array(vec![])));
|
||||||
|
}
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
}
|
||||||
168
src/database/entities/inbound_users.rs
Normal file
168
src/database/entities/inbound_users.rs
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{Set, ActiveModelTrait};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "inbound_users")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
|
||||||
|
pub username: String,
|
||||||
|
|
||||||
|
pub email: String,
|
||||||
|
|
||||||
|
pub xray_user_id: String,
|
||||||
|
|
||||||
|
pub level: i32,
|
||||||
|
|
||||||
|
pub is_active: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::server_inbound::Entity",
|
||||||
|
from = "Column::ServerInboundId",
|
||||||
|
to = "super::server_inbound::Column::Id"
|
||||||
|
)]
|
||||||
|
ServerInbound,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbound.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inbound user creation data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateInboundUserDto {
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
pub username: String,
|
||||||
|
pub level: Option<i32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CreateInboundUserDto {
|
||||||
|
/// Generate email in format: username@OutFleet
|
||||||
|
pub fn generate_email(&self) -> String {
|
||||||
|
format!("{}@OutFleet", self.username)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate UUID for xray user
|
||||||
|
pub fn generate_xray_user_id(&self) -> String {
|
||||||
|
Uuid::new_v4().to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inbound user update data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateInboundUserDto {
|
||||||
|
pub username: Option<String>,
|
||||||
|
pub level: Option<i32>,
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateInboundUserDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateInboundUserDto) -> Self {
|
||||||
|
let email = dto.generate_email();
|
||||||
|
let xray_user_id = dto.generate_xray_user_id();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
server_inbound_id: Set(dto.server_inbound_id),
|
||||||
|
username: Set(dto.username),
|
||||||
|
email: Set(email),
|
||||||
|
xray_user_id: Set(xray_user_id),
|
||||||
|
level: Set(dto.level.unwrap_or(0)),
|
||||||
|
is_active: Set(true),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
/// Update this model with data from UpdateInboundUserDto
|
||||||
|
pub fn apply_update(self, dto: UpdateInboundUserDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(username) = dto.username {
|
||||||
|
let new_email = format!("{}@OutFleet", username);
|
||||||
|
active_model.username = Set(username);
|
||||||
|
active_model.email = Set(new_email);
|
||||||
|
}
|
||||||
|
if let Some(level) = dto.level {
|
||||||
|
active_model.level = Set(level);
|
||||||
|
}
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Response model for inbound user
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct InboundUserResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
pub username: String,
|
||||||
|
pub email: String,
|
||||||
|
pub xray_user_id: String,
|
||||||
|
pub level: i32,
|
||||||
|
pub is_active: bool,
|
||||||
|
pub created_at: String,
|
||||||
|
pub updated_at: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for InboundUserResponse {
|
||||||
|
fn from(model: Model) -> Self {
|
||||||
|
Self {
|
||||||
|
id: model.id,
|
||||||
|
server_inbound_id: model.server_inbound_id,
|
||||||
|
username: model.username,
|
||||||
|
email: model.email,
|
||||||
|
xray_user_id: model.xray_user_id,
|
||||||
|
level: model.level,
|
||||||
|
is_active: model.is_active,
|
||||||
|
created_at: model.created_at.to_rfc3339(),
|
||||||
|
updated_at: model.updated_at.to_rfc3339(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
16
src/database/entities/mod.rs
Normal file
16
src/database/entities/mod.rs
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
pub mod user;
|
||||||
|
pub mod certificate;
|
||||||
|
pub mod inbound_template;
|
||||||
|
pub mod server;
|
||||||
|
pub mod server_inbound;
|
||||||
|
pub mod user_access;
|
||||||
|
pub mod inbound_users;
|
||||||
|
|
||||||
|
pub mod prelude {
|
||||||
|
pub use super::certificate::Entity as Certificate;
|
||||||
|
pub use super::inbound_template::Entity as InboundTemplate;
|
||||||
|
pub use super::server::Entity as Server;
|
||||||
|
pub use super::server_inbound::Entity as ServerInbound;
|
||||||
|
pub use super::user_access::Entity as UserAccess;
|
||||||
|
pub use super::inbound_users::Entity as InboundUsers;
|
||||||
|
}
|
||||||
212
src/database/entities/server.rs
Normal file
212
src/database/entities/server.rs
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{Set, ActiveModelTrait};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "servers")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
pub hostname: String,
|
||||||
|
|
||||||
|
pub grpc_port: i32,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub api_credentials: Option<String>,
|
||||||
|
|
||||||
|
pub status: String,
|
||||||
|
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::certificate::Entity",
|
||||||
|
from = "Column::DefaultCertificateId",
|
||||||
|
to = "super::certificate::Column::Id"
|
||||||
|
)]
|
||||||
|
DefaultCertificate,
|
||||||
|
#[sea_orm(has_many = "super::server_inbound::Entity")]
|
||||||
|
ServerInbounds,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::certificate::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::DefaultCertificate.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbounds.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
status: Set(ServerStatus::Unknown.into()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum ServerStatus {
|
||||||
|
Unknown,
|
||||||
|
Online,
|
||||||
|
Offline,
|
||||||
|
Error,
|
||||||
|
Connecting,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<ServerStatus> for String {
|
||||||
|
fn from(status: ServerStatus) -> Self {
|
||||||
|
match status {
|
||||||
|
ServerStatus::Unknown => "unknown".to_string(),
|
||||||
|
ServerStatus::Online => "online".to_string(),
|
||||||
|
ServerStatus::Offline => "offline".to_string(),
|
||||||
|
ServerStatus::Error => "error".to_string(),
|
||||||
|
ServerStatus::Connecting => "connecting".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for ServerStatus {
|
||||||
|
fn from(s: String) -> Self {
|
||||||
|
match s.as_str() {
|
||||||
|
"online" => ServerStatus::Online,
|
||||||
|
"offline" => ServerStatus::Offline,
|
||||||
|
"error" => ServerStatus::Error,
|
||||||
|
"connecting" => ServerStatus::Connecting,
|
||||||
|
_ => ServerStatus::Unknown,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateServerDto {
|
||||||
|
pub name: String,
|
||||||
|
pub hostname: String,
|
||||||
|
pub grpc_port: Option<i32>,
|
||||||
|
pub api_credentials: Option<String>,
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateServerDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub hostname: Option<String>,
|
||||||
|
pub grpc_port: Option<i32>,
|
||||||
|
pub api_credentials: Option<String>,
|
||||||
|
pub status: Option<String>,
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct ServerResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub hostname: String,
|
||||||
|
pub grpc_port: i32,
|
||||||
|
pub status: String,
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
pub has_credentials: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateServerDto> for ActiveModel {
    /// Build an insertable `ActiveModel` from the creation DTO.
    /// Id/timestamps come from `Self::new()`; status starts as "unknown".
    fn from(dto: CreateServerDto) -> Self {
        Self {
            name: Set(dto.name),
            hostname: Set(dto.hostname),
            // 2053 is the default gRPC port when the DTO omits one.
            grpc_port: Set(dto.grpc_port.unwrap_or(2053)),
            api_credentials: Set(dto.api_credentials),
            status: Set("unknown".to_string()),
            default_certificate_id: Set(dto.default_certificate_id),
            ..Self::new()
        }
    }
}
|
||||||
|
|
||||||
|
impl From<Model> for ServerResponse {
    /// Convert a database row into the public API shape, replacing the
    /// raw credentials with a boolean presence flag.
    fn from(server: Model) -> Self {
        Self {
            id: server.id,
            name: server.name,
            hostname: server.hostname,
            grpc_port: server.grpc_port,
            status: server.status,
            default_certificate_id: server.default_certificate_id,
            created_at: server.created_at,
            updated_at: server.updated_at,
            has_credentials: server.api_credentials.is_some(),
        }
    }
}
|
||||||
|
|
||||||
|
impl Model {
    /// Apply the optional fields of `dto` onto this row and return the
    /// resulting `ActiveModel`; `None` DTO fields are left untouched.
    ///
    /// NOTE(review): `api_credentials` and `default_certificate_id` can only
    /// be set here, never cleared back to NULL — presumably intentional, but
    /// confirm against the API's expectations.
    pub fn apply_update(self, dto: UpdateServerDto) -> ActiveModel {
        let mut active_model: ActiveModel = self.into();

        if let Some(name) = dto.name {
            active_model.name = Set(name);
        }
        if let Some(hostname) = dto.hostname {
            active_model.hostname = Set(hostname);
        }
        if let Some(grpc_port) = dto.grpc_port {
            active_model.grpc_port = Set(grpc_port);
        }
        if let Some(api_credentials) = dto.api_credentials {
            active_model.api_credentials = Set(Some(api_credentials));
        }
        if let Some(status) = dto.status {
            active_model.status = Set(status);
        }
        if let Some(default_certificate_id) = dto.default_certificate_id {
            active_model.default_certificate_id = Set(Some(default_certificate_id));
        }

        active_model
    }

    /// "host:port" endpoint string for the server's gRPC API.
    pub fn get_grpc_endpoint(&self) -> String {
        format!("{}:{}", self.hostname, self.grpc_port)
    }

    /// Parse the stored status string into a `ServerStatus`
    /// (unrecognized values become `Unknown`).
    #[allow(dead_code)]
    pub fn get_status(&self) -> ServerStatus {
        self.status.clone().into()
    }
}
|
||||||
204
src/database/entities/server_inbound.rs
Normal file
204
src/database/entities/server_inbound.rs
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{Set, ActiveModelTrait};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value;
|
||||||
|
|
||||||
|
/// A concrete inbound deployed on a server, instantiated from a template.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "server_inbounds")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: Uuid,

    /// Server this inbound is deployed on.
    pub server_id: Uuid,

    /// Template this inbound was instantiated from.
    pub template_id: Uuid,

    /// xray inbound tag (unique identifier within the xray config).
    pub tag: String,

    /// Port override; when `None` the template's default port applies
    /// (see `get_effective_port`).
    pub port_override: Option<i32>,

    /// Optional TLS certificate for this inbound.
    pub certificate_id: Option<Uuid>,

    /// JSON object of template-variable values for this instance.
    pub variable_values: Value,

    /// Whether this inbound is currently enabled.
    pub is_active: bool,

    pub created_at: DateTimeUtc,

    pub updated_at: DateTimeUtc,
}
|
||||||
|
|
||||||
|
/// Foreign-key relations of `server_inbounds`: each inbound belongs to a
/// server, a template, and optionally a certificate.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::server::Entity",
        from = "Column::ServerId",
        to = "super::server::Column::Id"
    )]
    Server,
    #[sea_orm(
        belongs_to = "super::inbound_template::Entity",
        from = "Column::TemplateId",
        to = "super::inbound_template::Column::Id"
    )]
    Template,
    #[sea_orm(
        belongs_to = "super::certificate::Entity",
        from = "Column::CertificateId",
        to = "super::certificate::Column::Id"
    )]
    Certificate,
}
|
||||||
|
|
||||||
|
// Enable SeaORM join helpers (`find_related`, etc.) for each relation.
impl Related<super::server::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Server.def()
    }
}

impl Related<super::inbound_template::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Template.def()
    }
}

impl Related<super::certificate::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Certificate.def()
    }
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
    /// Default constructor: fresh UUID primary key and both timestamps
    /// set to "now"; every other field is left `NotSet`.
    fn new() -> Self {
        Self {
            id: Set(Uuid::new_v4()),
            created_at: Set(chrono::Utc::now()),
            updated_at: Set(chrono::Utc::now()),
            ..ActiveModelTrait::default()
        }
    }

    /// Pre-save hook (manually desugared async-trait signature):
    /// refresh `updated_at` on updates; inserts keep the timestamps
    /// assigned by `new()`.
    fn before_save<'life0, 'async_trait, C>(
        mut self,
        _db: &'life0 C,
        insert: bool,
    ) -> core::pin::Pin<Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>>
    where
        'life0: 'async_trait,
        C: 'async_trait + ConnectionTrait,
        Self: 'async_trait,
    {
        Box::pin(async move {
            if !insert {
                self.updated_at = Set(chrono::Utc::now());
            }
            Ok(self)
        })
    }

}
|
||||||
|
|
||||||
|
/// Payload for attaching a template-based inbound to a server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateServerInboundDto {
    /// Template to instantiate.
    pub template_id: Uuid,
    /// Port to listen on (stored as `port_override`).
    pub port: i32,
    /// Optional TLS certificate.
    pub certificate_id: Option<Uuid>,
    /// Whether the inbound starts enabled.
    pub is_active: bool,
}
|
||||||
|
|
||||||
|
/// Partial-update payload for a server inbound; `None` fields are unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateServerInboundDto {
    pub tag: Option<String>,
    pub port_override: Option<i32>,
    pub certificate_id: Option<Uuid>,
    /// Replacement set of template-variable values (whole object replaced).
    pub variable_values: Option<serde_json::Map<String, Value>>,
    pub is_active: Option<bool>,
}
|
||||||
|
|
||||||
|
/// API representation of a server inbound.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerInboundResponse {
    pub id: Uuid,
    pub server_id: Uuid,
    pub template_id: Uuid,
    pub tag: String,
    /// Effective port reported to clients (derived from `port_override`).
    pub port: i32,
    pub certificate_id: Option<Uuid>,
    pub variable_values: Value,
    pub is_active: bool,
    pub created_at: DateTimeUtc,
    pub updated_at: DateTimeUtc,
    // Populated by joins (simplified for now)
    pub template_name: Option<String>,
    pub certificate_name: Option<String>,
}
|
||||||
|
|
||||||
|
impl From<Model> for ServerInboundResponse {
    /// Convert a database row into the API shape. Join-derived names are
    /// left `None`; callers that need them must fill them in themselves.
    fn from(inbound: Model) -> Self {
        Self {
            id: inbound.id,
            server_id: inbound.server_id,
            template_id: inbound.template_id,
            tag: inbound.tag,
            // NOTE(review): falls back to 443 rather than the template's
            // default port (cf. `get_effective_port`) — confirm intended.
            port: inbound.port_override.unwrap_or(443), // Default port if not set
            certificate_id: inbound.certificate_id,
            variable_values: inbound.variable_values,
            is_active: inbound.is_active,
            created_at: inbound.created_at,
            updated_at: inbound.updated_at,
            template_name: None, // Will be filled by repository if needed
            certificate_name: None, // Will be filled by repository if needed
        }
    }
}
|
||||||
|
|
||||||
|
impl Model {
    /// Apply the optional fields of `dto` onto this row and return the
    /// resulting `ActiveModel`; `None` DTO fields are left untouched.
    ///
    /// NOTE(review): `port_override` and `certificate_id` can only be set,
    /// never cleared back to NULL, and `variable_values` is replaced
    /// wholesale (not merged) — confirm these are the intended semantics.
    pub fn apply_update(self, dto: UpdateServerInboundDto) -> ActiveModel {
        let mut active_model: ActiveModel = self.into();

        if let Some(tag) = dto.tag {
            active_model.tag = Set(tag);
        }
        if let Some(port_override) = dto.port_override {
            active_model.port_override = Set(Some(port_override));
        }
        if let Some(certificate_id) = dto.certificate_id {
            active_model.certificate_id = Set(Some(certificate_id));
        }
        if let Some(variable_values) = dto.variable_values {
            active_model.variable_values = Set(Value::Object(variable_values));
        }
        if let Some(is_active) = dto.is_active {
            active_model.is_active = Set(is_active);
        }

        active_model
    }

    /// Template-variable values as a JSON map; an empty map when the
    /// stored value is not a JSON object.
    #[allow(dead_code)]
    pub fn get_variable_values(&self) -> serde_json::Map<String, Value> {
        if let Value::Object(map) = &self.variable_values {
            map.clone()
        } else {
            serde_json::Map::new()
        }
    }

    /// The port this inbound listens on: the override when set,
    /// otherwise the template's default.
    #[allow(dead_code)]
    pub fn get_effective_port(&self, template_default_port: i32) -> i32 {
        self.port_override.unwrap_or(template_default_port)
    }
}
|
||||||
|
|
||||||
|
impl From<CreateServerInboundDto> for ActiveModel {
    /// Build an insertable `ActiveModel` from the creation DTO.
    ///
    /// NOTE(review): `server_id` is NOT set here (the DTO has no such
    /// field) — presumably the repository assigns it before insert;
    /// confirm, or the insert will fail the NOT NULL constraint.
    fn from(dto: CreateServerInboundDto) -> Self {
        Self {
            template_id: Set(dto.template_id),
            tag: Set(format!("inbound-{}", Uuid::new_v4())), // Generate unique tag
            port_override: Set(Some(dto.port)),
            certificate_id: Set(dto.certificate_id),
            // Variables start empty; populated later via `apply_update`.
            variable_values: Set(Value::Object(serde_json::Map::new())),
            is_active: Set(dto.is_active),
            ..Self::new()
        }
    }
}
|
||||||
185
src/database/entities/user.rs
Normal file
185
src/database/entities/user.rs
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{Set, ActiveModelTrait};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// A panel user (not an OS or xray account by itself).
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "users")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: Uuid,

    /// User display name
    pub name: String,

    /// Optional comment/description about the user
    #[sea_orm(column_type = "Text")]
    pub comment: Option<String>,

    /// Optional Telegram user ID for bot integration
    pub telegram_id: Option<i64>,

    /// When the user was registered/created
    pub created_at: DateTimeUtc,

    /// Last time user record was updated
    pub updated_at: DateTimeUtc,
}
|
||||||
|
|
||||||
|
/// No outgoing foreign keys; `user_access` rows point at users instead.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
    /// Default constructor: fresh UUID primary key and both timestamps
    /// set to "now"; every other field is left `NotSet`.
    fn new() -> Self {
        Self {
            id: Set(Uuid::new_v4()),
            created_at: Set(chrono::Utc::now()),
            updated_at: Set(chrono::Utc::now()),
            ..ActiveModelTrait::default()
        }
    }

    /// Pre-save hook (manually desugared async-trait signature): refresh
    /// `updated_at` on updates; inserts keep the timestamps from `new()`.
    fn before_save<'life0, 'async_trait, C>(
        mut self,
        _db: &'life0 C,
        insert: bool,
    ) -> core::pin::Pin<Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>>
    where
        'life0: 'async_trait,
        C: 'async_trait + ConnectionTrait,
        Self: 'async_trait,
    {
        Box::pin(async move {
            if !insert {
                self.updated_at = Set(chrono::Utc::now());
            }
            Ok(self)
        })
    }
}
|
||||||
|
|
||||||
|
/// User creation data transfer object.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateUserDto {
    /// Display name.
    pub name: String,
    /// Optional free-form description.
    pub comment: Option<String>,
    /// Optional Telegram user ID for bot integration.
    pub telegram_id: Option<i64>,
}
|
||||||
|
|
||||||
|
/// User update data transfer object; `None` fields are left unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateUserDto {
    pub name: Option<String>,
    pub comment: Option<String>,
    pub telegram_id: Option<i64>,
}
|
||||||
|
|
||||||
|
impl From<CreateUserDto> for ActiveModel {
    /// Build an insertable `ActiveModel` from the creation DTO;
    /// id and timestamps come from `Self::new()`.
    fn from(dto: CreateUserDto) -> Self {
        Self {
            name: Set(dto.name),
            comment: Set(dto.comment),
            telegram_id: Set(dto.telegram_id),
            ..Self::new()
        }
    }
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
/// Update this model with data from UpdateUserDto
|
||||||
|
pub fn apply_update(self, dto: UpdateUserDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(comment) = dto.comment {
|
||||||
|
active_model.comment = Set(Some(comment));
|
||||||
|
} else if dto.comment.is_some() {
|
||||||
|
// Explicitly set to None if Some(None) was passed
|
||||||
|
active_model.comment = Set(None);
|
||||||
|
}
|
||||||
|
if dto.telegram_id.is_some() {
|
||||||
|
active_model.telegram_id = Set(dto.telegram_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if user has Telegram integration
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn has_telegram(&self) -> bool {
|
||||||
|
self.telegram_id.is_some()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get display name with optional comment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn display_name(&self) -> String {
|
||||||
|
match &self.comment {
|
||||||
|
Some(comment) if !comment.is_empty() => format!("{} ({})", self.name, comment),
|
||||||
|
_ => self.name.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// DTO -> ActiveModel conversion carries all three fields through.
    #[test]
    fn test_create_user_dto_conversion() {
        let dto = CreateUserDto {
            name: "Test User".to_string(),
            comment: Some("Test comment".to_string()),
            telegram_id: Some(123456789),
        };

        let active_model: ActiveModel = dto.into();

        assert_eq!(active_model.name.unwrap(), "Test User");
        assert_eq!(active_model.comment.unwrap(), Some("Test comment".to_string()));
        assert_eq!(active_model.telegram_id.unwrap(), Some(123456789));
    }

    /// display_name appends the comment in parentheses only when present.
    #[test]
    fn test_user_display_name() {
        let user = Model {
            id: Uuid::new_v4(),
            name: "John Doe".to_string(),
            comment: Some("Admin user".to_string()),
            telegram_id: None,
            created_at: chrono::Utc::now(),
            updated_at: chrono::Utc::now(),
        };

        assert_eq!(user.display_name(), "John Doe (Admin user)");

        let user_no_comment = Model {
            comment: None,
            ..user
        };

        assert_eq!(user_no_comment.display_name(), "John Doe");
    }

    /// has_telegram mirrors the presence of telegram_id.
    #[test]
    fn test_has_telegram() {
        let user_with_telegram = Model {
            id: Uuid::new_v4(),
            name: "User".to_string(),
            comment: None,
            telegram_id: Some(123456789),
            created_at: chrono::Utc::now(),
            updated_at: chrono::Utc::now(),
        };

        let user_without_telegram = Model {
            telegram_id: None,
            ..user_with_telegram.clone()
        };

        assert!(user_with_telegram.has_telegram());
        assert!(!user_without_telegram.has_telegram());
    }
}
|
||||||
188
src/database/entities/user_access.rs
Normal file
188
src/database/entities/user_access.rs
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{Set, ActiveModelTrait};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Grants one user access to one inbound on one server, carrying the
/// identity used inside xray.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "user_access")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: Uuid,

    /// User ID this access is for
    pub user_id: Uuid,

    /// Server ID this access applies to
    pub server_id: Uuid,

    /// Server inbound ID this access applies to
    pub server_inbound_id: Uuid,

    /// User's unique identifier in xray (UUID for VLESS/VMess, password for Trojan)
    pub xray_user_id: String,

    /// User's email in xray
    pub xray_email: String,

    /// User level in xray (0-255)
    pub level: i32,

    /// Whether this access is currently active
    pub is_active: bool,

    /// When this access was created
    pub created_at: DateTimeUtc,

    /// Last time this access was updated
    pub updated_at: DateTimeUtc,
}
|
||||||
|
|
||||||
|
/// Foreign-key relations: each access row belongs to a user, a server,
/// and a server inbound.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
    #[sea_orm(
        belongs_to = "super::server::Entity",
        from = "Column::ServerId",
        to = "super::server::Column::Id"
    )]
    Server,
    #[sea_orm(
        belongs_to = "super::server_inbound::Entity",
        from = "Column::ServerInboundId",
        to = "super::server_inbound::Column::Id"
    )]
    ServerInbound,
}
|
||||||
|
|
||||||
|
// Enable SeaORM join helpers (`find_related`, etc.) for each relation.
impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl Related<super::server::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Server.def()
    }
}

impl Related<super::server_inbound::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::ServerInbound.def()
    }
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
    /// Default constructor: fresh UUID primary key and both timestamps
    /// set to "now"; every other field is left `NotSet`.
    fn new() -> Self {
        Self {
            id: Set(Uuid::new_v4()),
            created_at: Set(chrono::Utc::now()),
            updated_at: Set(chrono::Utc::now()),
            ..ActiveModelTrait::default()
        }
    }

    /// Pre-save hook (manually desugared async-trait signature): refresh
    /// `updated_at` on updates; inserts keep the timestamps from `new()`.
    fn before_save<'life0, 'async_trait, C>(
        mut self,
        _db: &'life0 C,
        insert: bool,
    ) -> core::pin::Pin<Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>>
    where
        'life0: 'async_trait,
        C: 'async_trait + ConnectionTrait,
        Self: 'async_trait,
    {
        Box::pin(async move {
            if !insert {
                self.updated_at = Set(chrono::Utc::now());
            }
            Ok(self)
        })
    }

}
|
||||||
|
|
||||||
|
/// User access creation data transfer object.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateUserAccessDto {
    pub user_id: Uuid,
    pub server_id: Uuid,
    pub server_inbound_id: Uuid,
    /// Identity used inside xray (UUID for VLESS/VMess, password for Trojan).
    pub xray_user_id: String,
    pub xray_email: String,
    /// xray user level; defaults to 0 when omitted.
    pub level: Option<i32>,
}
|
||||||
|
|
||||||
|
/// User access update data transfer object; `None` fields are unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateUserAccessDto {
    pub is_active: Option<bool>,
    pub level: Option<i32>,
}
|
||||||
|
|
||||||
|
impl From<CreateUserAccessDto> for ActiveModel {
    /// Build an insertable `ActiveModel` from the creation DTO.
    /// New access rows start active; level defaults to 0.
    fn from(dto: CreateUserAccessDto) -> Self {
        Self {
            user_id: Set(dto.user_id),
            server_id: Set(dto.server_id),
            server_inbound_id: Set(dto.server_inbound_id),
            xray_user_id: Set(dto.xray_user_id),
            xray_email: Set(dto.xray_email),
            level: Set(dto.level.unwrap_or(0)),
            is_active: Set(true),
            ..Self::new()
        }
    }
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
/// Update this model with data from UpdateUserAccessDto
|
||||||
|
pub fn apply_update(self, dto: UpdateUserAccessDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
if let Some(level) = dto.level {
|
||||||
|
active_model.level = Set(level);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Response model for user access. Timestamps are serialized as
/// RFC 3339 strings (see the `From<Model>` impl).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserAccessResponse {
    pub id: Uuid,
    pub user_id: Uuid,
    pub server_id: Uuid,
    pub server_inbound_id: Uuid,
    pub xray_user_id: String,
    pub xray_email: String,
    pub level: i32,
    pub is_active: bool,
    pub created_at: String,
    pub updated_at: String,
}
|
||||||
|
|
||||||
|
impl From<Model> for UserAccessResponse {
    /// Convert a database row into the API shape, formatting the
    /// timestamps as RFC 3339 strings.
    fn from(model: Model) -> Self {
        Self {
            id: model.id,
            user_id: model.user_id,
            server_id: model.server_id,
            server_inbound_id: model.server_inbound_id,
            xray_user_id: model.xray_user_id,
            xray_email: model.xray_email,
            level: model.level,
            is_active: model.is_active,
            created_at: model.created_at.to_rfc3339(),
            updated_at: model.updated_at.to_rfc3339(),
        }
    }
}
|
||||||
135
src/database/migrations/m20241201_000001_create_users_table.rs
Normal file
135
src/database/migrations/m20241201_000001_create_users_table.rs
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
/// Migration: create the `users` table (name derived from the file name).
#[derive(DeriveMigrationName)]
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `users` table and its three indexes
    /// (name, unique telegram_id, created_at).
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Create users table
        manager
            .create_table(
                Table::create()
                    .table(Users::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Users::Id).uuid().not_null().primary_key())
                    .col(ColumnDef::new(Users::Name).string_len(255).not_null())
                    .col(ColumnDef::new(Users::Comment).text().null())
                    .col(ColumnDef::new(Users::TelegramId).big_integer().null())
                    .col(ColumnDef::new(Users::CreatedAt).timestamp_with_time_zone().not_null())
                    .col(ColumnDef::new(Users::UpdatedAt).timestamp_with_time_zone().not_null())
                    .to_owned(),
            )
            .await?;

        // Create index on name for faster searches
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_users_name")
                    .table(Users::Table)
                    .col(Users::Name)
                    .to_owned(),
            )
            .await?;

        // Create unique index on telegram_id (if not null)
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_users_telegram_id")
                    .table(Users::Table)
                    .col(Users::TelegramId)
                    .unique()
                    .to_owned(),
            )
            .await?;

        // Create index on created_at for sorting
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_users_created_at")
                    .table(Users::Table)
                    .col(Users::CreatedAt)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    /// Reverse of `up`: drop the indexes, then the table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop indexes first
        manager
            .drop_index(Index::drop().if_exists().name("idx_users_created_at").to_owned())
            .await?;

        manager
            .drop_index(Index::drop().if_exists().name("idx_users_telegram_id").to_owned())
            .await?;

        manager
            .drop_index(Index::drop().if_exists().name("idx_users_name").to_owned())
            .await?;

        // Drop table
        manager
            .drop_table(Table::drop().table(Users::Table).to_owned())
            .await
    }
}
|
||||||
|
|
||||||
|
/// Identifier names for the `users` table and its columns.
#[derive(DeriveIden)]
enum Users {
    Table,
    Id,
    Name,
    Comment,
    TelegramId,
    CreatedAt,
    UpdatedAt,
}
|
||||||
@@ -0,0 +1,120 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
/// Migration: create the `certificates` table (name derived from the file name).
#[derive(DeriveMigrationName)]
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `certificates` table (TLS material stored as blobs)
    /// and an index on the domain column.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Certificates::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Certificates::Id).uuid().not_null().primary_key())
                    .col(ColumnDef::new(Certificates::Name).string_len(255).not_null())
                    .col(ColumnDef::new(Certificates::CertType).string_len(50).not_null())
                    .col(ColumnDef::new(Certificates::Domain).string_len(255).not_null())
                    .col(ColumnDef::new(Certificates::CertData).blob().not_null())
                    .col(ColumnDef::new(Certificates::KeyData).blob().not_null())
                    // Intermediate chain is optional.
                    .col(ColumnDef::new(Certificates::ChainData).blob().null())
                    .col(ColumnDef::new(Certificates::ExpiresAt).timestamp_with_time_zone().not_null())
                    .col(ColumnDef::new(Certificates::AutoRenew).boolean().default(false).not_null())
                    .col(ColumnDef::new(Certificates::CreatedAt).timestamp_with_time_zone().not_null())
                    .col(ColumnDef::new(Certificates::UpdatedAt).timestamp_with_time_zone().not_null())
                    .to_owned(),
            )
            .await?;

        // Index on domain for faster lookups
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_certificates_domain")
                    .table(Certificates::Table)
                    .col(Certificates::Domain)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    /// Reverse of `up`: drop the index, then the table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_index(Index::drop().if_exists().name("idx_certificates_domain").to_owned())
            .await?;

        manager
            .drop_table(Table::drop().table(Certificates::Table).to_owned())
            .await
    }
}
|
||||||
|
|
||||||
|
/// Identifier names for the `certificates` table and its columns.
#[derive(DeriveIden)]
enum Certificates {
    Table,
    Id,
    Name,
    CertType,
    Domain,
    CertData,
    KeyData,
    ChainData,
    ExpiresAt,
    AutoRenew,
    CreatedAt,
    UpdatedAt,
}
|
||||||
@@ -0,0 +1,155 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
/// Migration: create the `inbound_templates` table (name derived from the file name).
#[derive(DeriveMigrationName)]
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `inbound_templates` table (protocol templates with JSON
    /// settings/variables) and indexes on name and protocol.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(InboundTemplates::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(InboundTemplates::Id).uuid().not_null().primary_key())
                    .col(ColumnDef::new(InboundTemplates::Name).string_len(255).not_null())
                    .col(ColumnDef::new(InboundTemplates::Description).text().null())
                    .col(ColumnDef::new(InboundTemplates::Protocol).string_len(50).not_null())
                    .col(ColumnDef::new(InboundTemplates::DefaultPort).integer().not_null())
                    .col(ColumnDef::new(InboundTemplates::BaseSettings).json().not_null())
                    .col(ColumnDef::new(InboundTemplates::StreamSettings).json().not_null())
                    .col(ColumnDef::new(InboundTemplates::RequiresTls).boolean().default(false).not_null())
                    .col(ColumnDef::new(InboundTemplates::RequiresDomain).boolean().default(false).not_null())
                    .col(ColumnDef::new(InboundTemplates::Variables).json().not_null())
                    .col(ColumnDef::new(InboundTemplates::IsActive).boolean().default(true).not_null())
                    .col(ColumnDef::new(InboundTemplates::CreatedAt).timestamp_with_time_zone().not_null())
                    .col(ColumnDef::new(InboundTemplates::UpdatedAt).timestamp_with_time_zone().not_null())
                    .to_owned(),
            )
            .await?;

        // Index on name for searches
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_inbound_templates_name")
                    .table(InboundTemplates::Table)
                    .col(InboundTemplates::Name)
                    .to_owned(),
            )
            .await?;

        // Index on protocol
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_inbound_templates_protocol")
                    .table(InboundTemplates::Table)
                    .col(InboundTemplates::Protocol)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    /// Reverse of `up`: drop the indexes, then the table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_index(Index::drop().if_exists().name("idx_inbound_templates_protocol").to_owned())
            .await?;

        manager
            .drop_index(Index::drop().if_exists().name("idx_inbound_templates_name").to_owned())
            .await?;

        manager
            .drop_table(Table::drop().table(InboundTemplates::Table).to_owned())
            .await
    }
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum InboundTemplates {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
Name,
|
||||||
|
Description,
|
||||||
|
Protocol,
|
||||||
|
DefaultPort,
|
||||||
|
BaseSettings,
|
||||||
|
StreamSettings,
|
||||||
|
RequiresTls,
|
||||||
|
RequiresDomain,
|
||||||
|
Variables,
|
||||||
|
IsActive,
|
||||||
|
CreatedAt,
|
||||||
|
UpdatedAt,
|
||||||
|
}
|
||||||
136
src/database/migrations/m20241201_000004_create_servers_table.rs
Normal file
136
src/database/migrations/m20241201_000004_create_servers_table.rs
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
#[derive(DeriveMigrationName)]
|
||||||
|
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigrationTrait for Migration {
|
||||||
|
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(Servers::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::Id)
|
||||||
|
.uuid()
|
||||||
|
.not_null()
|
||||||
|
.primary_key(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::Name)
|
||||||
|
.string_len(255)
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::Hostname)
|
||||||
|
.string_len(255)
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::GrpcPort)
|
||||||
|
.integer()
|
||||||
|
.default(2053)
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::ApiCredentials)
|
||||||
|
.text()
|
||||||
|
.null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::Status)
|
||||||
|
.string_len(50)
|
||||||
|
.default("unknown")
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::DefaultCertificateId)
|
||||||
|
.uuid()
|
||||||
|
.null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::CreatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(Servers::UpdatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Foreign key to certificates
|
||||||
|
manager
|
||||||
|
.create_foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_servers_default_certificate")
|
||||||
|
.from(Servers::Table, Servers::DefaultCertificateId)
|
||||||
|
.to(Certificates::Table, Certificates::Id)
|
||||||
|
.on_delete(ForeignKeyAction::SetNull)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Index on hostname
|
||||||
|
manager
|
||||||
|
.create_index(
|
||||||
|
Index::create()
|
||||||
|
.if_not_exists()
|
||||||
|
.name("idx_servers_hostname")
|
||||||
|
.table(Servers::Table)
|
||||||
|
.col(Servers::Hostname)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.drop_foreign_key(
|
||||||
|
ForeignKey::drop()
|
||||||
|
.name("fk_servers_default_certificate")
|
||||||
|
.table(Servers::Table)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_index(
|
||||||
|
Index::drop()
|
||||||
|
.if_exists()
|
||||||
|
.name("idx_servers_hostname")
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_table(Table::drop().table(Servers::Table).to_owned())
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum Servers {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
Name,
|
||||||
|
Hostname,
|
||||||
|
GrpcPort,
|
||||||
|
ApiCredentials,
|
||||||
|
Status,
|
||||||
|
DefaultCertificateId,
|
||||||
|
CreatedAt,
|
||||||
|
UpdatedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum Certificates {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
@@ -0,0 +1,195 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
#[derive(DeriveMigrationName)]
|
||||||
|
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigrationTrait for Migration {
|
||||||
|
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(ServerInbounds::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::Id)
|
||||||
|
.uuid()
|
||||||
|
.not_null()
|
||||||
|
.primary_key(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::ServerId)
|
||||||
|
.uuid()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::TemplateId)
|
||||||
|
.uuid()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::Tag)
|
||||||
|
.string_len(255)
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::PortOverride)
|
||||||
|
.integer()
|
||||||
|
.null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::CertificateId)
|
||||||
|
.uuid()
|
||||||
|
.null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::VariableValues)
|
||||||
|
.json()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::IsActive)
|
||||||
|
.boolean()
|
||||||
|
.default(true)
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::CreatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(ServerInbounds::UpdatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Foreign keys
|
||||||
|
manager
|
||||||
|
.create_foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_server_inbounds_server")
|
||||||
|
.from(ServerInbounds::Table, ServerInbounds::ServerId)
|
||||||
|
.to(Servers::Table, Servers::Id)
|
||||||
|
.on_delete(ForeignKeyAction::Cascade)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.create_foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_server_inbounds_template")
|
||||||
|
.from(ServerInbounds::Table, ServerInbounds::TemplateId)
|
||||||
|
.to(InboundTemplates::Table, InboundTemplates::Id)
|
||||||
|
.on_delete(ForeignKeyAction::Restrict)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.create_foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_server_inbounds_certificate")
|
||||||
|
.from(ServerInbounds::Table, ServerInbounds::CertificateId)
|
||||||
|
.to(Certificates::Table, Certificates::Id)
|
||||||
|
.on_delete(ForeignKeyAction::SetNull)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Unique constraint on server_id + tag
|
||||||
|
manager
|
||||||
|
.create_index(
|
||||||
|
Index::create()
|
||||||
|
.if_not_exists()
|
||||||
|
.name("idx_server_inbounds_server_tag")
|
||||||
|
.table(ServerInbounds::Table)
|
||||||
|
.col(ServerInbounds::ServerId)
|
||||||
|
.col(ServerInbounds::Tag)
|
||||||
|
.unique()
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.drop_foreign_key(
|
||||||
|
ForeignKey::drop()
|
||||||
|
.name("fk_server_inbounds_certificate")
|
||||||
|
.table(ServerInbounds::Table)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_foreign_key(
|
||||||
|
ForeignKey::drop()
|
||||||
|
.name("fk_server_inbounds_template")
|
||||||
|
.table(ServerInbounds::Table)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_foreign_key(
|
||||||
|
ForeignKey::drop()
|
||||||
|
.name("fk_server_inbounds_server")
|
||||||
|
.table(ServerInbounds::Table)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_index(
|
||||||
|
Index::drop()
|
||||||
|
.if_exists()
|
||||||
|
.name("idx_server_inbounds_server_tag")
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_table(Table::drop().table(ServerInbounds::Table).to_owned())
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum ServerInbounds {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
ServerId,
|
||||||
|
TemplateId,
|
||||||
|
Tag,
|
||||||
|
PortOverride,
|
||||||
|
CertificateId,
|
||||||
|
VariableValues,
|
||||||
|
IsActive,
|
||||||
|
CreatedAt,
|
||||||
|
UpdatedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum Servers {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum InboundTemplates {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum Certificates {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
@@ -0,0 +1,196 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
#[derive(DeriveMigrationName)]
|
||||||
|
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigrationTrait for Migration {
|
||||||
|
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(UserAccess::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::Id)
|
||||||
|
.uuid()
|
||||||
|
.not_null()
|
||||||
|
.primary_key(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::UserId)
|
||||||
|
.uuid()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::ServerId)
|
||||||
|
.uuid()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::ServerInboundId)
|
||||||
|
.uuid()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::XrayUserId)
|
||||||
|
.string()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::XrayEmail)
|
||||||
|
.string()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::Level)
|
||||||
|
.integer()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::IsActive)
|
||||||
|
.boolean()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::CreatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(UserAccess::UpdatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_user_access_user_id")
|
||||||
|
.from(UserAccess::Table, UserAccess::UserId)
|
||||||
|
.to(Users::Table, Users::Id)
|
||||||
|
.on_delete(ForeignKeyAction::Cascade),
|
||||||
|
)
|
||||||
|
.foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_user_access_server_id")
|
||||||
|
.from(UserAccess::Table, UserAccess::ServerId)
|
||||||
|
.to(Servers::Table, Servers::Id)
|
||||||
|
.on_delete(ForeignKeyAction::Cascade),
|
||||||
|
)
|
||||||
|
.foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_user_access_server_inbound_id")
|
||||||
|
.from(UserAccess::Table, UserAccess::ServerInboundId)
|
||||||
|
.to(ServerInbounds::Table, ServerInbounds::Id)
|
||||||
|
.on_delete(ForeignKeyAction::Cascade),
|
||||||
|
)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Create indexes separately
|
||||||
|
manager
|
||||||
|
.create_index(
|
||||||
|
Index::create()
|
||||||
|
.if_not_exists()
|
||||||
|
.name("idx_user_access_server_inbound")
|
||||||
|
.table(UserAccess::Table)
|
||||||
|
.col(UserAccess::ServerId)
|
||||||
|
.col(UserAccess::ServerInboundId)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.create_index(
|
||||||
|
Index::create()
|
||||||
|
.if_not_exists()
|
||||||
|
.name("idx_user_access_user_server")
|
||||||
|
.table(UserAccess::Table)
|
||||||
|
.col(UserAccess::UserId)
|
||||||
|
.col(UserAccess::ServerId)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.create_index(
|
||||||
|
Index::create()
|
||||||
|
.if_not_exists()
|
||||||
|
.name("idx_user_access_xray_email")
|
||||||
|
.table(UserAccess::Table)
|
||||||
|
.col(UserAccess::XrayEmail)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
// Drop indexes first
|
||||||
|
manager
|
||||||
|
.drop_index(
|
||||||
|
Index::drop()
|
||||||
|
.if_exists()
|
||||||
|
.name("idx_user_access_xray_email")
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_index(
|
||||||
|
Index::drop()
|
||||||
|
.if_exists()
|
||||||
|
.name("idx_user_access_user_server")
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_index(
|
||||||
|
Index::drop()
|
||||||
|
.if_exists()
|
||||||
|
.name("idx_user_access_server_inbound")
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Drop table
|
||||||
|
manager
|
||||||
|
.drop_table(Table::drop().table(UserAccess::Table).to_owned())
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum UserAccess {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
UserId,
|
||||||
|
ServerId,
|
||||||
|
ServerInboundId,
|
||||||
|
XrayUserId,
|
||||||
|
XrayEmail,
|
||||||
|
Level,
|
||||||
|
IsActive,
|
||||||
|
CreatedAt,
|
||||||
|
UpdatedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum Users {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum Servers {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum ServerInbounds {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
@@ -0,0 +1,125 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
#[derive(DeriveMigrationName)]
|
||||||
|
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigrationTrait for Migration {
|
||||||
|
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(InboundUsers::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::Id)
|
||||||
|
.uuid()
|
||||||
|
.not_null()
|
||||||
|
.primary_key(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::ServerInboundId)
|
||||||
|
.uuid()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::Username)
|
||||||
|
.string()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::Email)
|
||||||
|
.string()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::XrayUserId)
|
||||||
|
.string()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::Level)
|
||||||
|
.integer()
|
||||||
|
.not_null()
|
||||||
|
.default(0),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::IsActive)
|
||||||
|
.boolean()
|
||||||
|
.not_null()
|
||||||
|
.default(true),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::CreatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.col(
|
||||||
|
ColumnDef::new(InboundUsers::UpdatedAt)
|
||||||
|
.timestamp_with_time_zone()
|
||||||
|
.not_null(),
|
||||||
|
)
|
||||||
|
.foreign_key(
|
||||||
|
ForeignKey::create()
|
||||||
|
.name("fk_inbound_users_server_inbound")
|
||||||
|
.from(InboundUsers::Table, InboundUsers::ServerInboundId)
|
||||||
|
.to(ServerInbounds::Table, ServerInbounds::Id)
|
||||||
|
.on_delete(ForeignKeyAction::Cascade),
|
||||||
|
)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Create unique constraint: one user per inbound
|
||||||
|
manager
|
||||||
|
.create_index(
|
||||||
|
Index::create()
|
||||||
|
.name("idx_inbound_users_unique_user_per_inbound")
|
||||||
|
.table(InboundUsers::Table)
|
||||||
|
.col(InboundUsers::ServerInboundId)
|
||||||
|
.col(InboundUsers::Username)
|
||||||
|
.unique()
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Create index on email for faster lookups
|
||||||
|
manager
|
||||||
|
.create_index(
|
||||||
|
Index::create()
|
||||||
|
.name("idx_inbound_users_email")
|
||||||
|
.table(InboundUsers::Table)
|
||||||
|
.col(InboundUsers::Email)
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.drop_table(Table::drop().table(InboundUsers::Table).to_owned())
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum InboundUsers {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
ServerInboundId,
|
||||||
|
Username,
|
||||||
|
Email,
|
||||||
|
XrayUserId,
|
||||||
|
Level,
|
||||||
|
IsActive,
|
||||||
|
CreatedAt,
|
||||||
|
UpdatedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum ServerInbounds {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
}
|
||||||
26
src/database/migrations/mod.rs
Normal file
26
src/database/migrations/mod.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
mod m20241201_000001_create_users_table;
|
||||||
|
mod m20241201_000002_create_certificates_table;
|
||||||
|
mod m20241201_000003_create_inbound_templates_table;
|
||||||
|
mod m20241201_000004_create_servers_table;
|
||||||
|
mod m20241201_000005_create_server_inbounds_table;
|
||||||
|
mod m20241201_000006_create_user_access_table;
|
||||||
|
mod m20241201_000007_create_inbound_users_table;
|
||||||
|
|
||||||
|
pub struct Migrator;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigratorTrait for Migrator {
|
||||||
|
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
|
||||||
|
vec![
|
||||||
|
Box::new(m20241201_000001_create_users_table::Migration),
|
||||||
|
Box::new(m20241201_000002_create_certificates_table::Migration),
|
||||||
|
Box::new(m20241201_000003_create_inbound_templates_table::Migration),
|
||||||
|
Box::new(m20241201_000004_create_servers_table::Migration),
|
||||||
|
Box::new(m20241201_000005_create_server_inbounds_table::Migration),
|
||||||
|
Box::new(m20241201_000006_create_user_access_table::Migration),
|
||||||
|
Box::new(m20241201_000007_create_inbound_users_table::Migration),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
161
src/database/mod.rs
Normal file
161
src/database/mod.rs
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{Database, DatabaseConnection, ConnectOptions, Statement, DatabaseBackend, ConnectionTrait};
|
||||||
|
use sea_orm_migration::MigratorTrait;
|
||||||
|
use std::time::Duration;
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
use crate::config::DatabaseConfig;
|
||||||
|
|
||||||
|
pub mod entities;
|
||||||
|
pub mod migrations;
|
||||||
|
pub mod repository;
|
||||||
|
|
||||||
|
use migrations::Migrator;
|
||||||
|
|
||||||
|
/// Database connection and management
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct DatabaseManager {
|
||||||
|
connection: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DatabaseManager {
|
||||||
|
/// Create a new database connection
|
||||||
|
pub async fn new(config: &DatabaseConfig) -> Result<Self> {
|
||||||
|
info!("Connecting to database...");
|
||||||
|
|
||||||
|
// URL-encode the connection string to handle special characters in passwords
|
||||||
|
let encoded_url = Self::encode_database_url(&config.url)?;
|
||||||
|
|
||||||
|
let mut opt = ConnectOptions::new(&encoded_url);
|
||||||
|
opt.max_connections(config.max_connections)
|
||||||
|
.min_connections(1)
|
||||||
|
.connect_timeout(Duration::from_secs(config.connection_timeout))
|
||||||
|
.acquire_timeout(Duration::from_secs(config.connection_timeout))
|
||||||
|
.idle_timeout(Duration::from_secs(600))
|
||||||
|
.max_lifetime(Duration::from_secs(3600))
|
||||||
|
.sqlx_logging(tracing::level_enabled!(tracing::Level::DEBUG))
|
||||||
|
.sqlx_logging_level(log::LevelFilter::Debug);
|
||||||
|
|
||||||
|
let connection = Database::connect(opt).await?;
|
||||||
|
|
||||||
|
info!("Database connection established successfully");
|
||||||
|
|
||||||
|
let manager = Self { connection };
|
||||||
|
|
||||||
|
// Run migrations if auto_migrate is enabled
|
||||||
|
if config.auto_migrate {
|
||||||
|
manager.migrate().await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(manager)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get database connection
|
||||||
|
pub fn connection(&self) -> &DatabaseConnection {
|
||||||
|
&self.connection
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run database migrations
|
||||||
|
pub async fn migrate(&self) -> Result<()> {
|
||||||
|
info!("Running database migrations...");
|
||||||
|
|
||||||
|
match Migrator::up(&self.connection, None).await {
|
||||||
|
Ok(_) => {
|
||||||
|
info!("Database migrations completed successfully");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!("Migration error: {}", e);
|
||||||
|
Err(e.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check database connection health
|
||||||
|
pub async fn health_check(&self) -> Result<bool> {
|
||||||
|
let stmt = Statement::from_string(DatabaseBackend::Postgres, "SELECT 1".to_owned());
|
||||||
|
match self.connection.execute(stmt).await {
|
||||||
|
Ok(_) => Ok(true),
|
||||||
|
Err(e) => {
|
||||||
|
warn!("Database health check failed: {}", e);
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get database schema information
|
||||||
|
pub async fn get_schema_version(&self) -> Result<Option<String>> {
|
||||||
|
// This would typically query a migrations table
|
||||||
|
// For now, we'll just return a placeholder
|
||||||
|
Ok(Some("1.0.0".to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encode database URL to handle special characters in passwords
|
||||||
|
fn encode_database_url(url: &str) -> Result<String> {
|
||||||
|
// Parse URL manually to handle special characters in password
|
||||||
|
if let Some(at_pos) = url.rfind('@') {
|
||||||
|
if let Some(_colon_pos) = url[..at_pos].rfind(':') {
|
||||||
|
if let Some(scheme_end) = url.find("://") {
|
||||||
|
let scheme = &url[..scheme_end + 3];
|
||||||
|
let user_pass = &url[scheme_end + 3..at_pos];
|
||||||
|
let host_db = &url[at_pos..];
|
||||||
|
|
||||||
|
if let Some(user_colon) = user_pass.find(':') {
|
||||||
|
let user = &user_pass[..user_colon];
|
||||||
|
let password = &user_pass[user_colon + 1..];
|
||||||
|
|
||||||
|
// URL-encode the password part only
|
||||||
|
let encoded_password = urlencoding::encode(password);
|
||||||
|
let encoded_url = format!("{}{}:{}{}", scheme, user, encoded_password, host_db);
|
||||||
|
|
||||||
|
return Ok(encoded_url);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If parsing fails, return original URL
|
||||||
|
Ok(url.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::config::DatabaseConfig;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_encode_database_url() {
|
||||||
|
let url_with_special_chars = "postgresql://user:pass#word@localhost:5432/db";
|
||||||
|
let encoded = DatabaseManager::encode_database_url(url_with_special_chars).unwrap();
|
||||||
|
assert_eq!(encoded, "postgresql://user:pass%23word@localhost:5432/db");
|
||||||
|
|
||||||
|
let normal_url = "postgresql://user:password@localhost:5432/db";
|
||||||
|
let encoded_normal = DatabaseManager::encode_database_url(normal_url).unwrap();
|
||||||
|
assert_eq!(encoded_normal, "postgresql://user:password@localhost:5432/db");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_database_connection() {
|
||||||
|
// This test requires a running PostgreSQL database
|
||||||
|
// Skip in CI or when database is not available
|
||||||
|
if std::env::var("DATABASE_URL").is_err() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let config = DatabaseConfig {
|
||||||
|
url: std::env::var("DATABASE_URL").unwrap(),
|
||||||
|
max_connections: 5,
|
||||||
|
connection_timeout: 30,
|
||||||
|
auto_migrate: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
let db = DatabaseManager::new(&config).await;
|
||||||
|
assert!(db.is_ok());
|
||||||
|
|
||||||
|
if let Ok(db) = db {
|
||||||
|
let health = db.health_check().await;
|
||||||
|
assert!(health.is_ok());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
75
src/database/repository/certificate.rs
Normal file
75
src/database/repository/certificate.rs
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
use sea_orm::*;
|
||||||
|
use crate::database::entities::{certificate, prelude::*};
|
||||||
|
use anyhow::Result;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct CertificateRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CertificateRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(&self, cert_data: certificate::CreateCertificateDto) -> Result<certificate::Model> {
|
||||||
|
let cert = certificate::ActiveModel::from(cert_data);
|
||||||
|
|
||||||
|
let result = Certificate::insert(cert).exec(&self.db).await?;
|
||||||
|
|
||||||
|
Certificate::find_by_id(result.last_insert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Failed to retrieve created certificate"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self) -> Result<Vec<certificate::Model>> {
|
||||||
|
Ok(Certificate::find().all(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<certificate::Model>> {
|
||||||
|
Ok(Certificate::find_by_id(id).one(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub async fn find_by_domain(&self, domain: &str) -> Result<Vec<certificate::Model>> {
|
||||||
|
Ok(Certificate::find()
|
||||||
|
.filter(certificate::Column::Domain.eq(domain))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub async fn find_by_type(&self, cert_type: &str) -> Result<Vec<certificate::Model>> {
|
||||||
|
Ok(Certificate::find()
|
||||||
|
.filter(certificate::Column::CertType.eq(cert_type))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(&self, id: Uuid, cert_data: certificate::UpdateCertificateDto) -> Result<certificate::Model> {
|
||||||
|
let cert = Certificate::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Certificate not found"))?;
|
||||||
|
|
||||||
|
let updated_cert = cert.apply_update(cert_data);
|
||||||
|
|
||||||
|
Ok(updated_cert.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = Certificate::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_expiring_soon(&self, days: i64) -> Result<Vec<certificate::Model>> {
|
||||||
|
let threshold = chrono::Utc::now() + chrono::Duration::days(days);
|
||||||
|
|
||||||
|
Ok(Certificate::find()
|
||||||
|
.filter(certificate::Column::ExpiresAt.lt(threshold))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
}
|
||||||
65
src/database/repository/inbound_template.rs
Normal file
65
src/database/repository/inbound_template.rs
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
use sea_orm::*;
|
||||||
|
use crate::database::entities::{inbound_template, prelude::*};
|
||||||
|
use anyhow::Result;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct InboundTemplateRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl InboundTemplateRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(&self, template_data: inbound_template::CreateInboundTemplateDto) -> Result<inbound_template::Model> {
|
||||||
|
let template = inbound_template::ActiveModel::from(template_data);
|
||||||
|
|
||||||
|
let result = InboundTemplate::insert(template).exec(&self.db).await?;
|
||||||
|
|
||||||
|
InboundTemplate::find_by_id(result.last_insert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Failed to retrieve created template"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self) -> Result<Vec<inbound_template::Model>> {
|
||||||
|
Ok(InboundTemplate::find().all(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<inbound_template::Model>> {
|
||||||
|
Ok(InboundTemplate::find_by_id(id).one(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_name(&self, name: &str) -> Result<Option<inbound_template::Model>> {
|
||||||
|
Ok(InboundTemplate::find()
|
||||||
|
.filter(inbound_template::Column::Name.eq(name))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_protocol(&self, protocol: &str) -> Result<Vec<inbound_template::Model>> {
|
||||||
|
Ok(InboundTemplate::find()
|
||||||
|
.filter(inbound_template::Column::Protocol.eq(protocol))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(&self, id: Uuid, template_data: inbound_template::UpdateInboundTemplateDto) -> Result<inbound_template::Model> {
|
||||||
|
let template = InboundTemplate::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Template not found"))?;
|
||||||
|
|
||||||
|
let updated_template = template.apply_update(template_data);
|
||||||
|
|
||||||
|
Ok(updated_template.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = InboundTemplate::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
}
|
||||||
132
src/database/repository/inbound_users.rs
Normal file
132
src/database/repository/inbound_users.rs
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, ColumnTrait, QueryFilter, Set};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::inbound_users::{
|
||||||
|
Entity, Model, ActiveModel, CreateInboundUserDto, UpdateInboundUserDto, Column
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Repository for per-inbound user rows (`inbound_users` table).
pub struct InboundUsersRepository {
    // Database connection used for all queries.
    db: DatabaseConnection,
}
|
||||||
|
|
||||||
|
impl InboundUsersRepository {
    /// Create a repository backed by `db`.
    pub fn new(db: DatabaseConnection) -> Self {
        Self { db }
    }

    /// Return every inbound-user row.
    pub async fn find_all(&self) -> Result<Vec<Model>> {
        let users = Entity::find().all(&self.db).await?;
        Ok(users)
    }

    /// Look up an inbound user by primary key.
    pub async fn find_by_id(&self, id: Uuid) -> Result<Option<Model>> {
        let user = Entity::find_by_id(id).one(&self.db).await?;
        Ok(user)
    }

    /// Find all users for a specific inbound
    pub async fn find_by_inbound_id(&self, inbound_id: Uuid) -> Result<Vec<Model>> {
        let users = Entity::find()
            .filter(Column::ServerInboundId.eq(inbound_id))
            .all(&self.db)
            .await?;
        Ok(users)
    }

    /// Find active users for a specific inbound
    pub async fn find_active_by_inbound_id(&self, inbound_id: Uuid) -> Result<Vec<Model>> {
        let users = Entity::find()
            .filter(Column::ServerInboundId.eq(inbound_id))
            .filter(Column::IsActive.eq(true))
            .all(&self.db)
            .await?;
        Ok(users)
    }

    /// Find user by username and inbound (for uniqueness check)
    pub async fn find_by_username_and_inbound(&self, username: &str, inbound_id: Uuid) -> Result<Option<Model>> {
        let user = Entity::find()
            .filter(Column::Username.eq(username))
            .filter(Column::ServerInboundId.eq(inbound_id))
            .one(&self.db)
            .await?;
        Ok(user)
    }

    /// Find user by email
    // NOTE(review): returns the first match only — presumably email is
    // unique across inbounds; confirm against the schema/constraints.
    pub async fn find_by_email(&self, email: &str) -> Result<Option<Model>> {
        let user = Entity::find()
            .filter(Column::Email.eq(email))
            .one(&self.db)
            .await?;
        Ok(user)
    }

    /// Insert a new inbound user built from `dto`.
    pub async fn create(&self, dto: CreateInboundUserDto) -> Result<Model> {
        let active_model: ActiveModel = dto.into();
        let user = active_model.insert(&self.db).await?;
        Ok(user)
    }

    /// Apply a partial update; returns `None` when `id` does not exist.
    pub async fn update(&self, id: Uuid, dto: UpdateInboundUserDto) -> Result<Option<Model>> {
        let user = match self.find_by_id(id).await? {
            Some(user) => user,
            None => return Ok(None),
        };

        // Merge only the fields present in the DTO.
        let updated_model = user.apply_update(dto);
        let updated_user = updated_model.update(&self.db).await?;
        Ok(Some(updated_user))
    }

    /// Delete by id; `true` when a row was actually removed.
    pub async fn delete(&self, id: Uuid) -> Result<bool> {
        let result = Entity::delete_by_id(id).exec(&self.db).await?;
        Ok(result.rows_affected > 0)
    }

    /// Enable user (set is_active = true)
    ///
    /// Returns the updated row, or `None` when `id` does not exist.
    pub async fn enable(&self, id: Uuid) -> Result<Option<Model>> {
        let user = match self.find_by_id(id).await? {
            Some(user) => user,
            None => return Ok(None),
        };

        let mut active_model: ActiveModel = user.into();
        active_model.is_active = Set(true);
        // Bump the modification timestamp alongside the flag change.
        active_model.updated_at = Set(chrono::Utc::now());

        let updated_user = active_model.update(&self.db).await?;
        Ok(Some(updated_user))
    }

    /// Disable user (set is_active = false)
    ///
    /// Returns the updated row, or `None` when `id` does not exist.
    pub async fn disable(&self, id: Uuid) -> Result<Option<Model>> {
        let user = match self.find_by_id(id).await? {
            Some(user) => user,
            None => return Ok(None),
        };

        let mut active_model: ActiveModel = user.into();
        active_model.is_active = Set(false);
        active_model.updated_at = Set(chrono::Utc::now());

        let updated_user = active_model.update(&self.db).await?;
        Ok(Some(updated_user))
    }

    /// Remove all users for a specific inbound (when inbound is deleted)
    ///
    /// Returns the number of deleted rows.
    pub async fn remove_all_for_inbound(&self, inbound_id: Uuid) -> Result<u64> {
        let result = Entity::delete_many()
            .filter(Column::ServerInboundId.eq(inbound_id))
            .exec(&self.db)
            .await?;
        Ok(result.rows_affected)
    }

    /// Check if username already exists on this inbound
    pub async fn username_exists_on_inbound(&self, username: &str, inbound_id: Uuid) -> Result<bool> {
        let exists = self.find_by_username_and_inbound(username, inbound_id).await?;
        Ok(exists.is_some())
    }
}
|
||||||
15
src/database/repository/mod.rs
Normal file
15
src/database/repository/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
//! Repository layer: one repository type per entity, each owning a
//! `DatabaseConnection` handle.

pub mod user;
pub mod certificate;
pub mod inbound_template;
pub mod server;
pub mod server_inbound;
pub mod user_access;
pub mod inbound_users;

// Re-export the repository types so callers can write
// `crate::database::repository::XxxRepository` directly.
pub use user::UserRepository;
pub use certificate::CertificateRepository;
pub use inbound_template::InboundTemplateRepository;
pub use server::ServerRepository;
pub use server_inbound::ServerInboundRepository;
pub use user_access::UserAccessRepository;
pub use inbound_users::InboundUsersRepository;
|
||||||
79
src/database/repository/server.rs
Normal file
79
src/database/repository/server.rs
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
use sea_orm::*;
|
||||||
|
use crate::database::entities::{server, prelude::*};
|
||||||
|
use anyhow::Result;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
/// Repository for `server` rows (managed Xray hosts).
#[derive(Clone)]
pub struct ServerRepository {
    // Database connection used for all queries.
    db: DatabaseConnection,
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl ServerRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(&self, server_data: server::CreateServerDto) -> Result<server::Model> {
|
||||||
|
let server = server::ActiveModel::from(server_data);
|
||||||
|
|
||||||
|
let result = Server::insert(server).exec(&self.db).await?;
|
||||||
|
|
||||||
|
Server::find_by_id(result.last_insert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Failed to retrieve created server"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self) -> Result<Vec<server::Model>> {
|
||||||
|
Ok(Server::find().all(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<server::Model>> {
|
||||||
|
Ok(Server::find_by_id(id).one(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_name(&self, name: &str) -> Result<Option<server::Model>> {
|
||||||
|
Ok(Server::find()
|
||||||
|
.filter(server::Column::Name.eq(name))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_hostname(&self, hostname: &str) -> Result<Option<server::Model>> {
|
||||||
|
Ok(Server::find()
|
||||||
|
.filter(server::Column::Hostname.eq(hostname))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_status(&self, status: &str) -> Result<Vec<server::Model>> {
|
||||||
|
Ok(Server::find()
|
||||||
|
.filter(server::Column::Status.eq(status))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(&self, id: Uuid, server_data: server::UpdateServerDto) -> Result<server::Model> {
|
||||||
|
let server = Server::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server not found"))?;
|
||||||
|
|
||||||
|
let updated_server = server.apply_update(server_data);
|
||||||
|
|
||||||
|
Ok(updated_server.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = Server::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_grpc_endpoint(&self, id: Uuid) -> Result<String> {
|
||||||
|
let server = self.find_by_id(id).await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server not found"))?;
|
||||||
|
|
||||||
|
Ok(format!("{}:{}", server.hostname, server.grpc_port))
|
||||||
|
}
|
||||||
|
}
|
||||||
159
src/database/repository/server_inbound.rs
Normal file
159
src/database/repository/server_inbound.rs
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
use sea_orm::*;
|
||||||
|
use crate::database::entities::{server_inbound, prelude::*};
|
||||||
|
use anyhow::Result;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
/// Repository for `server_inbound` rows (per-server listener configs).
#[derive(Clone)]
pub struct ServerInboundRepository {
    // Database connection used for all queries.
    db: DatabaseConnection,
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl ServerInboundRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(&self, server_id: Uuid, inbound_data: server_inbound::CreateServerInboundDto) -> Result<server_inbound::Model> {
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound_data.into();
|
||||||
|
inbound.id = Set(Uuid::new_v4());
|
||||||
|
inbound.server_id = Set(server_id);
|
||||||
|
inbound.created_at = Set(chrono::Utc::now());
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
let result = ServerInbound::insert(inbound).exec(&self.db).await?;
|
||||||
|
|
||||||
|
ServerInbound::find_by_id(result.last_insert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Failed to retrieve created server inbound"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create_with_protocol(&self, server_id: Uuid, inbound_data: server_inbound::CreateServerInboundDto, protocol: &str) -> Result<server_inbound::Model> {
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound_data.into();
|
||||||
|
inbound.id = Set(Uuid::new_v4());
|
||||||
|
inbound.server_id = Set(server_id);
|
||||||
|
inbound.created_at = Set(chrono::Utc::now());
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
// Override tag with protocol prefix
|
||||||
|
let id = inbound.id.as_ref();
|
||||||
|
inbound.tag = Set(format!("{}-inbound-{}", protocol, id));
|
||||||
|
|
||||||
|
let result = ServerInbound::insert(inbound).exec(&self.db).await?;
|
||||||
|
|
||||||
|
ServerInbound::find_by_id(result.last_insert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Failed to retrieve created server inbound"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find().all(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find_by_id(id).one(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_server_id(&self, server_id: Uuid) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::ServerId.eq(server_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_server_id_with_template(&self, server_id: Uuid) -> Result<Vec<server_inbound::ServerInboundResponse>> {
|
||||||
|
use crate::database::entities::{inbound_template, certificate};
|
||||||
|
|
||||||
|
let inbounds = ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::ServerId.eq(server_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut responses = Vec::new();
|
||||||
|
for inbound in inbounds {
|
||||||
|
let mut response = server_inbound::ServerInboundResponse::from(inbound.clone());
|
||||||
|
|
||||||
|
// Load template information
|
||||||
|
if let Ok(Some(template)) = InboundTemplate::find_by_id(inbound.template_id).one(&self.db).await {
|
||||||
|
response.template_name = Some(template.name);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load certificate information
|
||||||
|
if let Some(cert_id) = inbound.certificate_id {
|
||||||
|
if let Ok(Some(certificate)) = Certificate::find_by_id(cert_id).one(&self.db).await {
|
||||||
|
response.certificate_name = Some(certificate.domain);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
responses.push(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(responses)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_template_id(&self, template_id: Uuid) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::TemplateId.eq(template_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_tag(&self, tag: &str) -> Result<Option<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::Tag.eq(tag))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_active_by_server(&self, server_id: Uuid) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::ServerId.eq(server_id))
|
||||||
|
.filter(server_inbound::Column::IsActive.eq(true))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(&self, id: Uuid, inbound_data: server_inbound::UpdateServerInboundDto) -> Result<server_inbound::Model> {
|
||||||
|
let inbound = ServerInbound::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server inbound not found"))?;
|
||||||
|
|
||||||
|
let updated_inbound = inbound.apply_update(inbound_data);
|
||||||
|
|
||||||
|
Ok(updated_inbound.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = ServerInbound::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn activate(&self, id: Uuid) -> Result<server_inbound::Model> {
|
||||||
|
let inbound = ServerInbound::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server inbound not found"))?;
|
||||||
|
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound.into();
|
||||||
|
inbound.is_active = Set(true);
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
Ok(inbound.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn deactivate(&self, id: Uuid) -> Result<server_inbound::Model> {
|
||||||
|
let inbound = ServerInbound::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server inbound not found"))?;
|
||||||
|
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound.into();
|
||||||
|
inbound.is_active = Set(false);
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
Ok(inbound.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
}
|
||||||
157
src/database/repository/user.rs
Normal file
157
src/database/repository/user.rs
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{DatabaseConnection, EntityTrait, QueryFilter, ColumnTrait, QueryOrder, PaginatorTrait};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::user::{Entity as User, Column, Model, ActiveModel, CreateUserDto, UpdateUserDto};
|
||||||
|
|
||||||
|
/// Repository for application `user` rows.
pub struct UserRepository {
    // Database connection used for all queries.
    db: DatabaseConnection,
}
|
||||||
|
|
||||||
|
impl UserRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all users with pagination
|
||||||
|
pub async fn get_all(&self, page: u64, per_page: u64) -> Result<Vec<Model>> {
|
||||||
|
let users = User::find()
|
||||||
|
.order_by_desc(Column::CreatedAt)
|
||||||
|
.paginate(&self.db, per_page)
|
||||||
|
.fetch_page(page.saturating_sub(1))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get user by ID
|
||||||
|
pub async fn get_by_id(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let user = User::find_by_id(id).one(&self.db).await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get user by telegram ID
|
||||||
|
pub async fn get_by_telegram_id(&self, telegram_id: i64) -> Result<Option<Model>> {
|
||||||
|
let user = User::find()
|
||||||
|
.filter(Column::TelegramId.eq(telegram_id))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Search users by name
|
||||||
|
pub async fn search_by_name(&self, query: &str, page: u64, per_page: u64) -> Result<Vec<Model>> {
|
||||||
|
let users = User::find()
|
||||||
|
.filter(Column::Name.contains(query))
|
||||||
|
.order_by_desc(Column::CreatedAt)
|
||||||
|
.paginate(&self.db, per_page)
|
||||||
|
.fetch_page(page.saturating_sub(1))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new user
|
||||||
|
pub async fn create(&self, dto: CreateUserDto) -> Result<Model> {
|
||||||
|
let active_model: ActiveModel = dto.into();
|
||||||
|
let user = User::insert(active_model).exec_with_returning(&self.db).await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update user by ID
|
||||||
|
pub async fn update(&self, id: Uuid, dto: UpdateUserDto) -> Result<Option<Model>> {
|
||||||
|
if let Some(user) = self.get_by_id(id).await? {
|
||||||
|
let active_model = user.apply_update(dto);
|
||||||
|
User::update(active_model).exec(&self.db).await?;
|
||||||
|
// Fetch the updated user
|
||||||
|
self.get_by_id(id).await
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete user by ID
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = User::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get total count of users
|
||||||
|
pub async fn count(&self) -> Result<u64> {
|
||||||
|
let count = User::find().count(&self.db).await?;
|
||||||
|
Ok(count)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if telegram ID is already used
|
||||||
|
pub async fn telegram_id_exists(&self, telegram_id: i64) -> Result<bool> {
|
||||||
|
let count = User::find()
|
||||||
|
.filter(Column::TelegramId.eq(telegram_id))
|
||||||
|
.count(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(count > 0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::DatabaseManager;
    use crate::config::DatabaseConfig;

    /// Build a repository against `DATABASE_URL`, falling back to an
    /// in-memory SQLite database when the variable is unset.
    async fn setup_test_db() -> Result<UserRepository> {
        let config = DatabaseConfig {
            url: std::env::var("DATABASE_URL").unwrap_or_else(|_|
                "sqlite::memory:".to_string()
            ),
            max_connections: 5,
            connection_timeout: 30,
            auto_migrate: true,
        };

        let db_manager = DatabaseManager::new(&config).await?;
        Ok(UserRepository::new(db_manager.connection().clone()))
    }

    /// End-to-end CRUD smoke test: create, fetch, update, delete, verify.
    /// Silently skipped when no database can be reached.
    #[tokio::test]
    async fn test_user_crud() {
        let repo = match setup_test_db().await {
            Ok(repo) => repo,
            Err(_) => return, // Skip test if no database available
        };

        // Create user
        let create_dto = CreateUserDto {
            name: "Test User".to_string(),
            comment: Some("Test comment".to_string()),
            telegram_id: Some(123456789),
        };

        let created_user = repo.create(create_dto).await.unwrap();
        assert_eq!(created_user.name, "Test User");
        assert_eq!(created_user.telegram_id, Some(123456789));

        // Get by ID
        let fetched_user = repo.get_by_id(created_user.id).await.unwrap();
        assert!(fetched_user.is_some());
        assert_eq!(fetched_user.unwrap().name, "Test User");

        // Update user (partial: only the name changes)
        let update_dto = UpdateUserDto {
            name: Some("Updated User".to_string()),
            comment: None,
            telegram_id: None,
        };

        let updated_user = repo.update(created_user.id, update_dto).await.unwrap();
        assert!(updated_user.is_some());
        assert_eq!(updated_user.unwrap().name, "Updated User");

        // Delete user
        let deleted = repo.delete(created_user.id).await.unwrap();
        assert!(deleted);

        // Verify deletion
        let deleted_user = repo.get_by_id(created_user.id).await.unwrap();
        assert!(deleted_user.is_none());
    }
}
|
||||||
118
src/database/repository/user_access.rs
Normal file
118
src/database/repository/user_access.rs
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
use sea_orm::*;
|
||||||
|
use uuid::Uuid;
|
||||||
|
use anyhow::Result;
|
||||||
|
|
||||||
|
use crate::database::entities::user_access::{self, Entity as UserAccess, Model, ActiveModel, CreateUserAccessDto, UpdateUserAccessDto};
|
||||||
|
|
||||||
|
/// Repository for `user_access` rows (user ↔ server-inbound grants).
pub struct UserAccessRepository {
    // Database connection used for all queries.
    db: DatabaseConnection,
}
|
||||||
|
|
||||||
|
impl UserAccessRepository {
    /// Create a repository backed by `db`.
    pub fn new(db: DatabaseConnection) -> Self {
        Self { db }
    }

    /// Find all user access records
    pub async fn find_all(&self) -> Result<Vec<Model>> {
        let records = UserAccess::find().all(&self.db).await?;
        Ok(records)
    }

    /// Find user access by ID
    pub async fn find_by_id(&self, id: Uuid) -> Result<Option<Model>> {
        let record = UserAccess::find_by_id(id).one(&self.db).await?;
        Ok(record)
    }

    /// Find user access by user ID
    pub async fn find_by_user_id(&self, user_id: Uuid) -> Result<Vec<Model>> {
        let records = UserAccess::find()
            .filter(user_access::Column::UserId.eq(user_id))
            .all(&self.db)
            .await?;
        Ok(records)
    }

    /// Find user access by server and inbound
    pub async fn find_by_server_inbound(&self, server_id: Uuid, server_inbound_id: Uuid) -> Result<Vec<Model>> {
        let records = UserAccess::find()
            .filter(user_access::Column::ServerId.eq(server_id))
            .filter(user_access::Column::ServerInboundId.eq(server_inbound_id))
            .all(&self.db)
            .await?;
        Ok(records)
    }

    /// Find active user access for specific user, server and inbound
    // NOTE(review): assumes at most one active grant exists per
    // (user, server, inbound) triple — confirm against a unique constraint.
    pub async fn find_active_access(&self, user_id: Uuid, server_id: Uuid, server_inbound_id: Uuid) -> Result<Option<Model>> {
        let record = UserAccess::find()
            .filter(user_access::Column::UserId.eq(user_id))
            .filter(user_access::Column::ServerId.eq(server_id))
            .filter(user_access::Column::ServerInboundId.eq(server_inbound_id))
            .filter(user_access::Column::IsActive.eq(true))
            .one(&self.db)
            .await?;
        Ok(record)
    }

    /// Create new user access
    pub async fn create(&self, dto: CreateUserAccessDto) -> Result<Model> {
        let active_model: ActiveModel = dto.into();
        let model = active_model.insert(&self.db).await?;
        Ok(model)
    }

    /// Update user access
    ///
    /// Returns the updated row, or `None` when `id` does not exist.
    pub async fn update(&self, id: Uuid, dto: UpdateUserAccessDto) -> Result<Option<Model>> {
        let existing = match self.find_by_id(id).await? {
            Some(model) => model,
            None => return Ok(None),
        };

        // Merge only the fields present in the DTO.
        let active_model = existing.apply_update(dto);
        let updated = active_model.update(&self.db).await?;
        Ok(Some(updated))
    }

    /// Delete user access
    ///
    /// Returns `true` when a row was actually removed.
    pub async fn delete(&self, id: Uuid) -> Result<bool> {
        let result = UserAccess::delete_by_id(id).exec(&self.db).await?;
        Ok(result.rows_affected > 0)
    }

    /// Enable user access (set is_active = true)
    pub async fn enable(&self, id: Uuid) -> Result<Option<Model>> {
        self.update(id, UpdateUserAccessDto {
            is_active: Some(true),
            level: None,
        }).await
    }

    /// Disable user access (set is_active = false)
    pub async fn disable(&self, id: Uuid) -> Result<Option<Model>> {
        self.update(id, UpdateUserAccessDto {
            is_active: Some(false),
            level: None,
        }).await
    }

    /// Get all active access for a user
    pub async fn find_active_for_user(&self, user_id: Uuid) -> Result<Vec<Model>> {
        let records = UserAccess::find()
            .filter(user_access::Column::UserId.eq(user_id))
            .filter(user_access::Column::IsActive.eq(true))
            .all(&self.db)
            .await?;
        Ok(records)
    }

    /// Remove all access for a specific server inbound
    ///
    /// Returns the number of deleted rows.
    pub async fn remove_all_for_inbound(&self, server_inbound_id: Uuid) -> Result<u64> {
        let result = UserAccess::delete_many()
            .filter(user_access::Column::ServerInboundId.eq(server_inbound_id))
            .exec(&self.db)
            .await?;
        Ok(result.rows_affected)
    }
}
|
||||||
186
src/main.rs
Normal file
186
src/main.rs
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||||
|
|
||||||
|
mod config;
|
||||||
|
mod database;
|
||||||
|
mod services;
|
||||||
|
mod web;
|
||||||
|
|
||||||
|
use config::{AppConfig, args::parse_args};
|
||||||
|
use database::DatabaseManager;
|
||||||
|
use services::{TaskScheduler, XrayService};
|
||||||
|
|
||||||
|
/// Process entry point: parses CLI args, loads config, connects to the
/// database, starts the scheduler and event handlers, then runs the web
/// server until it exits or Ctrl-C is received.
#[tokio::main]
async fn main() -> Result<()> {
    // Parse command line arguments first
    let args = parse_args();

    // Initialize logging early with basic configuration
    init_logging(&args.log_level.as_deref().unwrap_or("info"))?;

    tracing::info!("Starting Xray Admin Panel v{}", env!("CARGO_PKG_VERSION"));

    // Handle special flags
    if args.print_default_config {
        print_default_config()?;
        return Ok(());
    }

    // Load configuration; a load failure is fatal only under
    // --validate-config, otherwise we fall back to defaults.
    let config = match AppConfig::load() {
        Ok(config) => {
            tracing::info!("Configuration loaded successfully");
            config
        }
        Err(e) => {
            tracing::error!("Failed to load configuration: {}", e);
            if args.validate_config {
                std::process::exit(1);
            }
            tracing::warn!("Using default configuration");
            AppConfig::default()
        }
    };

    // Validate configuration if requested
    if args.validate_config {
        tracing::info!("Configuration validation passed");
        return Ok(());
    }

    // Display configuration summary
    config.display_summary();

    // Print environment info in debug mode
    if tracing::level_enabled!(tracing::Level::DEBUG) {
        config::env::EnvVars::print_env_info();
    }

    // Initialize database connection
    tracing::info!("Initializing database connection...");
    let db = match DatabaseManager::new(&config.database).await {
        Ok(db) => {
            tracing::info!("Database initialized successfully");
            db
        }
        Err(e) => {
            tracing::error!("Failed to initialize database: {}", e);
            return Err(e);
        }
    };

    // Perform database health check (non-fatal: failures are only logged)
    match db.health_check().await {
        Ok(true) => tracing::info!("Database health check passed"),
        Ok(false) => tracing::warn!("Database health check failed"),
        Err(e) => tracing::error!("Database health check error: {}", e),
    }

    // Get schema version
    if let Ok(Some(version)) = db.get_schema_version().await {
        tracing::info!("Database schema version: {}", version);
    }

    // Initialize event bus first (the scheduler consumes its receiver below)
    let event_receiver = crate::services::events::init_event_bus();
    tracing::info!("Event bus initialized");

    // Initialize xray service
    let xray_service = XrayService::new();

    // Initialize and start task scheduler with dependencies
    let mut task_scheduler = TaskScheduler::new().await?;
    task_scheduler.start(db.clone(), xray_service).await?;
    tracing::info!("Task scheduler started with xray sync");

    // Start event-driven sync handler with the receiver
    TaskScheduler::start_event_handler(db.clone(), event_receiver).await;
    tracing::info!("Event-driven sync handler started");

    // Start web server with task scheduler
    tracing::info!("Starting web server on {}:{}", config.web.host, config.web.port);

    // Run until either the web server exits or a Ctrl-C arrives; on
    // shutdown the scheduler is stopped explicitly.
    tokio::select! {
        result = web::start_server(db, config.web.clone()) => {
            match result {
                Ok(_) => tracing::info!("Web server stopped gracefully"),
                Err(e) => tracing::error!("Web server error: {}", e),
            }
        }
        _ = tokio::signal::ctrl_c() => {
            tracing::info!("Shutdown signal received, stopping services...");
            if let Err(e) = task_scheduler.shutdown().await {
                tracing::error!("Error shutting down task scheduler: {}", e);
            }
        }
    }

    Ok(())
}
|
||||||
|
|
||||||
|
fn init_logging(level: &str) -> Result<()> {
|
||||||
|
let filter = tracing_subscriber::EnvFilter::try_from_default_env()
|
||||||
|
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new(level));
|
||||||
|
|
||||||
|
tracing_subscriber::registry()
|
||||||
|
.with(filter)
|
||||||
|
.with(
|
||||||
|
tracing_subscriber::fmt::layer()
|
||||||
|
.with_target(true) // Show module names
|
||||||
|
.with_thread_ids(false)
|
||||||
|
.with_thread_names(false)
|
||||||
|
.with_file(false)
|
||||||
|
.with_line_number(false)
|
||||||
|
.compact()
|
||||||
|
)
|
||||||
|
.try_init()?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print the built-in default configuration as pretty TOML to stdout,
/// prefixed with usage hints, so users can bootstrap a `config.toml`.
fn print_default_config() -> Result<()> {
    let default_config = AppConfig::default();
    let toml_content = toml::to_string_pretty(&default_config)?;

    println!("# Default configuration for Xray Admin Panel");
    println!("# Save this to config.toml and modify as needed\n");
    println!("{}", toml_content);

    Ok(())
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn mask_url(url: &str) -> String {
|
||||||
|
if let Ok(parsed) = url::Url::parse(url) {
|
||||||
|
if parsed.password().is_some() {
|
||||||
|
let mut masked = parsed.clone();
|
||||||
|
masked.set_password(Some("***")).unwrap();
|
||||||
|
masked.to_string()
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// A URL containing a password must come back with it masked.
    #[test]
    fn test_mask_url() {
        let masked = mask_url("postgresql://user:password@localhost/db");
        assert!(masked.contains("***"));
        assert!(!masked.contains("password"));
    }

    /// A URL without a password is returned verbatim.
    #[test]
    fn test_mask_url_no_password() {
        let original = "postgresql://user@localhost/db";
        assert_eq!(mask_url(original), original);
    }
}
|
||||||
41
src/services/certificates.rs
Normal file
41
src/services/certificates.rs
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
/// Certificate management service.
///
/// Currently a stateless placeholder: the methods return hard-coded mock
/// PEM strings (see `generate_self_signed` / `renew_certificate`).
#[derive(Clone)]
pub struct CertificateService {
    // Mock implementation for now
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl CertificateService {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate self-signed certificate
|
||||||
|
pub async fn generate_self_signed(&self, domain: &str) -> anyhow::Result<(String, String)> {
|
||||||
|
tracing::info!("Generating self-signed certificate for domain: {}", domain);
|
||||||
|
|
||||||
|
// Mock implementation - would use rcgen to generate actual certificate
|
||||||
|
let cert_pem = format!("-----BEGIN CERTIFICATE-----\nMOCK CERT FOR {}\n-----END CERTIFICATE-----", domain);
|
||||||
|
let key_pem = format!("-----BEGIN PRIVATE KEY-----\nMOCK KEY FOR {}\n-----END PRIVATE KEY-----", domain);
|
||||||
|
|
||||||
|
Ok((cert_pem, key_pem))
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Renew certificate
|
||||||
|
pub async fn renew_certificate(&self, domain: &str) -> anyhow::Result<(String, String)> {
|
||||||
|
tracing::info!("Renewing certificate for domain: {}", domain);
|
||||||
|
|
||||||
|
// Mock implementation
|
||||||
|
let cert_pem = format!("-----BEGIN CERTIFICATE-----\nRENEWED CERT FOR {}\n-----END CERTIFICATE-----", domain);
|
||||||
|
let key_pem = format!("-----BEGIN PRIVATE KEY-----\nRENEWED KEY FOR {}\n-----END PRIVATE KEY-----", domain);
|
||||||
|
|
||||||
|
Ok((cert_pem, key_pem))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for CertificateService {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
30
src/services/events.rs
Normal file
30
src/services/events.rs
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
use std::sync::OnceLock;
|
||||||
|
use tokio::sync::broadcast;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
/// Events that trigger an immediate re-sync of a single server.
///
/// Both variants carry the affected server's UUID; the event handler in
/// `TaskScheduler::start_event_handler` treats them identically.
#[derive(Clone, Debug)]
pub enum SyncEvent {
    /// An inbound was added/changed/removed on the given server.
    InboundChanged(Uuid), // server_id
    /// A user's access to an inbound changed on the given server.
    UserAccessChanged(Uuid), // server_id
}
|
||||||
|
|
||||||
|
// Process-wide broadcast sender, set exactly once by init_event_bus().
static EVENT_SENDER: OnceLock<broadcast::Sender<SyncEvent>> = OnceLock::new();

/// Initialize the event bus and return a receiver.
///
/// Must be called exactly once at startup; a second call panics
/// ("Event bus already initialized"). Channel capacity is 100 buffered
/// events; lagging receivers lose the oldest events (broadcast semantics).
pub fn init_event_bus() -> broadcast::Receiver<SyncEvent> {
    let (tx, rx) = broadcast::channel(100);
    EVENT_SENDER.set(tx).expect("Event bus already initialized");
    rx
}
|
||||||
|
|
||||||
|
/// Send a sync event (non-blocking)
|
||||||
|
pub fn send_sync_event(event: SyncEvent) {
|
||||||
|
if let Some(sender) = EVENT_SENDER.get() {
|
||||||
|
match sender.send(event.clone()) {
|
||||||
|
Ok(_) => tracing::info!("Event sent: {:?}", event),
|
||||||
|
Err(_) => tracing::warn!("No event receivers"),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
tracing::error!("Event bus not initialized");
|
||||||
|
}
|
||||||
|
}
|
||||||
7
src/services/mod.rs
Normal file
7
src/services/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
pub mod xray;
|
||||||
|
pub mod certificates;
|
||||||
|
pub mod events;
|
||||||
|
pub mod tasks;
|
||||||
|
|
||||||
|
pub use xray::XrayService;
|
||||||
|
pub use tasks::TaskScheduler;
|
||||||
484
src/services/tasks.rs
Normal file
484
src/services/tasks.rs
Normal file
@@ -0,0 +1,484 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use tokio_cron_scheduler::{JobScheduler, Job};
|
||||||
|
use tracing::{info, error, warn};
|
||||||
|
use crate::database::DatabaseManager;
|
||||||
|
use crate::database::repository::{ServerRepository, ServerInboundRepository, InboundTemplateRepository, InboundUsersRepository, CertificateRepository};
|
||||||
|
use crate::database::entities::inbound_users;
|
||||||
|
use crate::services::XrayService;
|
||||||
|
use crate::services::events::SyncEvent;
|
||||||
|
use sea_orm::{EntityTrait, ColumnTrait, QueryFilter, RelationTrait, JoinType};
|
||||||
|
use uuid::Uuid;
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::{Arc, RwLock};
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use serde::{Serialize, Deserialize};
|
||||||
|
|
||||||
|
/// Background task scheduler.
///
/// Owns the cron runner and a shared, thread-safe registry of per-task
/// bookkeeping that is also cloned into job closures so running jobs can
/// update their own status.
pub struct TaskScheduler {
    // Cron-based job runner (tokio-cron-scheduler).
    scheduler: JobScheduler,
    // Task-id -> status map, shared with spawned jobs via Arc.
    task_status: Arc<RwLock<HashMap<String, TaskStatus>>>,
}
|
||||||
|
|
||||||
|
/// Status of a background task, exposed to the web UI/API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskStatus {
    /// Human-readable task name.
    pub name: String,
    /// One-line description of what the task does.
    pub description: String,
    /// Human-readable schedule (cron expression plus explanation).
    pub schedule: String,
    /// Current lifecycle state.
    pub status: TaskState,
    /// When the task last started.
    pub last_run: Option<DateTime<Utc>>,
    /// Estimated next start time.
    pub next_run: Option<DateTime<Utc>>,
    /// Total attempts (successes + failures).
    pub total_runs: u64,
    /// Number of successful runs.
    pub success_count: u64,
    /// Number of failed runs.
    pub error_count: u64,
    /// Message from the most recent failure, cleared on success.
    pub last_error: Option<String>,
    /// Wall-clock duration of the most recent run.
    pub last_duration_ms: Option<u64>,
}
|
||||||
|
|
||||||
|
/// Lifecycle state of a background task.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TaskState {
    /// Registered but not yet run.
    Idle,
    /// Currently executing.
    Running,
    /// Last run completed successfully.
    Success,
    /// Last run failed (see `TaskStatus::last_error`).
    Error,
}
|
||||||
|
|
||||||
|
impl TaskScheduler {
|
||||||
|
pub async fn new() -> Result<Self> {
|
||||||
|
let scheduler = JobScheduler::new().await?;
|
||||||
|
let task_status = Arc::new(RwLock::new(HashMap::new()));
|
||||||
|
Ok(Self { scheduler, task_status })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get current status of all tasks
|
||||||
|
pub fn get_task_status(&self) -> HashMap<String, TaskStatus> {
|
||||||
|
self.task_status.read().unwrap().clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Start event-driven sync handler
|
||||||
|
pub async fn start_event_handler(db: DatabaseManager, mut event_receiver: tokio::sync::broadcast::Receiver<SyncEvent>) {
|
||||||
|
let xray_service = XrayService::new();
|
||||||
|
|
||||||
|
tokio::spawn(async move {
|
||||||
|
info!("Starting event-driven sync handler");
|
||||||
|
|
||||||
|
while let Ok(event) = event_receiver.recv().await {
|
||||||
|
match event {
|
||||||
|
SyncEvent::InboundChanged(server_id) | SyncEvent::UserAccessChanged(server_id) => {
|
||||||
|
info!("Received sync event for server {}", server_id);
|
||||||
|
|
||||||
|
if let Err(e) = sync_single_server_by_id(&xray_service, &db, server_id).await {
|
||||||
|
error!("Failed to sync server {} from event: {}", server_id, e);
|
||||||
|
} else {
|
||||||
|
info!("Successfully synced server {} from event", server_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn start(&mut self, db: DatabaseManager, xray_service: XrayService) -> Result<()> {
|
||||||
|
info!("Starting task scheduler with database synchronization");
|
||||||
|
|
||||||
|
// Initialize task status
|
||||||
|
{
|
||||||
|
let mut status = self.task_status.write().unwrap();
|
||||||
|
status.insert("xray_sync".to_string(), TaskStatus {
|
||||||
|
name: "Xray Synchronization".to_string(),
|
||||||
|
description: "Synchronizes database state with xray servers".to_string(),
|
||||||
|
schedule: "0 * * * * * (every minute)".to_string(),
|
||||||
|
status: TaskState::Idle,
|
||||||
|
last_run: None,
|
||||||
|
next_run: Some(Utc::now() + chrono::Duration::minutes(1)),
|
||||||
|
total_runs: 0,
|
||||||
|
success_count: 0,
|
||||||
|
error_count: 0,
|
||||||
|
last_error: None,
|
||||||
|
last_duration_ms: None,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run initial sync on startup
|
||||||
|
info!("Running initial xray synchronization on startup");
|
||||||
|
let start_time = Utc::now();
|
||||||
|
self.update_task_status("xray_sync", TaskState::Running, None);
|
||||||
|
|
||||||
|
match sync_xray_state(db.clone(), xray_service.clone()).await {
|
||||||
|
Ok(_) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
self.update_task_status("xray_sync", TaskState::Success, Some(duration));
|
||||||
|
info!("Initial xray sync completed successfully");
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
self.update_task_status_with_error("xray_sync", e.to_string(), Some(duration));
|
||||||
|
error!("Initial xray sync failed: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add synchronization task that runs every minute
|
||||||
|
let db_clone = db.clone();
|
||||||
|
let xray_service_clone = xray_service.clone();
|
||||||
|
let task_status_clone = self.task_status.clone();
|
||||||
|
|
||||||
|
let sync_job = Job::new_async("0 */5 * * * *", move |_uuid, _l| {
|
||||||
|
let db = db_clone.clone();
|
||||||
|
let xray_service = xray_service_clone.clone();
|
||||||
|
let task_status = task_status_clone.clone();
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
info!("Running scheduled xray synchronization");
|
||||||
|
let start_time = Utc::now();
|
||||||
|
|
||||||
|
// Update status to running
|
||||||
|
{
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Running;
|
||||||
|
task.last_run = Some(start_time);
|
||||||
|
task.total_runs += 1;
|
||||||
|
task.next_run = Some(start_time + chrono::Duration::minutes(1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match sync_xray_state(db, xray_service).await {
|
||||||
|
Ok(_) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Success;
|
||||||
|
task.success_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = None;
|
||||||
|
}
|
||||||
|
info!("Scheduled xray sync completed successfully in {}ms", duration);
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Error;
|
||||||
|
task.error_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = Some(e.to_string());
|
||||||
|
}
|
||||||
|
error!("Scheduled xray sync failed: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})?;
|
||||||
|
|
||||||
|
self.scheduler.add(sync_job).await?;
|
||||||
|
|
||||||
|
info!("Task scheduler started with sync job running every minute");
|
||||||
|
|
||||||
|
self.scheduler.start().await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_task_status(&self, task_id: &str, state: TaskState, duration_ms: Option<u64>) {
|
||||||
|
let mut status = self.task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut(task_id) {
|
||||||
|
task.status = state;
|
||||||
|
task.last_run = Some(Utc::now());
|
||||||
|
task.total_runs += 1;
|
||||||
|
task.success_count += 1;
|
||||||
|
task.last_duration_ms = duration_ms;
|
||||||
|
task.last_error = None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_task_status_with_error(&self, task_id: &str, error: String, duration_ms: Option<u64>) {
|
||||||
|
let mut status = self.task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut(task_id) {
|
||||||
|
task.status = TaskState::Error;
|
||||||
|
task.last_run = Some(Utc::now());
|
||||||
|
task.total_runs += 1;
|
||||||
|
task.error_count += 1;
|
||||||
|
task.last_duration_ms = duration_ms;
|
||||||
|
task.last_error = Some(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn shutdown(&mut self) -> Result<()> {
|
||||||
|
info!("Shutting down task scheduler");
|
||||||
|
self.scheduler.shutdown().await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Synchronize xray server state with database state.
///
/// For every server in the database: verify gRPC connectivity, compute the
/// desired inbound set from the DB, and push it to the server. Per-server
/// failures are logged and skipped so one unreachable/broken server cannot
/// block the others; only a failure to list servers aborts the whole sync.
async fn sync_xray_state(db: DatabaseManager, xray_service: XrayService) -> Result<()> {
    info!("Starting xray state synchronization");

    let server_repo = ServerRepository::new(db.connection().clone());
    let inbound_repo = ServerInboundRepository::new(db.connection().clone());
    let template_repo = InboundTemplateRepository::new(db.connection().clone());

    // Get all servers from database — the only fatal failure point.
    let servers = match server_repo.find_all().await {
        Ok(servers) => servers,
        Err(e) => {
            error!("Failed to fetch servers: {}", e);
            return Err(e.into());
        }
    };

    info!("Found {} servers to synchronize", servers.len());

    for server in servers {
        info!("Synchronizing server: {} ({}:{})", server.name, server.hostname, server.grpc_port);

        let endpoint = format!("{}:{}", server.hostname, server.grpc_port);

        // Test connection first; unreachable servers are skipped, not fatal.
        match xray_service.test_connection(server.id, &endpoint).await {
            Ok(true) => {
                info!("Connection to server {} successful", server.name);
            },
            Ok(false) => {
                warn!("Cannot connect to server {} at {}, skipping", server.name, endpoint);
                continue;
            },
            Err(e) => {
                error!("Error testing connection to server {}: {}", server.name, e);
                continue;
            }
        }

        // Get desired inbounds from database.
        let desired_inbounds = match get_desired_inbounds_from_db(&db, &server, &inbound_repo, &template_repo).await {
            Ok(inbounds) => inbounds,
            Err(e) => {
                error!("Failed to get desired inbounds for server {}: {}", server.name, e);
                continue;
            }
        };

        info!("Server {}: desired={} inbounds", server.name, desired_inbounds.len());

        // Synchronize inbounds; errors are logged per server and do not
        // abort the loop.
        if let Err(e) = sync_server_inbounds(
            &xray_service,
            server.id,
            &endpoint,
            &desired_inbounds
        ).await {
            error!("Failed to sync inbounds for server {}: {}", server.name, e);
        } else {
            info!("Successfully synchronized server {}", server.name);
        }
    }

    info!("Xray state synchronization completed");
    Ok(())
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Get desired inbounds configuration from database
|
||||||
|
async fn get_desired_inbounds_from_db(
|
||||||
|
db: &DatabaseManager,
|
||||||
|
server: &crate::database::entities::server::Model,
|
||||||
|
inbound_repo: &ServerInboundRepository,
|
||||||
|
template_repo: &InboundTemplateRepository,
|
||||||
|
) -> Result<HashMap<String, DesiredInbound>> {
|
||||||
|
info!("Getting desired inbounds for server {} from database", server.name);
|
||||||
|
|
||||||
|
// Get all inbounds for this server
|
||||||
|
let inbounds = inbound_repo.find_by_server_id(server.id).await?;
|
||||||
|
let mut desired_inbounds = HashMap::new();
|
||||||
|
|
||||||
|
for inbound in inbounds {
|
||||||
|
// Get template for this inbound
|
||||||
|
let template = match template_repo.find_by_id(inbound.template_id).await? {
|
||||||
|
Some(template) => template,
|
||||||
|
None => {
|
||||||
|
warn!("Template {} not found for inbound {}, skipping", inbound.template_id, inbound.tag);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get users for this inbound
|
||||||
|
let users = get_users_for_inbound(db, inbound.id).await?;
|
||||||
|
|
||||||
|
info!("Inbound {}: {} users found", inbound.tag, users.len());
|
||||||
|
|
||||||
|
// Get port from template or override
|
||||||
|
let port = inbound.port_override.unwrap_or(template.default_port);
|
||||||
|
|
||||||
|
// Get certificate if specified
|
||||||
|
let (cert_pem, key_pem) = if let Some(_cert_id) = inbound.certificate_id {
|
||||||
|
match load_certificate_from_db(db, inbound.certificate_id).await {
|
||||||
|
Ok((cert, key)) => (cert, key),
|
||||||
|
Err(e) => {
|
||||||
|
warn!("Failed to load certificate for inbound {}: {}", inbound.tag, e);
|
||||||
|
(None, None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(None, None)
|
||||||
|
};
|
||||||
|
|
||||||
|
let desired_inbound = DesiredInbound {
|
||||||
|
tag: inbound.tag.clone(),
|
||||||
|
port,
|
||||||
|
protocol: template.protocol.clone(),
|
||||||
|
settings: template.base_settings.clone(),
|
||||||
|
stream_settings: template.stream_settings.clone(),
|
||||||
|
users,
|
||||||
|
cert_pem,
|
||||||
|
key_pem,
|
||||||
|
};
|
||||||
|
|
||||||
|
desired_inbounds.insert(inbound.tag.clone(), desired_inbound);
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Found {} desired inbounds for server {}", desired_inbounds.len(), server.name);
|
||||||
|
Ok(desired_inbounds)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get users for specific inbound from database
|
||||||
|
async fn get_users_for_inbound(db: &DatabaseManager, inbound_id: Uuid) -> Result<Vec<XrayUser>> {
|
||||||
|
let inbound_users_repo = InboundUsersRepository::new(db.connection().clone());
|
||||||
|
|
||||||
|
let inbound_users = inbound_users_repo.find_active_by_inbound_id(inbound_id).await?;
|
||||||
|
|
||||||
|
let users: Vec<XrayUser> = inbound_users.into_iter().map(|user| {
|
||||||
|
XrayUser {
|
||||||
|
id: user.xray_user_id,
|
||||||
|
email: user.email,
|
||||||
|
level: user.level,
|
||||||
|
}
|
||||||
|
}).collect();
|
||||||
|
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load certificate from database
|
||||||
|
async fn load_certificate_from_db(db: &DatabaseManager, cert_id: Option<Uuid>) -> Result<(Option<String>, Option<String>)> {
|
||||||
|
let cert_id = match cert_id {
|
||||||
|
Some(id) => id,
|
||||||
|
None => return Ok((None, None)),
|
||||||
|
};
|
||||||
|
|
||||||
|
let cert_repo = CertificateRepository::new(db.connection().clone());
|
||||||
|
|
||||||
|
match cert_repo.find_by_id(cert_id).await? {
|
||||||
|
Some(cert) => {
|
||||||
|
info!("Loaded certificate: {}", cert.domain);
|
||||||
|
Ok((Some(cert.certificate_pem()), Some(cert.private_key_pem())))
|
||||||
|
},
|
||||||
|
None => {
|
||||||
|
warn!("Certificate {} not found", cert_id);
|
||||||
|
Ok((None, None))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Synchronize inbounds for a single server.
///
/// Xray exposes no API to list existing inbounds, so reconciliation is
/// remove-then-recreate: each desired inbound is first removed by tag
/// (errors ignored — the inbound may simply not exist yet) and then created
/// with its full user list and optional TLS material. Per-inbound create
/// failures are logged and do not abort the remaining inbounds.
///
/// NOTE(review): inbounds that exist on the server but are absent from
/// `desired_inbounds` are never removed here — presumably intentional given
/// the missing list API, but worth confirming.
async fn sync_server_inbounds(
    xray_service: &XrayService,
    server_id: Uuid,
    endpoint: &str,
    desired_inbounds: &HashMap<String, DesiredInbound>,
) -> Result<()> {

    // Create or update inbounds.
    // Since xray has no API to list inbounds, we always recreate them.
    for (tag, desired) in desired_inbounds {
        info!("Creating/updating inbound: {} with {} users", tag, desired.users.len());

        // Always try to remove inbound first (ignore errors if it doesn't exist).
        if let Err(e) = xray_service.remove_inbound(server_id, endpoint, tag).await {
            // Log but don't fail - inbound might not exist.
            info!("Inbound {} removal result: {} (this is normal if inbound didn't exist)", tag, e);
        }

        // Serialize users into the JSON shape the xray API expects.
        let users_json: Vec<Value> = desired.users.iter().map(|user| {
            serde_json::json!({
                "id": user.id,
                "email": user.email,
                "level": user.level
            })
        }).collect();

        // Create inbound with users (and TLS material when present).
        match xray_service.create_inbound_with_users(
            server_id,
            endpoint,
            &desired.tag,
            desired.port,
            &desired.protocol,
            desired.settings.clone(),
            desired.stream_settings.clone(),
            &users_json,
            desired.cert_pem.as_deref(),
            desired.key_pem.as_deref(),
        ).await {
            Ok(_) => {
                info!("Successfully created inbound {} with {} users", tag, desired.users.len());
            },
            Err(e) => {
                error!("Failed to create inbound {}: {}", tag, e);
            }
        }
    }

    Ok(())
}
|
||||||
|
|
||||||
|
/// Sync a single server by ID (for event-driven sync)
|
||||||
|
async fn sync_single_server_by_id(
|
||||||
|
xray_service: &XrayService,
|
||||||
|
db: &DatabaseManager,
|
||||||
|
server_id: Uuid,
|
||||||
|
) -> Result<()> {
|
||||||
|
let server_repo = ServerRepository::new(db.connection().clone());
|
||||||
|
let inbound_repo = ServerInboundRepository::new(db.connection().clone());
|
||||||
|
let template_repo = InboundTemplateRepository::new(db.connection().clone());
|
||||||
|
|
||||||
|
// Get server
|
||||||
|
let server = match server_repo.find_by_id(server_id).await? {
|
||||||
|
Some(server) => server,
|
||||||
|
None => {
|
||||||
|
warn!("Server {} not found for sync", server_id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// For now, sync all servers (can add active/inactive flag later)
|
||||||
|
|
||||||
|
// Get desired inbounds from database
|
||||||
|
let desired_inbounds = get_desired_inbounds_from_db(db, &server, &inbound_repo, &template_repo).await?;
|
||||||
|
|
||||||
|
// Build endpoint
|
||||||
|
let endpoint = format!("{}:{}", server.hostname, server.grpc_port);
|
||||||
|
|
||||||
|
// Sync server
|
||||||
|
sync_server_inbounds(xray_service, server_id, &endpoint, &desired_inbounds).await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Represents desired inbound configuration from database.
///
/// Assembled by `get_desired_inbounds_from_db` and consumed by
/// `sync_server_inbounds`.
#[derive(Debug, Clone)]
struct DesiredInbound {
    /// Unique inbound tag (map key in the desired set).
    tag: String,
    /// Listen port (template default or per-inbound override).
    port: i32,
    /// Protocol name from the template.
    protocol: String,
    /// Template base settings (raw JSON).
    settings: Value,
    /// Template stream settings (raw JSON).
    stream_settings: Value,
    /// Active users to provision on this inbound.
    users: Vec<XrayUser>,
    /// Optional TLS certificate (PEM).
    cert_pem: Option<String>,
    /// Optional TLS private key (PEM).
    key_pem: Option<String>,
}
|
||||||
|
|
||||||
|
/// Represents xray user configuration as provisioned on an inbound.
#[derive(Debug, Clone)]
struct XrayUser {
    /// Xray-side user identifier (from inbound_users.xray_user_id).
    id: String,
    /// User email, used by xray as the per-user key.
    email: String,
    /// Xray policy level.
    level: i32,
}
|
||||||
91
src/services/xray/client.rs
Normal file
91
src/services/xray/client.rs
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
use anyhow::{Result, anyhow};
|
||||||
|
use serde_json::Value;
|
||||||
|
use xray_core::Client;
|
||||||
|
|
||||||
|
// Import submodules from the same directory
|
||||||
|
use super::stats::StatsClient;
|
||||||
|
use super::inbounds::InboundClient;
|
||||||
|
use super::users::UserClient;
|
||||||
|
|
||||||
|
/// Xray gRPC client wrapper.
///
/// Thin facade over the xray_core client; each operation is delegated to a
/// purpose-specific sub-client (stats / inbounds / users) constructed on
/// demand from the shared connection.
pub struct XrayClient {
    // Endpoint string kept for logging and sub-client construction.
    endpoint: String,
    // Underlying xray_core gRPC client.
    client: Client,
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
impl XrayClient {
    /// Connect to Xray gRPC server at `endpoint` (e.g. "host:port").
    pub async fn connect(endpoint: &str) -> Result<Self> {
        tracing::info!("Connecting to Xray at {}", endpoint);

        let client = Client::from_url(endpoint).await
            .map_err(|e| anyhow!("Failed to connect to Xray at {}: {}", endpoint, e))?;

        // Don't clone - we'll use &self.client when calling methods.

        Ok(Self {
            endpoint: endpoint.to_string(),
            client,
        })
    }

    /// Get server statistics.
    pub async fn get_stats(&self) -> Result<Value> {
        let stats_client = StatsClient::new(self.endpoint.clone(), &self.client);
        stats_client.get_stats().await
    }

    /// Query specific statistics with pattern; `reset` clears counters after read.
    pub async fn query_stats(&self, pattern: &str, reset: bool) -> Result<Value> {
        let stats_client = StatsClient::new(self.endpoint.clone(), &self.client);
        stats_client.query_stats(pattern, reset).await
    }

    /// Restart Xray with new configuration.
    pub async fn restart_with_config(&self, config: &crate::services::xray::XrayConfig) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &self.client);
        inbound_client.restart_with_config(config).await
    }

    /// Add inbound configuration (no users, no TLS).
    pub async fn add_inbound(&self, inbound: &Value) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &self.client);
        inbound_client.add_inbound(inbound).await
    }

    /// Add inbound configuration with TLS certificate.
    /// Note: passes `None` for users — use add_inbound_with_users_and_certificate
    /// to provision users in the same call.
    pub async fn add_inbound_with_certificate(&self, inbound: &Value, cert_pem: Option<&str>, key_pem: Option<&str>) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &self.client);
        inbound_client.add_inbound_with_certificate(inbound, None, cert_pem, key_pem).await
    }

    /// Add inbound configuration with users and TLS certificate.
    pub async fn add_inbound_with_users_and_certificate(&self, inbound: &Value, users: &[Value], cert_pem: Option<&str>, key_pem: Option<&str>) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &self.client);
        inbound_client.add_inbound_with_certificate(inbound, Some(users), cert_pem, key_pem).await
    }

    /// Remove inbound by tag.
    pub async fn remove_inbound(&self, tag: &str) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &self.client);
        inbound_client.remove_inbound(tag).await
    }

    /// Add user to inbound identified by `inbound_tag`.
    pub async fn add_user(&self, inbound_tag: &str, user: &Value) -> Result<()> {
        let user_client = UserClient::new(self.endpoint.clone(), &self.client);
        user_client.add_user(inbound_tag, user).await
    }

    /// Remove user (by email) from inbound identified by `inbound_tag`.
    pub async fn remove_user(&self, inbound_tag: &str, email: &str) -> Result<()> {
        let user_client = UserClient::new(self.endpoint.clone(), &self.client);
        user_client.remove_user(inbound_tag, email).await
    }

    /// Get connection endpoint this client was created with.
    pub fn endpoint(&self) -> &str {
        &self.endpoint
    }
}
|
||||||
285
src/services/xray/config.rs
Normal file
285
src/services/xray/config.rs
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
/// Xray configuration structure (top-level xray config JSON).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct XrayConfig {
    pub log: LogConfig,
    pub api: ApiConfig,
    pub dns: Option<DnsConfig>,
    pub routing: Option<RoutingConfig>,
    pub policy: Option<PolicyConfig>,
    pub inbounds: Vec<InboundConfig>,
    pub outbounds: Vec<OutboundConfig>,
    pub transport: Option<TransportConfig>,
    pub stats: Option<StatsConfig>,
    pub reverse: Option<ReverseConfig>,
}

/// Xray "log" section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogConfig {
    /// Access log file path, if any.
    pub access: Option<String>,
    /// Error log file path, if any.
    pub error: Option<String>,
    /// Serialized as "loglevel" per xray's JSON schema.
    #[serde(rename = "loglevel")]
    pub log_level: String,
}

/// Xray "api" section: the gRPC control-plane listener.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiConfig {
    pub tag: String,
    pub listen: String,
}

/// Xray "dns" section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DnsConfig {
    pub servers: Vec<String>,
}

/// Xray "routing" section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoutingConfig {
    #[serde(rename = "domainStrategy")]
    pub domain_strategy: Option<String>,
    pub rules: Vec<RoutingRule>,
}

/// Single routing rule; unset matchers are omitted from the JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoutingRule {
    /// "type" is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub rule_type: String,
    pub domain: Option<Vec<String>>,
    pub ip: Option<Vec<String>>,
    pub port: Option<String>,
    #[serde(rename = "outboundTag")]
    pub outbound_tag: String,
}

/// Xray "policy" section: per-level and system-wide policies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PolicyConfig {
    /// Keyed by user level as a string (e.g. "0").
    pub levels: HashMap<String, PolicyLevel>,
    pub system: Option<SystemPolicy>,
}

/// Timeouts for one user level; values are in seconds.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PolicyLevel {
    #[serde(rename = "handshakeTimeout")]
    pub handshake_timeout: Option<u32>,
    #[serde(rename = "connIdle")]
    pub conn_idle: Option<u32>,
    #[serde(rename = "uplinkOnly")]
    pub uplink_only: Option<u32>,
    #[serde(rename = "downlinkOnly")]
    pub downlink_only: Option<u32>,
}

/// System-wide stats toggles.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SystemPolicy {
    #[serde(rename = "statsInboundUplink")]
    pub stats_inbound_uplink: Option<bool>,
    #[serde(rename = "statsInboundDownlink")]
    pub stats_inbound_downlink: Option<bool>,
}

/// One inbound listener; protocol-specific settings stay raw JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InboundConfig {
    pub tag: String,
    pub port: u16,
    pub listen: Option<String>,
    pub protocol: String,
    pub settings: Value,
    #[serde(rename = "streamSettings")]
    pub stream_settings: Option<Value>,
    pub sniffing: Option<SniffingConfig>,
}

/// One outbound; protocol-specific settings stay raw JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutboundConfig {
    pub tag: String,
    pub protocol: String,
    pub settings: Value,
    #[serde(rename = "streamSettings")]
    pub stream_settings: Option<Value>,
    pub mux: Option<MuxConfig>,
}

/// Traffic-sniffing options for an inbound.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SniffingConfig {
    pub enabled: bool,
    #[serde(rename = "destOverride")]
    pub dest_override: Vec<String>,
}

/// Mux options for an outbound.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MuxConfig {
    pub enabled: bool,
    pub concurrency: Option<i32>,
}

/// Xray "transport" section; all sub-settings kept as raw JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransportConfig {
    #[serde(rename = "tcpSettings")]
    pub tcp_settings: Option<Value>,
    #[serde(rename = "kcpSettings")]
    pub kcp_settings: Option<Value>,
    #[serde(rename = "wsSettings")]
    pub ws_settings: Option<Value>,
    #[serde(rename = "httpSettings")]
    pub http_settings: Option<Value>,
    #[serde(rename = "dsSettings")]
    pub ds_settings: Option<Value>,
    #[serde(rename = "quicSettings")]
    pub quic_settings: Option<Value>,
    #[serde(rename = "grpcSettings")]
    pub grpc_settings: Option<Value>,
}

/// Empty marker object: presence of "stats" enables stats collection.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StatsConfig {}

/// Xray "reverse" proxy section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReverseConfig {
    pub bridges: Option<Vec<BridgeConfig>>,
    pub portals: Option<Vec<PortalConfig>>,
}

/// Reverse-proxy bridge endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BridgeConfig {
    pub tag: String,
    pub domain: String,
}

/// Reverse-proxy portal endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PortalConfig {
    pub tag: String,
    pub domain: String,
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
impl XrayConfig {
    /// Create a new basic Xray configuration.
    ///
    /// The baseline includes: file logging at `warning` level, the gRPC API
    /// listener on `127.0.0.1:2053`, a routing rule that sends private IP
    /// ranges to the `direct` outbound, a single level-"0" policy with
    /// inbound traffic stats enabled, no inbounds, and two outbounds
    /// (`direct` = freedom, `blocked` = blackhole).
    pub fn new() -> Self {
        Self {
            log: LogConfig {
                access: Some("/var/log/xray/access.log".to_string()),
                error: Some("/var/log/xray/error.log".to_string()),
                log_level: "warning".to_string(),
            },
            api: ApiConfig {
                tag: "api".to_string(),
                // Loopback-only API listener.
                // NOTE(review): port 2053 is hard-coded — confirm it is free on targets.
                listen: "127.0.0.1:2053".to_string(),
            },
            dns: None,
            routing: Some(RoutingConfig {
                domain_strategy: Some("IPIfNonMatch".to_string()),
                rules: vec![
                    // Keep traffic to private address space off the proxy chain.
                    RoutingRule {
                        rule_type: "field".to_string(),
                        domain: None,
                        ip: Some(vec!["geoip:private".to_string()]),
                        port: None,
                        outbound_tag: "direct".to_string(),
                    }
                ],
            }),
            policy: Some(PolicyConfig {
                levels: {
                    // Single user level "0"; values are presumably seconds — TODO confirm
                    // against Xray's policy documentation.
                    let mut levels = HashMap::new();
                    levels.insert("0".to_string(), PolicyLevel {
                        handshake_timeout: Some(4),
                        conn_idle: Some(300),
                        uplink_only: Some(2),
                        downlink_only: Some(5),
                    });
                    levels
                },
                system: Some(SystemPolicy {
                    stats_inbound_uplink: Some(true),
                    stats_inbound_downlink: Some(true),
                }),
            }),
            inbounds: vec![],
            outbounds: vec![
                OutboundConfig {
                    tag: "direct".to_string(),
                    protocol: "freedom".to_string(),
                    settings: serde_json::json!({}),
                    stream_settings: None,
                    mux: None,
                },
                OutboundConfig {
                    tag: "blocked".to_string(),
                    protocol: "blackhole".to_string(),
                    settings: serde_json::json!({
                        "response": {
                            "type": "http"
                        }
                    }),
                    stream_settings: None,
                    mux: None,
                },
            ],
            transport: None,
            stats: Some(StatsConfig {}),
            reverse: None,
        }
    }

    /// Add inbound to configuration. No duplicate-tag check is done here;
    /// call `validate` afterwards to detect collisions.
    pub fn add_inbound(&mut self, inbound: InboundConfig) {
        self.inbounds.push(inbound);
    }

    /// Remove inbound by tag. Returns `true` if at least one entry was removed.
    pub fn remove_inbound(&mut self, tag: &str) -> bool {
        let initial_len = self.inbounds.len();
        self.inbounds.retain(|inbound| inbound.tag != tag);
        self.inbounds.len() != initial_len
    }

    /// Find inbound by tag (first match).
    pub fn find_inbound(&self, tag: &str) -> Option<&InboundConfig> {
        self.inbounds.iter().find(|inbound| inbound.tag == tag)
    }

    /// Find inbound by tag (mutable, first match).
    pub fn find_inbound_mut(&mut self, tag: &str) -> Option<&mut InboundConfig> {
        self.inbounds.iter_mut().find(|inbound| inbound.tag == tag)
    }

    /// Convert to JSON Value.
    ///
    /// NOTE(review): a serialization failure is silently mapped to
    /// `Value::Null` instead of being surfaced to the caller.
    pub fn to_json(&self) -> Value {
        serde_json::to_value(self).unwrap_or(Value::Null)
    }

    /// Create from JSON Value (clones the input before deserializing).
    pub fn from_json(value: &Value) -> Result<Self, serde_json::Error> {
        serde_json::from_value(value.clone())
    }

    /// Validate configuration: rejects duplicate inbound or outbound tags.
    /// Returns the first offending tag in the error message.
    pub fn validate(&self) -> Result<(), String> {
        // Check for duplicate inbound tags
        let mut tags = std::collections::HashSet::new();
        for inbound in &self.inbounds {
            if !tags.insert(&inbound.tag) {
                return Err(format!("Duplicate inbound tag: {}", inbound.tag));
            }
        }

        // Check for duplicate outbound tags (set is reused after clearing)
        tags.clear();
        for outbound in &self.outbounds {
            if !tags.insert(&outbound.tag) {
                return Err(format!("Duplicate outbound tag: {}", outbound.tag));
            }
        }

        Ok(())
    }
}
|
||||||
|
|
||||||
|
impl Default for XrayConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
325
src/services/xray/inbounds.rs
Normal file
325
src/services/xray/inbounds.rs
Normal file
@@ -0,0 +1,325 @@
|
|||||||
|
use anyhow::{Result, anyhow};
|
||||||
|
use serde_json::Value;
|
||||||
|
use xray_core::{
|
||||||
|
tonic::Request,
|
||||||
|
app::proxyman::command::{AddInboundRequest, RemoveInboundRequest},
|
||||||
|
core::InboundHandlerConfig,
|
||||||
|
common::serial::TypedMessage,
|
||||||
|
common::protocol::User,
|
||||||
|
app::proxyman::ReceiverConfig,
|
||||||
|
common::net::{PortList, PortRange},
|
||||||
|
transport::internet::StreamConfig,
|
||||||
|
transport::internet::tls::{Config as TlsConfig, Certificate as TlsCertificate},
|
||||||
|
proxy::vless::inbound::Config as VlessInboundConfig,
|
||||||
|
proxy::vless::Account as VlessAccount,
|
||||||
|
proxy::vmess::inbound::Config as VmessInboundConfig,
|
||||||
|
proxy::vmess::Account as VmessAccount,
|
||||||
|
proxy::trojan::ServerConfig as TrojanServerConfig,
|
||||||
|
proxy::trojan::Account as TrojanAccount,
|
||||||
|
proxy::shadowsocks::ServerConfig as ShadowsocksServerConfig,
|
||||||
|
proxy::shadowsocks::Account as ShadowsocksAccount,
|
||||||
|
Client,
|
||||||
|
prost_types,
|
||||||
|
};
|
||||||
|
use prost::Message;
|
||||||
|
|
||||||
|
/// gRPC helper for managing inbounds on a remote Xray instance.
pub struct InboundClient<'a> {
    // Server address; in this module it is only used in log messages.
    endpoint: String,
    // Borrowed, already-established connection to the Xray gRPC API.
    client: &'a Client,
}
|
||||||
|
|
||||||
|
impl<'a> InboundClient<'a> {
|
||||||
|
pub fn new(endpoint: String, client: &'a Client) -> Self {
|
||||||
|
Self { endpoint, client }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add inbound configuration
|
||||||
|
pub async fn add_inbound(&self, inbound: &Value) -> Result<()> {
|
||||||
|
self.add_inbound_with_certificate(inbound, None, None, None).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add inbound configuration with TLS certificate and users
|
||||||
|
pub async fn add_inbound_with_certificate(&self, inbound: &Value, users: Option<&[Value]>, cert_pem: Option<&str>, key_pem: Option<&str>) -> Result<()> {
|
||||||
|
tracing::info!("Adding inbound to Xray server at {}", self.endpoint);
|
||||||
|
tracing::debug!("Inbound config: {}", serde_json::to_string_pretty(inbound)?);
|
||||||
|
|
||||||
|
let tag = inbound["tag"].as_str().unwrap_or("").to_string();
|
||||||
|
let port = inbound["port"].as_u64().unwrap_or(8080) as u32;
|
||||||
|
let protocol = inbound["protocol"].as_str().unwrap_or("vless");
|
||||||
|
|
||||||
|
tracing::debug!("Creating inbound: tag={}, port={}, protocol={}", tag, port, protocol);
|
||||||
|
|
||||||
|
// Create receiver configuration (port binding) - use simple port number
|
||||||
|
let port_list = PortList {
|
||||||
|
range: vec![PortRange {
|
||||||
|
from: port,
|
||||||
|
to: port,
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create stream settings with TLS if certificates are provided
|
||||||
|
let stream_settings = if cert_pem.is_some() && key_pem.is_some() {
|
||||||
|
let cert_pem = cert_pem.unwrap();
|
||||||
|
let key_pem = key_pem.unwrap();
|
||||||
|
|
||||||
|
tracing::info!("Creating TLS stream settings for inbound");
|
||||||
|
tracing::debug!("Certificate length: {}, Key length: {}", cert_pem.len(), key_pem.len());
|
||||||
|
|
||||||
|
// Create TLS certificate with OneTimeLoading = true
|
||||||
|
// Convert PEM strings to byte vectors (certificate should be raw bytes, not PEM string)
|
||||||
|
let tls_cert = TlsCertificate {
|
||||||
|
certificate: cert_pem.as_bytes().to_vec(), // PEM as bytes
|
||||||
|
key: key_pem.as_bytes().to_vec(), // PEM key as bytes
|
||||||
|
usage: 0, // Default usage
|
||||||
|
ocsp_stapling: 0, // Default OCSP
|
||||||
|
one_time_loading: true, // OneTimeLoading = true as in example
|
||||||
|
build_chain: false,
|
||||||
|
certificate_path: "".to_string(),
|
||||||
|
key_path: "".to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create TLS config using Default and set only necessary fields
|
||||||
|
let mut tls_config = TlsConfig::default();
|
||||||
|
tls_config.certificate = vec![tls_cert];
|
||||||
|
|
||||||
|
// Create TLS security settings using prost_types::Any instead of TypedMessage
|
||||||
|
let tls_any = prost_types::Any::from_msg(&tls_config)
|
||||||
|
.map_err(|e| anyhow!("Failed to serialize TLS config: {}", e))?;
|
||||||
|
|
||||||
|
let tls_message = TypedMessage {
|
||||||
|
r#type: tls_any.type_url,
|
||||||
|
value: tls_any.value,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create stream config with TLS security settings
|
||||||
|
Some(StreamConfig {
|
||||||
|
address: None,
|
||||||
|
port: port,
|
||||||
|
protocol_name: "tcp".to_string(),
|
||||||
|
transport_settings: vec![],
|
||||||
|
security_type: "tls".to_string(),
|
||||||
|
security_settings: vec![tls_message],
|
||||||
|
socket_settings: None,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
tracing::info!("No certificates provided, creating inbound without TLS");
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let receiver_config = ReceiverConfig {
|
||||||
|
port_list: Some(port_list),
|
||||||
|
listen: None,
|
||||||
|
allocation_strategy: None,
|
||||||
|
stream_settings: stream_settings,
|
||||||
|
receive_original_destination: false,
|
||||||
|
sniffing_settings: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let receiver_message = TypedMessage {
|
||||||
|
r#type: "xray.app.proxyman.ReceiverConfig".to_string(),
|
||||||
|
value: receiver_config.encode_to_vec(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create proxy configuration based on protocol with users
|
||||||
|
let proxy_message = match protocol {
|
||||||
|
"vless" => {
|
||||||
|
let mut clients = vec![];
|
||||||
|
if let Some(users) = users {
|
||||||
|
for user in users {
|
||||||
|
let user_id = user["id"].as_str().unwrap_or("").to_string();
|
||||||
|
let email = user["email"].as_str().unwrap_or("").to_string();
|
||||||
|
let level = user["level"].as_u64().unwrap_or(0) as u32;
|
||||||
|
|
||||||
|
if !user_id.is_empty() && !email.is_empty() {
|
||||||
|
let account = VlessAccount {
|
||||||
|
id: user_id,
|
||||||
|
encryption: "none".to_string(),
|
||||||
|
flow: "".to_string(),
|
||||||
|
};
|
||||||
|
clients.push(User {
|
||||||
|
email,
|
||||||
|
level,
|
||||||
|
account: Some(TypedMessage {
|
||||||
|
r#type: "xray.proxy.vless.Account".to_string(),
|
||||||
|
value: account.encode_to_vec(),
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let vless_config = VlessInboundConfig {
|
||||||
|
clients,
|
||||||
|
decryption: "none".to_string(),
|
||||||
|
fallbacks: vec![],
|
||||||
|
};
|
||||||
|
TypedMessage {
|
||||||
|
r#type: "xray.proxy.vless.inbound.Config".to_string(),
|
||||||
|
value: vless_config.encode_to_vec(),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"vmess" => {
|
||||||
|
let mut vmess_users = vec![];
|
||||||
|
if let Some(users) = users {
|
||||||
|
for user in users {
|
||||||
|
let user_id = user["id"].as_str().unwrap_or("").to_string();
|
||||||
|
let email = user["email"].as_str().unwrap_or("").to_string();
|
||||||
|
let level = user["level"].as_u64().unwrap_or(0) as u32;
|
||||||
|
|
||||||
|
if !user_id.is_empty() && !email.is_empty() {
|
||||||
|
let account = VmessAccount {
|
||||||
|
id: user_id,
|
||||||
|
security_settings: None,
|
||||||
|
tests_enabled: "".to_string(),
|
||||||
|
};
|
||||||
|
vmess_users.push(User {
|
||||||
|
email,
|
||||||
|
level,
|
||||||
|
account: Some(TypedMessage {
|
||||||
|
r#type: "xray.proxy.vmess.Account".to_string(),
|
||||||
|
value: account.encode_to_vec(),
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let vmess_config = VmessInboundConfig {
|
||||||
|
user: vmess_users,
|
||||||
|
default: None,
|
||||||
|
detour: None,
|
||||||
|
};
|
||||||
|
TypedMessage {
|
||||||
|
r#type: "xray.proxy.vmess.inbound.Config".to_string(),
|
||||||
|
value: vmess_config.encode_to_vec(),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"trojan" => {
|
||||||
|
let mut trojan_users = vec![];
|
||||||
|
if let Some(users) = users {
|
||||||
|
for user in users {
|
||||||
|
let password = user["password"].as_str().or_else(|| user["id"].as_str()).unwrap_or("").to_string();
|
||||||
|
let email = user["email"].as_str().unwrap_or("").to_string();
|
||||||
|
let level = user["level"].as_u64().unwrap_or(0) as u32;
|
||||||
|
|
||||||
|
if !password.is_empty() && !email.is_empty() {
|
||||||
|
let account = TrojanAccount {
|
||||||
|
password,
|
||||||
|
};
|
||||||
|
trojan_users.push(User {
|
||||||
|
email,
|
||||||
|
level,
|
||||||
|
account: Some(TypedMessage {
|
||||||
|
r#type: "xray.proxy.trojan.Account".to_string(),
|
||||||
|
value: account.encode_to_vec(),
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let trojan_config = TrojanServerConfig {
|
||||||
|
users: trojan_users,
|
||||||
|
fallbacks: vec![],
|
||||||
|
};
|
||||||
|
TypedMessage {
|
||||||
|
r#type: "xray.proxy.trojan.ServerConfig".to_string(),
|
||||||
|
value: trojan_config.encode_to_vec(),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"shadowsocks" => {
|
||||||
|
let mut ss_users = vec![];
|
||||||
|
if let Some(users) = users {
|
||||||
|
for user in users {
|
||||||
|
let password = user["password"].as_str().or_else(|| user["id"].as_str()).unwrap_or("").to_string();
|
||||||
|
let email = user["email"].as_str().unwrap_or("").to_string();
|
||||||
|
let level = user["level"].as_u64().unwrap_or(0) as u32;
|
||||||
|
|
||||||
|
if !password.is_empty() && !email.is_empty() {
|
||||||
|
let account = ShadowsocksAccount {
|
||||||
|
password,
|
||||||
|
cipher_type: 0, // Default cipher
|
||||||
|
iv_check: false, // Default IV check
|
||||||
|
};
|
||||||
|
ss_users.push(User {
|
||||||
|
email,
|
||||||
|
level,
|
||||||
|
account: Some(TypedMessage {
|
||||||
|
r#type: "xray.proxy.shadowsocks.Account".to_string(),
|
||||||
|
value: account.encode_to_vec(),
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let shadowsocks_config = ShadowsocksServerConfig {
|
||||||
|
users: ss_users,
|
||||||
|
network: vec![], // Support all networks by default
|
||||||
|
};
|
||||||
|
TypedMessage {
|
||||||
|
r#type: "xray.proxy.shadowsocks.ServerConfig".to_string(),
|
||||||
|
value: shadowsocks_config.encode_to_vec(),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => {
|
||||||
|
return Err(anyhow!("Unsupported protocol: {}", protocol));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let inbound_config = InboundHandlerConfig {
|
||||||
|
tag: tag.clone(),
|
||||||
|
receiver_settings: Some(receiver_message),
|
||||||
|
proxy_settings: Some(proxy_message),
|
||||||
|
};
|
||||||
|
|
||||||
|
let request = Request::new(AddInboundRequest {
|
||||||
|
inbound: Some(inbound_config),
|
||||||
|
});
|
||||||
|
|
||||||
|
tracing::info!("Sending AddInboundRequest for '{}'", tag);
|
||||||
|
let mut handler_client = self.client.handler();
|
||||||
|
match handler_client.add_inbound(request).await {
|
||||||
|
Ok(response) => {
|
||||||
|
let _response_inner = response.into_inner();
|
||||||
|
tracing::info!("Successfully added inbound {}", tag);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to add inbound {}: {}", tag, e);
|
||||||
|
Err(anyhow!("Failed to add inbound {}: {}", tag, e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove inbound by tag
|
||||||
|
pub async fn remove_inbound(&self, tag: &str) -> Result<()> {
|
||||||
|
tracing::info!("Removing inbound '{}' from Xray server at {}", tag, self.endpoint);
|
||||||
|
|
||||||
|
let mut handler_client = self.client.handler();
|
||||||
|
let request = Request::new(RemoveInboundRequest {
|
||||||
|
tag: tag.to_string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
match handler_client.remove_inbound(request).await {
|
||||||
|
Ok(_) => {
|
||||||
|
tracing::info!("Successfully removed inbound");
|
||||||
|
Ok(())
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to remove inbound: {}", e);
|
||||||
|
Err(anyhow!("Failed to remove inbound: {}", e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Restart Xray with new configuration
|
||||||
|
pub async fn restart_with_config(&self, config: &crate::services::xray::XrayConfig) -> Result<()> {
|
||||||
|
tracing::info!("Restarting Xray server at {} with new config", self.endpoint);
|
||||||
|
tracing::debug!("Config: {}", serde_json::to_string_pretty(&config.to_json())?);
|
||||||
|
|
||||||
|
// TODO: Implement restart with config using xray-core
|
||||||
|
// For now just return success
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
213
src/services/xray/mod.rs
Normal file
213
src/services/xray/mod.rs
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use serde_json::Value;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
pub mod client;
|
||||||
|
pub mod config;
|
||||||
|
pub mod stats;
|
||||||
|
pub mod inbounds;
|
||||||
|
pub mod users;
|
||||||
|
|
||||||
|
pub use client::XrayClient;
|
||||||
|
pub use config::XrayConfig;
|
||||||
|
|
||||||
|
/// Service for managing Xray servers via gRPC.
///
/// Stateless facade: each method establishes a fresh `XrayClient` for the
/// given endpoint and delegates the operation to it.
#[derive(Clone)]
pub struct XrayService {}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl XrayService {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a client for the specified server
|
||||||
|
async fn create_client(&self, endpoint: &str) -> Result<XrayClient> {
|
||||||
|
XrayClient::connect(endpoint).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Test connection to Xray server
|
||||||
|
pub async fn test_connection(&self, _server_id: Uuid, endpoint: &str) -> Result<bool> {
|
||||||
|
match self.create_client(endpoint).await {
|
||||||
|
Ok(_client) => {
|
||||||
|
// Instead of getting stats (which might fail), just test connection
|
||||||
|
// If we successfully created the client, connection is working
|
||||||
|
Ok(true)
|
||||||
|
},
|
||||||
|
Err(_) => Ok(false),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply full configuration to Xray server
|
||||||
|
pub async fn apply_config(&self, _server_id: Uuid, endpoint: &str, config: &XrayConfig) -> Result<()> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.restart_with_config(config).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create inbound from template
|
||||||
|
pub async fn create_inbound(
|
||||||
|
&self,
|
||||||
|
_server_id: Uuid,
|
||||||
|
endpoint: &str,
|
||||||
|
tag: &str,
|
||||||
|
port: i32,
|
||||||
|
protocol: &str,
|
||||||
|
base_settings: Value,
|
||||||
|
stream_settings: Value,
|
||||||
|
) -> Result<()> {
|
||||||
|
// Build inbound configuration from template
|
||||||
|
let inbound_config = serde_json::json!({
|
||||||
|
"tag": tag,
|
||||||
|
"port": port,
|
||||||
|
"protocol": protocol,
|
||||||
|
"settings": base_settings,
|
||||||
|
"streamSettings": stream_settings
|
||||||
|
});
|
||||||
|
|
||||||
|
tracing::info!("Creating inbound with config: {}", inbound_config);
|
||||||
|
self.add_inbound(_server_id, endpoint, &inbound_config).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create inbound from template with TLS certificate
|
||||||
|
pub async fn create_inbound_with_certificate(
|
||||||
|
&self,
|
||||||
|
_server_id: Uuid,
|
||||||
|
endpoint: &str,
|
||||||
|
tag: &str,
|
||||||
|
port: i32,
|
||||||
|
protocol: &str,
|
||||||
|
base_settings: Value,
|
||||||
|
stream_settings: Value,
|
||||||
|
cert_pem: Option<&str>,
|
||||||
|
key_pem: Option<&str>,
|
||||||
|
) -> Result<()> {
|
||||||
|
// Build inbound configuration from template
|
||||||
|
let inbound_config = serde_json::json!({
|
||||||
|
"tag": tag,
|
||||||
|
"port": port,
|
||||||
|
"protocol": protocol,
|
||||||
|
"settings": base_settings,
|
||||||
|
"streamSettings": stream_settings
|
||||||
|
});
|
||||||
|
|
||||||
|
tracing::info!("Creating inbound with TLS certificate and config: {}", inbound_config);
|
||||||
|
self.add_inbound_with_certificate(_server_id, endpoint, &inbound_config, cert_pem, key_pem).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add inbound to running Xray instance
|
||||||
|
pub async fn add_inbound(&self, _server_id: Uuid, endpoint: &str, inbound: &Value) -> Result<()> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.add_inbound(inbound).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add inbound with certificate to running Xray instance
|
||||||
|
pub async fn add_inbound_with_certificate(&self, _server_id: Uuid, endpoint: &str, inbound: &Value, cert_pem: Option<&str>, key_pem: Option<&str>) -> Result<()> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.add_inbound_with_certificate(inbound, cert_pem, key_pem).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add inbound with users and certificate to running Xray instance
|
||||||
|
pub async fn add_inbound_with_users_and_certificate(&self, _server_id: Uuid, endpoint: &str, inbound: &Value, users: &[Value], cert_pem: Option<&str>, key_pem: Option<&str>) -> Result<()> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.add_inbound_with_users_and_certificate(inbound, users, cert_pem, key_pem).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove inbound from running Xray instance
|
||||||
|
pub async fn remove_inbound(&self, _server_id: Uuid, endpoint: &str, tag: &str) -> Result<()> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.remove_inbound(tag).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add user to inbound by recreating the inbound with updated user list
|
||||||
|
pub async fn add_user(&self, _server_id: Uuid, endpoint: &str, inbound_tag: &str, user: &Value) -> Result<()> {
|
||||||
|
tracing::info!("XrayService::add_user called for server {} endpoint {} inbound_tag {}", _server_id, endpoint, inbound_tag);
|
||||||
|
tracing::warn!("Dynamic user addition via AlterInboundRequest doesn't work reliably - need to implement inbound recreation");
|
||||||
|
|
||||||
|
// TODO: Implement inbound recreation approach:
|
||||||
|
// 1. Get current inbound configuration from database
|
||||||
|
// 2. Get existing users from database
|
||||||
|
// 3. Remove old inbound from xray
|
||||||
|
// 4. Create new inbound with all users (existing + new)
|
||||||
|
// For now, return error to indicate this needs to be implemented
|
||||||
|
|
||||||
|
Err(anyhow::anyhow!("User addition requires inbound recreation - not yet implemented. Use web interface to recreate inbound with users."))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create inbound with users list (for inbound recreation approach)
|
||||||
|
pub async fn create_inbound_with_users(
|
||||||
|
&self,
|
||||||
|
_server_id: Uuid,
|
||||||
|
endpoint: &str,
|
||||||
|
tag: &str,
|
||||||
|
port: i32,
|
||||||
|
protocol: &str,
|
||||||
|
base_settings: Value,
|
||||||
|
stream_settings: Value,
|
||||||
|
users: &[Value],
|
||||||
|
cert_pem: Option<&str>,
|
||||||
|
key_pem: Option<&str>,
|
||||||
|
) -> Result<()> {
|
||||||
|
tracing::info!("Creating inbound '{}' with {} users", tag, users.len());
|
||||||
|
|
||||||
|
// Build inbound configuration with users
|
||||||
|
let mut inbound_config = serde_json::json!({
|
||||||
|
"tag": tag,
|
||||||
|
"port": port,
|
||||||
|
"protocol": protocol,
|
||||||
|
"settings": base_settings,
|
||||||
|
"streamSettings": stream_settings
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add users to settings based on protocol
|
||||||
|
if !users.is_empty() {
|
||||||
|
let mut settings = inbound_config["settings"].clone();
|
||||||
|
match protocol {
|
||||||
|
"vless" | "vmess" => {
|
||||||
|
settings["clients"] = serde_json::Value::Array(users.to_vec());
|
||||||
|
},
|
||||||
|
"trojan" => {
|
||||||
|
settings["clients"] = serde_json::Value::Array(users.to_vec());
|
||||||
|
},
|
||||||
|
"shadowsocks" => {
|
||||||
|
// For shadowsocks, users are handled differently
|
||||||
|
if let Some(user) = users.first() {
|
||||||
|
settings["password"] = user["password"].clone();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => {
|
||||||
|
return Err(anyhow::anyhow!("Unsupported protocol for users: {}", protocol));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
inbound_config["settings"] = settings;
|
||||||
|
}
|
||||||
|
|
||||||
|
tracing::info!("Creating inbound with users: {}", serde_json::to_string_pretty(&inbound_config)?);
|
||||||
|
|
||||||
|
// Use the new method with users support
|
||||||
|
self.add_inbound_with_users_and_certificate(_server_id, endpoint, &inbound_config, users, cert_pem, key_pem).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove user from inbound
|
||||||
|
pub async fn remove_user(&self, _server_id: Uuid, endpoint: &str, inbound_tag: &str, email: &str) -> Result<()> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.remove_user(inbound_tag, email).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get server statistics
|
||||||
|
pub async fn get_stats(&self, _server_id: Uuid, endpoint: &str) -> Result<Value> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.get_stats().await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Query specific statistics
|
||||||
|
pub async fn query_stats(&self, _server_id: Uuid, endpoint: &str, pattern: &str, reset: bool) -> Result<Value> {
|
||||||
|
let client = self.create_client(endpoint).await?;
|
||||||
|
client.query_stats(pattern, reset).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for XrayService {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
70
src/services/xray/stats.rs
Normal file
70
src/services/xray/stats.rs
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
use anyhow::{Result, anyhow};
|
||||||
|
use serde_json::Value;
|
||||||
|
use xray_core::{
|
||||||
|
tonic::Request,
|
||||||
|
app::stats::command::{GetStatsRequest, QueryStatsRequest},
|
||||||
|
Client,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// gRPC helper for reading statistics from a remote Xray instance.
pub struct StatsClient<'a> {
    // Server address; in this module it is only used in log messages.
    endpoint: String,
    // Borrowed, already-established connection to the Xray gRPC API.
    client: &'a Client,
}
|
||||||
|
|
||||||
|
impl<'a> StatsClient<'a> {
|
||||||
|
pub fn new(endpoint: String, client: &'a Client) -> Self {
|
||||||
|
Self { endpoint, client }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get server statistics
|
||||||
|
pub async fn get_stats(&self) -> Result<Value> {
|
||||||
|
tracing::info!("Getting stats from Xray server at {}", self.endpoint);
|
||||||
|
|
||||||
|
let request = Request::new(GetStatsRequest {
|
||||||
|
name: "".to_string(),
|
||||||
|
reset: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut stats_client = self.client.stats();
|
||||||
|
match stats_client.get_stats(request).await {
|
||||||
|
Ok(response) => {
|
||||||
|
let stats = response.into_inner();
|
||||||
|
tracing::debug!("Stats: {:?}", stats);
|
||||||
|
let stats_json = serde_json::json!({
|
||||||
|
"stats": format!("{:?}", stats.stat)
|
||||||
|
});
|
||||||
|
Ok(stats_json)
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to get stats: {}", e);
|
||||||
|
Err(anyhow!("Failed to get stats: {}", e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Query specific statistics with pattern
|
||||||
|
pub async fn query_stats(&self, pattern: &str, reset: bool) -> Result<Value> {
|
||||||
|
tracing::info!("Querying stats with pattern '{}', reset: {} from {}", pattern, reset, self.endpoint);
|
||||||
|
|
||||||
|
let request = Request::new(QueryStatsRequest {
|
||||||
|
pattern: pattern.to_string(),
|
||||||
|
reset,
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut stats_client = self.client.stats();
|
||||||
|
match stats_client.query_stats(request).await {
|
||||||
|
Ok(response) => {
|
||||||
|
let stats = response.into_inner();
|
||||||
|
tracing::debug!("Query stats: {:?}", stats);
|
||||||
|
let stats_json = serde_json::json!({
|
||||||
|
"stat": format!("{:?}", stats.stat)
|
||||||
|
});
|
||||||
|
Ok(stats_json)
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to query stats: {}", e);
|
||||||
|
Err(anyhow!("Failed to query stats: {}", e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
150
src/services/xray/users.rs
Normal file
150
src/services/xray/users.rs
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
use anyhow::{Result, anyhow};
|
||||||
|
use serde_json::Value;
|
||||||
|
use xray_core::{
|
||||||
|
tonic::Request,
|
||||||
|
app::proxyman::command::{AlterInboundRequest, AddUserOperation, RemoveUserOperation},
|
||||||
|
common::serial::TypedMessage,
|
||||||
|
common::protocol::User,
|
||||||
|
proxy::vless::Account as VlessAccount,
|
||||||
|
proxy::vmess::Account as VmessAccount,
|
||||||
|
proxy::trojan::Account as TrojanAccount,
|
||||||
|
Client,
|
||||||
|
};
|
||||||
|
use prost::Message;
|
||||||
|
|
||||||
|
/// gRPC helper for managing users of inbounds on a remote Xray instance.
pub struct UserClient<'a> {
    // Server address; in this module it is only used in log messages.
    endpoint: String,
    // Borrowed, already-established connection to the Xray gRPC API.
    client: &'a Client,
}
|
||||||
|
|
||||||
|
impl<'a> UserClient<'a> {
|
||||||
|
    /// Build a user-management client over an existing gRPC connection.
    pub fn new(endpoint: String, client: &'a Client) -> Self {
        Self { endpoint, client }
    }
|
||||||
|
|
||||||
|
    /// Add user to inbound (simple version that works).
    ///
    /// Builds a protocol-specific account (`vless`, `vmess`, or `trojan`)
    /// from the JSON `user` object and sends an `AlterInboundRequest`
    /// carrying an `AddUserOperation` to the running Xray instance.
    ///
    /// Expected keys in `user`: `email` and `id` (both required; error if
    /// missing), optional `level` (defaults to 0) and `protocol`
    /// (defaults to "vless").
    pub async fn add_user(&self, inbound_tag: &str, user: &Value) -> Result<()> {
        tracing::info!("Adding user to inbound '{}' on Xray server at {}", inbound_tag, self.endpoint);
        tracing::debug!("User config: {}", serde_json::to_string_pretty(user)?);

        let email = user["email"].as_str().unwrap_or("").to_string();
        let user_id = user["id"].as_str().unwrap_or("").to_string();
        let level = user["level"].as_u64().unwrap_or(0) as u32;
        let protocol = user["protocol"].as_str().unwrap_or("vless");

        tracing::info!("Parsed user data: email={}, id={}, level={}, protocol={}", email, user_id, level, protocol);

        if email.is_empty() || user_id.is_empty() {
            return Err(anyhow!("User email and id are required"));
        }

        // Create user account based on protocol
        let account_message = match protocol {
            "vless" => {
                let account = VlessAccount {
                    id: user_id.clone(),
                    encryption: "none".to_string(),
                    flow: "".to_string(), // Empty flow for basic VLESS
                };
                TypedMessage {
                    r#type: "xray.proxy.vless.Account".to_string(),
                    value: account.encode_to_vec(),
                }
            },
            "vmess" => {
                let account = VmessAccount {
                    id: user_id,
                    security_settings: None,
                    tests_enabled: "".to_string(),
                };
                TypedMessage {
                    r#type: "xray.proxy.vmess.Account".to_string(),
                    value: account.encode_to_vec(),
                }
            },
            "trojan" => {
                let account = TrojanAccount {
                    password: user_id, // For trojan, use password instead of UUID
                };
                TypedMessage {
                    r#type: "xray.proxy.trojan.Account".to_string(),
                    value: account.encode_to_vec(),
                }
            },
            _ => {
                return Err(anyhow!("Unsupported protocol for user: {}", protocol));
            }
        };

        // Create user protobuf message
        let user_proto = User {
            level: level,
            email: email.clone(),
            account: Some(account_message),
        };

        // Build the AddUserOperation
        let add_user_op = AddUserOperation {
            user: Some(user_proto),
        };

        // Wrap the operation in Xray's TypedMessage envelope
        let typed_message = TypedMessage {
            r#type: "xray.app.proxyman.command.AddUserOperation".to_string(),
            value: add_user_op.encode_to_vec(),
        };

        // Build the AlterInboundRequest
        let request = Request::new(AlterInboundRequest {
            tag: inbound_tag.to_string(),
            operation: Some(typed_message),
        });

        tracing::info!("Sending AlterInboundRequest to add user '{}' to inbound '{}'", email, inbound_tag);

        let mut handler_client = self.client.handler();
        match handler_client.alter_inbound(request).await {
            Ok(response) => {
                let _response_inner = response.into_inner();
                tracing::info!("Successfully added user '{}' to inbound '{}'", email, inbound_tag);
                Ok(())
            }
            Err(e) => {
                tracing::error!("gRPC error adding user '{}' to inbound '{}': status={}, message={}",
                    email, inbound_tag, e.code(), e.message());
                Err(anyhow!("Failed to add user '{}' to inbound '{}': {}", email, inbound_tag, e))
            }
        }
    }
|
||||||
|
|
||||||
|
/// Remove user from inbound
|
||||||
|
pub async fn remove_user(&self, inbound_tag: &str, email: &str) -> Result<()> {
|
||||||
|
tracing::info!("Removing user '{}' from inbound '{}' on Xray server at {}", email, inbound_tag, self.endpoint);
|
||||||
|
|
||||||
|
// Build the RemoveUserOperation
|
||||||
|
let remove_user_op = RemoveUserOperation {
|
||||||
|
email: email.to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let typed_message = TypedMessage {
|
||||||
|
r#type: "xray.app.proxyman.command.RemoveUserOperation".to_string(),
|
||||||
|
value: remove_user_op.encode_to_vec(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let request = Request::new(AlterInboundRequest {
|
||||||
|
tag: inbound_tag.to_string(),
|
||||||
|
operation: Some(typed_message),
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut handler_client = self.client.handler();
|
||||||
|
match handler_client.alter_inbound(request).await {
|
||||||
|
Ok(_) => {
|
||||||
|
tracing::info!("Successfully removed user '{}' from inbound '{}'", email, inbound_tag);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to remove user '{}' from inbound '{}': {}", email, inbound_tag, e);
|
||||||
|
Err(anyhow!("Failed to remove user '{}' from inbound '{}': {}", email, inbound_tag, e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
137
src/web/handlers/certificates.rs
Normal file
137
src/web/handlers/certificates.rs
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
use axum::{
|
||||||
|
extract::{Path, State},
|
||||||
|
http::StatusCode,
|
||||||
|
response::Json,
|
||||||
|
Json as JsonExtractor,
|
||||||
|
};
|
||||||
|
use uuid::Uuid;
|
||||||
|
use crate::{
|
||||||
|
database::{
|
||||||
|
entities::certificate,
|
||||||
|
repository::CertificateRepository,
|
||||||
|
},
|
||||||
|
services::certificates::CertificateService,
|
||||||
|
web::AppState,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// List all certificates
|
||||||
|
pub async fn list_certificates(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
) -> Result<Json<Vec<certificate::CertificateResponse>>, StatusCode> {
|
||||||
|
let repo = CertificateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_all().await {
|
||||||
|
Ok(certificates) => {
|
||||||
|
let responses: Vec<certificate::CertificateResponse> = certificates
|
||||||
|
.into_iter()
|
||||||
|
.map(|c| c.into())
|
||||||
|
.collect();
|
||||||
|
Ok(Json(responses))
|
||||||
|
}
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get certificate by ID
|
||||||
|
pub async fn get_certificate(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<certificate::CertificateResponse>, StatusCode> {
|
||||||
|
let repo = CertificateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_by_id(id).await {
|
||||||
|
Ok(Some(certificate)) => Ok(Json(certificate.into())),
|
||||||
|
Ok(None) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get certificate details with PEM data by ID
|
||||||
|
pub async fn get_certificate_details(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<certificate::CertificateDetailsResponse>, StatusCode> {
|
||||||
|
let repo = CertificateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_by_id(id).await {
|
||||||
|
Ok(Some(certificate)) => Ok(Json(certificate.into())),
|
||||||
|
Ok(None) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create new certificate
|
||||||
|
pub async fn create_certificate(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
JsonExtractor(cert_data): JsonExtractor<certificate::CreateCertificateDto>,
|
||||||
|
) -> Result<Json<certificate::CertificateResponse>, StatusCode> {
|
||||||
|
tracing::info!("Creating certificate: {:?}", cert_data);
|
||||||
|
let repo = CertificateRepository::new(app_state.db.connection().clone());
|
||||||
|
let cert_service = CertificateService::new();
|
||||||
|
|
||||||
|
// Generate certificate based on type
|
||||||
|
let (cert_pem, private_key) = match cert_data.cert_type.as_str() {
|
||||||
|
"self_signed" => {
|
||||||
|
cert_service.generate_self_signed(&cert_data.domain).await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
||||||
|
}
|
||||||
|
_ => return Err(StatusCode::BAD_REQUEST),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create certificate with generated data
|
||||||
|
let mut create_dto = cert_data;
|
||||||
|
create_dto.certificate_pem = cert_pem;
|
||||||
|
create_dto.private_key = private_key;
|
||||||
|
|
||||||
|
match repo.create(create_dto).await {
|
||||||
|
Ok(certificate) => Ok(Json(certificate.into())),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update certificate
|
||||||
|
pub async fn update_certificate(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
JsonExtractor(cert_data): JsonExtractor<certificate::UpdateCertificateDto>,
|
||||||
|
) -> Result<Json<certificate::CertificateResponse>, StatusCode> {
|
||||||
|
let repo = CertificateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.update(id, cert_data).await {
|
||||||
|
Ok(certificate) => Ok(Json(certificate.into())),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete certificate
|
||||||
|
pub async fn delete_certificate(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<StatusCode, StatusCode> {
|
||||||
|
let repo = CertificateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.delete(id).await {
|
||||||
|
Ok(true) => Ok(StatusCode::NO_CONTENT),
|
||||||
|
Ok(false) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get certificates expiring soon
|
||||||
|
pub async fn get_expiring_certificates(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
) -> Result<Json<Vec<certificate::CertificateResponse>>, StatusCode> {
|
||||||
|
let repo = CertificateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
// Get certificates expiring in next 30 days
|
||||||
|
match repo.find_expiring_soon(30).await {
|
||||||
|
Ok(certificates) => {
|
||||||
|
let responses: Vec<certificate::CertificateResponse> = certificates
|
||||||
|
.into_iter()
|
||||||
|
.map(|c| c.into())
|
||||||
|
.collect();
|
||||||
|
Ok(Json(responses))
|
||||||
|
}
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
9
src/web/handlers/mod.rs
Normal file
9
src/web/handlers/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
// HTTP handler modules, one per resource area.
pub mod users;
pub mod servers;
pub mod certificates;
pub mod templates;

// Re-export all handlers flat so routes can refer to them directly.
pub use users::*;
pub use servers::*;
pub use certificates::*;
pub use templates::*;
|
||||||
575
src/web/handlers/servers.rs
Normal file
575
src/web/handlers/servers.rs
Normal file
@@ -0,0 +1,575 @@
|
|||||||
|
use axum::{
|
||||||
|
extract::{Path, State},
|
||||||
|
http::StatusCode,
|
||||||
|
response::Json,
|
||||||
|
Json as JsonExtractor,
|
||||||
|
};
|
||||||
|
use uuid::Uuid;
|
||||||
|
use crate::{
|
||||||
|
database::{
|
||||||
|
entities::{server, server_inbound},
|
||||||
|
repository::{ServerRepository, ServerInboundRepository, InboundTemplateRepository, CertificateRepository, InboundUsersRepository},
|
||||||
|
},
|
||||||
|
web::AppState,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// List all servers
|
||||||
|
pub async fn list_servers(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
) -> Result<Json<Vec<server::ServerResponse>>, StatusCode> {
|
||||||
|
let repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_all().await {
|
||||||
|
Ok(servers) => {
|
||||||
|
let responses: Vec<server::ServerResponse> = servers
|
||||||
|
.into_iter()
|
||||||
|
.map(|s| s.into())
|
||||||
|
.collect();
|
||||||
|
Ok(Json(responses))
|
||||||
|
}
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get server by ID
|
||||||
|
pub async fn get_server(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<server::ServerResponse>, StatusCode> {
|
||||||
|
let repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_by_id(id).await {
|
||||||
|
Ok(Some(server)) => Ok(Json(server.into())),
|
||||||
|
Ok(None) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create new server
|
||||||
|
pub async fn create_server(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Json(server_data): Json<server::CreateServerDto>,
|
||||||
|
) -> Result<Json<server::ServerResponse>, StatusCode> {
|
||||||
|
let repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.create(server_data).await {
|
||||||
|
Ok(server) => Ok(Json(server.into())),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update server
|
||||||
|
pub async fn update_server(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
Json(server_data): Json<server::UpdateServerDto>,
|
||||||
|
) -> Result<Json<server::ServerResponse>, StatusCode> {
|
||||||
|
let repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.update(id, server_data).await {
|
||||||
|
Ok(server) => Ok(Json(server.into())),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete server
|
||||||
|
pub async fn delete_server(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<StatusCode, StatusCode> {
|
||||||
|
let repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.delete(id).await {
|
||||||
|
Ok(true) => Ok(StatusCode::NO_CONTENT),
|
||||||
|
Ok(false) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Test server connection
|
||||||
|
pub async fn test_server_connection(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||||
|
let repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
let server = match repo.find_by_id(id).await {
|
||||||
|
Ok(Some(server)) => server,
|
||||||
|
Ok(None) => return Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
};
|
||||||
|
|
||||||
|
let endpoint = server.get_grpc_endpoint();
|
||||||
|
|
||||||
|
match app_state.xray_service.test_connection(id, &endpoint).await {
|
||||||
|
Ok(connected) => {
|
||||||
|
// Update server status based on connection test
|
||||||
|
let new_status = if connected { "online" } else { "offline" };
|
||||||
|
let update_dto = server::UpdateServerDto {
|
||||||
|
name: None,
|
||||||
|
hostname: None,
|
||||||
|
grpc_port: None,
|
||||||
|
api_credentials: None,
|
||||||
|
default_certificate_id: None,
|
||||||
|
status: Some(new_status.to_string()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let _ = repo.update(id, update_dto).await; // Ignore update errors for now
|
||||||
|
|
||||||
|
Ok(Json(serde_json::json!({
|
||||||
|
"connected": connected,
|
||||||
|
"endpoint": endpoint
|
||||||
|
})))
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
// Update status to error
|
||||||
|
let update_dto = server::UpdateServerDto {
|
||||||
|
name: None,
|
||||||
|
hostname: None,
|
||||||
|
grpc_port: None,
|
||||||
|
api_credentials: None,
|
||||||
|
default_certificate_id: None,
|
||||||
|
status: Some("error".to_string()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let _ = repo.update(id, update_dto).await; // Ignore update errors for now
|
||||||
|
|
||||||
|
Ok(Json(serde_json::json!({
|
||||||
|
"connected": false,
|
||||||
|
"endpoint": endpoint,
|
||||||
|
"error": e.to_string()
|
||||||
|
})))
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get server statistics
|
||||||
|
pub async fn get_server_stats(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||||
|
let repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
let server = match repo.find_by_id(id).await {
|
||||||
|
Ok(Some(server)) => server,
|
||||||
|
Ok(None) => return Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
};
|
||||||
|
|
||||||
|
let endpoint = server.get_grpc_endpoint();
|
||||||
|
|
||||||
|
match app_state.xray_service.get_stats(id, &endpoint).await {
|
||||||
|
Ok(stats) => Ok(Json(stats)),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List server inbounds
|
||||||
|
pub async fn list_server_inbounds(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(server_id): Path<Uuid>,
|
||||||
|
) -> Result<Json<Vec<server_inbound::ServerInboundResponse>>, StatusCode> {
|
||||||
|
let repo = ServerInboundRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_by_server_id_with_template(server_id).await {
|
||||||
|
Ok(responses) => Ok(Json(responses)),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a server inbound.
///
/// Flow: validate server and template exist → create the DB row (DB is the
/// source of truth) → fire a sync event → best-effort push to the live
/// xray server when the inbound is active. A failed xray push does NOT
/// fail the request; the sync loop can reconcile later.
pub async fn create_server_inbound(
    State(app_state): State<AppState>,
    Path(server_id): Path<Uuid>,
    JsonExtractor(inbound_data): JsonExtractor<server_inbound::CreateServerInboundDto>,
) -> Result<Json<server_inbound::ServerInboundResponse>, StatusCode> {
    tracing::info!("Creating server inbound for server {}: {:?}", server_id, inbound_data);

    let server_repo = ServerRepository::new(app_state.db.connection().clone());
    let inbound_repo = ServerInboundRepository::new(app_state.db.connection().clone());
    let template_repo = InboundTemplateRepository::new(app_state.db.connection().clone());
    let cert_repo = CertificateRepository::new(app_state.db.connection().clone());

    // Get server info — 404 if the path's server does not exist.
    let server = match server_repo.find_by_id(server_id).await {
        Ok(Some(server)) => server,
        Ok(None) => return Err(StatusCode::NOT_FOUND),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Get template info — a missing template is a client error (400),
    // since the template id came from the request body.
    let template = match template_repo.find_by_id(inbound_data.template_id).await {
        Ok(Some(template)) => template,
        Ok(None) => return Err(StatusCode::BAD_REQUEST),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Create inbound in database first with protocol-aware tag.
    let inbound = match inbound_repo.create_with_protocol(server_id, inbound_data, &template.protocol).await {
        Ok(inbound) => {
            // Send sync event for immediate synchronization.
            crate::services::events::send_sync_event(
                crate::services::events::SyncEvent::InboundChanged(server_id)
            );
            inbound
        },
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Try to create inbound on xray server only if it's active.
    let endpoint = server.get_grpc_endpoint();
    if inbound.is_active {
        // Get certificate data if a certificate is specified. Any certificate
        // lookup problem (missing row or DB error) degrades to no-TLS rather
        // than failing the request.
        let (cert_pem, key_pem) = if let Some(cert_id) = inbound.certificate_id {
            match cert_repo.find_by_id(cert_id).await {
                Ok(Some(cert)) => {
                    tracing::info!("Using certificate {} for inbound {}", cert.domain, inbound.tag);
                    (Some(cert.certificate_pem()), Some(cert.private_key_pem()))
                },
                Ok(None) => {
                    tracing::warn!("Certificate {} not found, creating inbound without TLS", cert_id);
                    (None, None)
                },
                Err(e) => {
                    tracing::error!("Error fetching certificate {}: {}", cert_id, e);
                    (None, None)
                }
            }
        } else {
            tracing::info!("No certificate specified for inbound {}, creating without TLS", inbound.tag);
            (None, None)
        };

        match app_state.xray_service.create_inbound_with_certificate(
            server_id,
            &endpoint,
            &inbound.tag,
            // Per-inbound port override wins over the template default.
            inbound.port_override.unwrap_or(template.default_port),
            &template.protocol,
            template.base_settings.clone(),
            template.stream_settings.clone(),
            cert_pem.as_deref(),
            key_pem.as_deref(),
        ).await {
            Ok(_) => {
                tracing::info!("Successfully created inbound {} on xray server {}", inbound.tag, endpoint);
            },
            Err(e) => {
                tracing::error!("Failed to create inbound on xray server {}: {}", endpoint, e);
                // Note: We don't fail the request since the inbound is already in DB
                // The user can manually sync or retry later
            }
        }
    } else {
        tracing::info!("Inbound {} created as inactive, skipping xray server creation", inbound.tag);
    }

    Ok(Json(inbound.into()))
}
|
||||||
|
|
||||||
|
/// Update a server inbound.
///
/// When the `is_active` flag flips, the handler mirrors the change on the
/// live xray server before persisting: active→inactive removes the inbound,
/// inactive→active recreates it from its template. Xray-side failures are
/// logged but never block the database update; a sync event is emitted
/// after the DB write so the sync loop can reconcile.
pub async fn update_server_inbound(
    State(app_state): State<AppState>,
    Path((server_id, inbound_id)): Path<(Uuid, Uuid)>,
    JsonExtractor(inbound_data): JsonExtractor<server_inbound::UpdateServerInboundDto>,
) -> Result<Json<server_inbound::ServerInboundResponse>, StatusCode> {
    tracing::info!("Updating server inbound {} for server {}: {:?}", inbound_id, server_id, inbound_data);

    let server_repo = ServerRepository::new(app_state.db.connection().clone());
    let inbound_repo = ServerInboundRepository::new(app_state.db.connection().clone());
    let template_repo = InboundTemplateRepository::new(app_state.db.connection().clone());
    let cert_repo = CertificateRepository::new(app_state.db.connection().clone());

    // Get server info.
    let server = match server_repo.find_by_id(server_id).await {
        Ok(Some(server)) => server,
        Ok(None) => return Err(StatusCode::NOT_FOUND),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Get current inbound state; an inbound that exists but belongs to a
    // different server is a 400 (path mismatch), not a 404.
    let current_inbound = match inbound_repo.find_by_id(inbound_id).await {
        Ok(Some(inbound)) if inbound.server_id == server_id => inbound,
        Ok(Some(_)) => return Err(StatusCode::BAD_REQUEST),
        Ok(None) => return Err(StatusCode::NOT_FOUND),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Check if is_active status is changing. An absent flag in the DTO
    // means "keep the current value".
    let old_is_active = current_inbound.is_active;
    let new_is_active = inbound_data.is_active.unwrap_or(old_is_active);
    let endpoint = server.get_grpc_endpoint();

    // Handle xray server changes based on active status change.
    if old_is_active && !new_is_active {
        // Becoming inactive - remove from xray server.
        tracing::info!("Inbound {} becoming inactive, removing from xray server {}", current_inbound.tag, endpoint);
        match app_state.xray_service.remove_inbound(server_id, &endpoint, &current_inbound.tag).await {
            Ok(_) => {
                tracing::info!("Successfully removed inbound {} from xray server", current_inbound.tag);
            },
            Err(e) => {
                tracing::error!("Failed to remove inbound {} from xray server: {}", current_inbound.tag, e);
                // Continue with database update even if xray removal fails
            }
        }
    } else if !old_is_active && new_is_active {
        // Becoming active - add to xray server.
        tracing::info!("Inbound {} becoming active, adding to xray server {}", current_inbound.tag, endpoint);

        // Get template info for recreation. A missing template here is a
        // server-side inconsistency (the inbound references it), hence 500.
        let template = match template_repo.find_by_id(current_inbound.template_id).await {
            Ok(Some(template)) => template,
            Ok(None) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
            Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
        };

        // Use updated port if provided, otherwise keep current; fall back to
        // the template default when neither specifies one.
        let port = inbound_data.port_override.unwrap_or(current_inbound.port_override.unwrap_or(template.default_port));

        // Get certificate data if certificate is specified (could be updated).
        // Certificate lookup problems degrade to no-TLS instead of failing.
        let certificate_id = inbound_data.certificate_id.or(current_inbound.certificate_id);
        let (cert_pem, key_pem) = if let Some(cert_id) = certificate_id {
            match cert_repo.find_by_id(cert_id).await {
                Ok(Some(cert)) => {
                    tracing::info!("Using certificate {} for inbound {}", cert.domain, current_inbound.tag);
                    (Some(cert.certificate_pem()), Some(cert.private_key_pem()))
                },
                Ok(None) => {
                    tracing::warn!("Certificate {} not found, creating inbound without TLS", cert_id);
                    (None, None)
                },
                Err(e) => {
                    tracing::error!("Error fetching certificate {}: {}", cert_id, e);
                    (None, None)
                }
            }
        } else {
            tracing::info!("No certificate specified for inbound {}, creating without TLS", current_inbound.tag);
            (None, None)
        };

        match app_state.xray_service.create_inbound_with_certificate(
            server_id,
            &endpoint,
            &current_inbound.tag,
            port,
            &template.protocol,
            template.base_settings.clone(),
            template.stream_settings.clone(),
            cert_pem.as_deref(),
            key_pem.as_deref(),
        ).await {
            Ok(_) => {
                tracing::info!("Successfully added inbound {} to xray server", current_inbound.tag);
            },
            Err(e) => {
                tracing::error!("Failed to add inbound {} to xray server: {}", current_inbound.tag, e);
                // Continue with database update even if xray creation fails
            }
        }
    }

    // Update database — the authoritative record, written regardless of
    // the xray-side outcome above.
    match inbound_repo.update(inbound_id, inbound_data).await {
        Ok(updated_inbound) => {
            // Send sync event for immediate synchronization.
            crate::services::events::send_sync_event(
                crate::services::events::SyncEvent::InboundChanged(server_id)
            );
            Ok(Json(updated_inbound.into()))
        },
        Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
    }
}
|
||||||
|
|
||||||
|
/// Get server inbound by ID
|
||||||
|
pub async fn get_server_inbound(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path((server_id, inbound_id)): Path<(Uuid, Uuid)>,
|
||||||
|
) -> Result<Json<server_inbound::ServerInboundResponse>, StatusCode> {
|
||||||
|
let repo = ServerInboundRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
// Verify the inbound belongs to the server
|
||||||
|
match repo.find_by_id(inbound_id).await {
|
||||||
|
Ok(Some(inbound)) if inbound.server_id == server_id => {
|
||||||
|
Ok(Json(inbound.into()))
|
||||||
|
}
|
||||||
|
Ok(Some(_)) => Err(StatusCode::BAD_REQUEST),
|
||||||
|
Ok(None) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete a server inbound.
///
/// Attempts the xray-side removal first (best effort), then deletes the
/// database row and emits a sync event. A failed xray removal does not
/// block the database deletion.
pub async fn delete_server_inbound(
    State(app_state): State<AppState>,
    Path((server_id, inbound_id)): Path<(Uuid, Uuid)>,
) -> Result<StatusCode, StatusCode> {
    let server_repo = ServerRepository::new(app_state.db.connection().clone());
    let inbound_repo = ServerInboundRepository::new(app_state.db.connection().clone());

    // Get server and inbound info.
    let server = match server_repo.find_by_id(server_id).await {
        Ok(Some(server)) => server,
        Ok(None) => return Err(StatusCode::NOT_FOUND),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Verify the inbound belongs to the server; a mismatch is a 400.
    let inbound = match inbound_repo.find_by_id(inbound_id).await {
        Ok(Some(inbound)) if inbound.server_id == server_id => inbound,
        Ok(Some(_)) => return Err(StatusCode::BAD_REQUEST),
        Ok(None) => return Err(StatusCode::NOT_FOUND),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Try to remove inbound from xray server first.
    let endpoint = server.get_grpc_endpoint();
    match app_state.xray_service.remove_inbound(server_id, &endpoint, &inbound.tag).await {
        Ok(_) => {
            tracing::info!("Successfully removed inbound {} from xray server {}", inbound.tag, endpoint);
        },
        Err(e) => {
            tracing::error!("Failed to remove inbound from xray server {}: {}", endpoint, e);
            // Continue with database deletion even if xray removal fails
        }
    }

    // Delete from database.
    match inbound_repo.delete(inbound_id).await {
        Ok(true) => {
            // Send sync event for immediate synchronization.
            crate::services::events::send_sync_event(
                crate::services::events::SyncEvent::InboundChanged(server_id)
            );
            Ok(StatusCode::NO_CONTENT)
        },
        Ok(false) => Err(StatusCode::NOT_FOUND),
        Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
    }
}
|
||||||
|
|
||||||
|
/// Add user to server inbound (database only - sync will apply changes).
///
/// Accepts a loose JSON body ({"username": ..., "level": ...}); validates
/// server/inbound existence and username uniqueness on the inbound, writes
/// the DB row, and emits a sync event. The xray server itself is updated
/// asynchronously by the sync loop, not here.
pub async fn add_user_to_inbound(
    State(app_state): State<AppState>,
    Path((server_id, inbound_id)): Path<(Uuid, Uuid)>,
    JsonExtractor(user_data): JsonExtractor<serde_json::Value>,
) -> Result<StatusCode, StatusCode> {
    use crate::database::entities::inbound_users::CreateInboundUserDto;

    let server_repo = ServerRepository::new(app_state.db.connection().clone());
    let inbound_repo = ServerInboundRepository::new(app_state.db.connection().clone());

    // Get server and inbound to validate they exist.
    let _server = match server_repo.find_by_id(server_id).await {
        Ok(Some(server)) => server,
        Ok(None) => return Err(StatusCode::NOT_FOUND),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    let inbound = match inbound_repo.find_by_id(inbound_id).await {
        Ok(Some(inbound)) => inbound,
        Ok(None) => return Err(StatusCode::NOT_FOUND),
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Verify inbound belongs to server.
    if inbound.server_id != server_id {
        return Err(StatusCode::BAD_REQUEST);
    }

    // Extract user data from the untyped JSON body; a missing or non-numeric
    // level defaults to 0.
    let username = user_data["username"].as_str().unwrap_or("").to_string();
    let level = user_data["level"].as_u64().unwrap_or(0) as i32;

    if username.is_empty() {
        tracing::error!("Missing required user data: username");
        return Err(StatusCode::BAD_REQUEST);
    }

    // Create inbound user repository.
    let inbound_users_repo = InboundUsersRepository::new(app_state.db.connection().clone());

    // Check if username already exists on this inbound.
    // NOTE(review): a DB error here is treated as "does not exist"
    // (unwrap_or(false)) — confirm that is the intended failure mode.
    if inbound_users_repo.username_exists_on_inbound(&username, inbound_id).await.unwrap_or(false) {
        tracing::error!("Username '{}' already exists on inbound {}", username, inbound_id);
        return Err(StatusCode::CONFLICT);
    }

    // Create inbound user DTO.
    let inbound_user_dto = CreateInboundUserDto {
        server_inbound_id: inbound_id,
        username: username.clone(),
        level: Some(level),
    };

    // Create user in database.
    match inbound_users_repo.create(inbound_user_dto).await {
        Ok(created_user) => {
            tracing::info!("Inbound user created: username={} email={} server={} inbound={}",
                username, created_user.email, server_id, inbound_id);

            // Send sync event for immediate synchronization.
            crate::services::events::send_sync_event(
                crate::services::events::SyncEvent::UserAccessChanged(server_id)
            );

            tracing::info!("User will be synced to xray server immediately via event");
            Ok(StatusCode::CREATED)
        },
        Err(e) => {
            tracing::error!("Failed to create inbound user: {}", e);
            Err(StatusCode::INTERNAL_SERVER_ERROR)
        }
    }
}
|
||||||
|
|
||||||
|
/// Remove user from server inbound
|
||||||
|
pub async fn remove_user_from_inbound(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path((server_id, inbound_id, email)): Path<(Uuid, Uuid, String)>,
|
||||||
|
) -> Result<StatusCode, StatusCode> {
|
||||||
|
let server_repo = ServerRepository::new(app_state.db.connection().clone());
|
||||||
|
let inbound_repo = ServerInboundRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
// Get server and inbound
|
||||||
|
let server = match server_repo.find_by_id(server_id).await {
|
||||||
|
Ok(Some(server)) => server,
|
||||||
|
Ok(None) => return Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
};
|
||||||
|
|
||||||
|
let inbound = match inbound_repo.find_by_id(inbound_id).await {
|
||||||
|
Ok(Some(inbound)) => inbound,
|
||||||
|
Ok(None) => return Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Verify inbound belongs to server
|
||||||
|
if inbound.server_id != server_id {
|
||||||
|
return Err(StatusCode::BAD_REQUEST);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get inbound tag
|
||||||
|
let template_repo = InboundTemplateRepository::new(app_state.db.connection().clone());
|
||||||
|
let template = match template_repo.find_by_id(inbound.template_id).await {
|
||||||
|
Ok(Some(template)) => template,
|
||||||
|
Ok(None) => return Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
};
|
||||||
|
|
||||||
|
let inbound_tag = &inbound.tag;
|
||||||
|
|
||||||
|
// Remove user from xray server
|
||||||
|
match app_state.xray_service.remove_user(server_id, &format!("{}:{}", server.hostname, server.grpc_port), &inbound_tag, &email).await {
|
||||||
|
Ok(_) => {
|
||||||
|
tracing::info!("Successfully removed user {} from server {} inbound {}", email, server_id, inbound_id);
|
||||||
|
Ok(StatusCode::NO_CONTENT)
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to remove user {} from server {} inbound {}: {}", email, server_id, inbound_id, e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
88
src/web/handlers/templates.rs
Normal file
88
src/web/handlers/templates.rs
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
use axum::{
|
||||||
|
extract::{Path, State},
|
||||||
|
http::StatusCode,
|
||||||
|
response::Json,
|
||||||
|
Json as JsonExtractor,
|
||||||
|
};
|
||||||
|
use uuid::Uuid;
|
||||||
|
use crate::{
|
||||||
|
database::{
|
||||||
|
entities::inbound_template,
|
||||||
|
repository::InboundTemplateRepository,
|
||||||
|
},
|
||||||
|
web::AppState,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// List all inbound templates
|
||||||
|
pub async fn list_templates(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
) -> Result<Json<Vec<inbound_template::InboundTemplateResponse>>, StatusCode> {
|
||||||
|
let repo = InboundTemplateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_all().await {
|
||||||
|
Ok(templates) => {
|
||||||
|
let responses: Vec<inbound_template::InboundTemplateResponse> = templates
|
||||||
|
.into_iter()
|
||||||
|
.map(|t| t.into())
|
||||||
|
.collect();
|
||||||
|
Ok(Json(responses))
|
||||||
|
}
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get template by ID
|
||||||
|
pub async fn get_template(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<inbound_template::InboundTemplateResponse>, StatusCode> {
|
||||||
|
let repo = InboundTemplateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.find_by_id(id).await {
|
||||||
|
Ok(Some(template)) => Ok(Json(template.into())),
|
||||||
|
Ok(None) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create new template
|
||||||
|
pub async fn create_template(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
JsonExtractor(template_data): JsonExtractor<inbound_template::CreateInboundTemplateDto>,
|
||||||
|
) -> Result<Json<inbound_template::InboundTemplateResponse>, StatusCode> {
|
||||||
|
tracing::info!("Creating template: {:?}", template_data);
|
||||||
|
let repo = InboundTemplateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.create(template_data).await {
|
||||||
|
Ok(template) => Ok(Json(template.into())),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update template
|
||||||
|
pub async fn update_template(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
JsonExtractor(template_data): JsonExtractor<inbound_template::UpdateInboundTemplateDto>,
|
||||||
|
) -> Result<Json<inbound_template::InboundTemplateResponse>, StatusCode> {
|
||||||
|
let repo = InboundTemplateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.update(id, template_data).await {
|
||||||
|
Ok(template) => Ok(Json(template.into())),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete template
|
||||||
|
pub async fn delete_template(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<StatusCode, StatusCode> {
|
||||||
|
let repo = InboundTemplateRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
match repo.delete(id).await {
|
||||||
|
Ok(true) => Ok(StatusCode::NO_CONTENT),
|
||||||
|
Ok(false) => Err(StatusCode::NOT_FOUND),
|
||||||
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
|
}
|
||||||
|
}
|
||||||
206
src/web/handlers/users.rs
Normal file
206
src/web/handlers/users.rs
Normal file
@@ -0,0 +1,206 @@
|
|||||||
|
use axum::{
|
||||||
|
extract::{Path, Query, State},
|
||||||
|
http::StatusCode,
|
||||||
|
response::Json,
|
||||||
|
Json as JsonExtractor,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::{json, Value};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::user::{CreateUserDto, UpdateUserDto, Model as UserModel};
|
||||||
|
use crate::database::repository::UserRepository;
|
||||||
|
use crate::web::AppState;
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
pub struct PaginationQuery {
|
||||||
|
#[serde(default = "default_page")]
|
||||||
|
pub page: u64,
|
||||||
|
#[serde(default = "default_per_page")]
|
||||||
|
pub per_page: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
pub struct SearchQuery {
|
||||||
|
pub q: Option<String>,
|
||||||
|
#[serde(flatten)]
|
||||||
|
pub pagination: PaginationQuery,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct UsersResponse {
|
||||||
|
pub users: Vec<UserResponse>,
|
||||||
|
pub total: u64,
|
||||||
|
pub page: u64,
|
||||||
|
pub per_page: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct UserResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub comment: Option<String>,
|
||||||
|
pub telegram_id: Option<i64>,
|
||||||
|
pub created_at: chrono::DateTime<chrono::Utc>,
|
||||||
|
pub updated_at: chrono::DateTime<chrono::Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_page() -> u64 { 1 }
|
||||||
|
fn default_per_page() -> u64 { 20 }
|
||||||
|
|
||||||
|
impl From<UserModel> for UserResponse {
|
||||||
|
fn from(user: UserModel) -> Self {
|
||||||
|
Self {
|
||||||
|
id: user.id,
|
||||||
|
name: user.name,
|
||||||
|
comment: user.comment,
|
||||||
|
telegram_id: user.telegram_id,
|
||||||
|
created_at: user.created_at,
|
||||||
|
updated_at: user.updated_at,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all users with pagination
|
||||||
|
pub async fn get_users(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Query(query): Query<PaginationQuery>,
|
||||||
|
) -> Result<Json<UsersResponse>, StatusCode> {
|
||||||
|
let repo = UserRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
let users = repo.get_all(query.page, query.per_page)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
let total = repo.count()
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
let response = UsersResponse {
|
||||||
|
users: users.into_iter().map(UserResponse::from).collect(),
|
||||||
|
total,
|
||||||
|
page: query.page,
|
||||||
|
per_page: query.per_page,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Json(response))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Search users by name
|
||||||
|
pub async fn search_users(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Query(query): Query<SearchQuery>,
|
||||||
|
) -> Result<Json<UsersResponse>, StatusCode> {
|
||||||
|
let repo = UserRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
let users = if let Some(search_query) = query.q {
|
||||||
|
repo.search_by_name(&search_query, query.pagination.page, query.pagination.per_page)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
||||||
|
} else {
|
||||||
|
repo.get_all(query.pagination.page, query.pagination.per_page)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
||||||
|
};
|
||||||
|
|
||||||
|
let total = repo.count()
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
let response = UsersResponse {
|
||||||
|
users: users.into_iter().map(UserResponse::from).collect(),
|
||||||
|
total,
|
||||||
|
page: query.pagination.page,
|
||||||
|
per_page: query.pagination.per_page,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Json(response))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get user by ID
|
||||||
|
pub async fn get_user(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<UserResponse>, StatusCode> {
|
||||||
|
let repo = UserRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
let user = repo.get_by_id(id)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
match user {
|
||||||
|
Some(user) => Ok(Json(UserResponse::from(user))),
|
||||||
|
None => Err(StatusCode::NOT_FOUND),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new user
|
||||||
|
pub async fn create_user(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
JsonExtractor(dto): JsonExtractor<CreateUserDto>,
|
||||||
|
) -> Result<Json<UserResponse>, StatusCode> {
|
||||||
|
let repo = UserRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
// Check if telegram ID is already in use
|
||||||
|
if let Some(telegram_id) = dto.telegram_id {
|
||||||
|
let exists = repo.telegram_id_exists(telegram_id)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
if exists {
|
||||||
|
return Err(StatusCode::CONFLICT);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let user = repo.create(dto)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
Ok(Json(UserResponse::from(user)))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update user by ID
|
||||||
|
pub async fn update_user(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
JsonExtractor(dto): JsonExtractor<UpdateUserDto>,
|
||||||
|
) -> Result<Json<UserResponse>, StatusCode> {
|
||||||
|
let repo = UserRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
// Check if telegram ID is already in use by another user
|
||||||
|
if let Some(telegram_id) = dto.telegram_id {
|
||||||
|
if let Some(existing_user) = repo.get_by_telegram_id(telegram_id).await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? {
|
||||||
|
if existing_user.id != id {
|
||||||
|
return Err(StatusCode::CONFLICT);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let user = repo.update(id, dto)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
match user {
|
||||||
|
Some(user) => Ok(Json(UserResponse::from(user))),
|
||||||
|
None => Err(StatusCode::NOT_FOUND),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete user by ID
|
||||||
|
pub async fn delete_user(
|
||||||
|
State(app_state): State<AppState>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<Value>, StatusCode> {
|
||||||
|
let repo = UserRepository::new(app_state.db.connection().clone());
|
||||||
|
|
||||||
|
let deleted = repo.delete(id)
|
||||||
|
.await
|
||||||
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||||
|
|
||||||
|
if deleted {
|
||||||
|
Ok(Json(json!({ "message": "User deleted successfully" })))
|
||||||
|
} else {
|
||||||
|
Err(StatusCode::NOT_FOUND)
|
||||||
|
}
|
||||||
|
}
|
||||||
70
src/web/mod.rs
Normal file
70
src/web/mod.rs
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use axum::{
|
||||||
|
Router,
|
||||||
|
routing::get,
|
||||||
|
http::StatusCode,
|
||||||
|
response::Json,
|
||||||
|
serve,
|
||||||
|
};
|
||||||
|
use serde_json::{json, Value};
|
||||||
|
use std::net::SocketAddr;
|
||||||
|
use tokio::net::TcpListener;
|
||||||
|
use tower_http::cors::CorsLayer;
|
||||||
|
use tower_http::services::ServeDir;
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
|
use crate::config::WebConfig;
|
||||||
|
use crate::database::DatabaseManager;
|
||||||
|
use crate::services::XrayService;
|
||||||
|
|
||||||
|
pub mod handlers;
|
||||||
|
pub mod routes;
|
||||||
|
|
||||||
|
use routes::api_routes;
|
||||||
|
|
||||||
|
/// Application state shared across handlers
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct AppState {
|
||||||
|
pub db: DatabaseManager,
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub config: WebConfig,
|
||||||
|
pub xray_service: XrayService,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Start the web server
|
||||||
|
pub async fn start_server(db: DatabaseManager, config: WebConfig) -> Result<()> {
|
||||||
|
let xray_service = XrayService::new();
|
||||||
|
|
||||||
|
let app_state = AppState {
|
||||||
|
db,
|
||||||
|
config: config.clone(),
|
||||||
|
xray_service,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Serve static files
|
||||||
|
let serve_dir = ServeDir::new("static");
|
||||||
|
|
||||||
|
let app = Router::new()
|
||||||
|
.route("/health", get(health_check))
|
||||||
|
.nest("/api", api_routes())
|
||||||
|
.nest_service("/", serve_dir)
|
||||||
|
.layer(CorsLayer::permissive())
|
||||||
|
.with_state(app_state);
|
||||||
|
|
||||||
|
let addr: SocketAddr = format!("{}:{}", config.host, config.port).parse()?;
|
||||||
|
info!("Starting web server on {}", addr);
|
||||||
|
|
||||||
|
let listener = TcpListener::bind(&addr).await?;
|
||||||
|
serve(listener, app).await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Health check endpoint
|
||||||
|
async fn health_check() -> Result<Json<Value>, StatusCode> {
|
||||||
|
Ok(Json(json!({
|
||||||
|
"status": "ok",
|
||||||
|
"service": "xray-admin",
|
||||||
|
"version": env!("CARGO_PKG_VERSION")
|
||||||
|
})))
|
||||||
|
}
|
||||||
27
src/web/routes/mod.rs
Normal file
27
src/web/routes/mod.rs
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
use axum::{
|
||||||
|
Router,
|
||||||
|
routing::get,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::web::{AppState, handlers};
|
||||||
|
|
||||||
|
pub mod servers;
|
||||||
|
|
||||||
|
/// Create API routes
|
||||||
|
pub fn api_routes() -> Router<AppState> {
|
||||||
|
Router::new()
|
||||||
|
.nest("/users", user_routes())
|
||||||
|
.nest("/servers", servers::server_routes())
|
||||||
|
.nest("/certificates", servers::certificate_routes())
|
||||||
|
.nest("/templates", servers::template_routes())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// User management routes
|
||||||
|
fn user_routes() -> Router<AppState> {
|
||||||
|
Router::new()
|
||||||
|
.route("/", get(handlers::get_users).post(handlers::create_user))
|
||||||
|
.route("/search", get(handlers::search_users))
|
||||||
|
.route("/:id", get(handlers::get_user)
|
||||||
|
.put(handlers::update_user)
|
||||||
|
.delete(handlers::delete_user))
|
||||||
|
}
|
||||||
38
src/web/routes/servers.rs
Normal file
38
src/web/routes/servers.rs
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
use axum::{
|
||||||
|
routing::{get, post},
|
||||||
|
Router,
|
||||||
|
};
|
||||||
|
use crate::{
|
||||||
|
web::{AppState, handlers},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn server_routes() -> Router<AppState> {
|
||||||
|
Router::new()
|
||||||
|
// Server management
|
||||||
|
.route("/", get(handlers::list_servers).post(handlers::create_server))
|
||||||
|
.route("/:id", get(handlers::get_server).put(handlers::update_server).delete(handlers::delete_server))
|
||||||
|
.route("/:id/test", post(handlers::test_server_connection))
|
||||||
|
.route("/:id/stats", get(handlers::get_server_stats))
|
||||||
|
|
||||||
|
// Server inbounds
|
||||||
|
.route("/:server_id/inbounds", get(handlers::list_server_inbounds).post(handlers::create_server_inbound))
|
||||||
|
.route("/:server_id/inbounds/:inbound_id", get(handlers::get_server_inbound).put(handlers::update_server_inbound).delete(handlers::delete_server_inbound))
|
||||||
|
|
||||||
|
// User management for inbounds
|
||||||
|
.route("/:server_id/inbounds/:inbound_id/users", post(handlers::add_user_to_inbound))
|
||||||
|
.route("/:server_id/inbounds/:inbound_id/users/:email", axum::routing::delete(handlers::remove_user_from_inbound))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn certificate_routes() -> Router<AppState> {
|
||||||
|
Router::new()
|
||||||
|
.route("/", get(handlers::list_certificates).post(handlers::create_certificate))
|
||||||
|
.route("/:id", get(handlers::get_certificate).put(handlers::update_certificate).delete(handlers::delete_certificate))
|
||||||
|
.route("/:id/details", get(handlers::get_certificate_details))
|
||||||
|
.route("/expiring", get(handlers::get_expiring_certificates))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn template_routes() -> Router<AppState> {
|
||||||
|
Router::new()
|
||||||
|
.route("/", get(handlers::list_templates).post(handlers::create_template))
|
||||||
|
.route("/:id", get(handlers::get_template).put(handlers::update_template).delete(handlers::delete_template))
|
||||||
|
}
|
||||||
1314
static/admin.html
Normal file
1314
static/admin.html
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,239 +0,0 @@
|
|||||||
/* static/admin/css/main.css */
|
|
||||||
|
|
||||||
/* Bulk Action Section Styling */
|
|
||||||
.bulk-actions-section {
|
|
||||||
background: linear-gradient(135deg, #f8f9fa 0%, #e9ecef 100%);
|
|
||||||
border: 1px solid #dee2e6;
|
|
||||||
border-left: 4px solid #007cba;
|
|
||||||
border-radius: 8px;
|
|
||||||
padding: 20px;
|
|
||||||
margin: 20px 0;
|
|
||||||
box-shadow: 0 2px 4px rgba(0,0,0,0.05);
|
|
||||||
}
|
|
||||||
|
|
||||||
.bulk-actions-section h3 {
|
|
||||||
color: #007cba;
|
|
||||||
margin-top: 0;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 8px;
|
|
||||||
font-size: 18px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.bulk-actions-section p {
|
|
||||||
color: #6c757d;
|
|
||||||
margin-bottom: 15px;
|
|
||||||
line-height: 1.5;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Action Button Styles */
|
|
||||||
.server-action-btn, .bulk-action-btn {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 8px;
|
|
||||||
padding: 10px 15px;
|
|
||||||
border-radius: 6px;
|
|
||||||
text-decoration: none;
|
|
||||||
font-weight: 500;
|
|
||||||
font-size: 14px;
|
|
||||||
border: none;
|
|
||||||
cursor: pointer;
|
|
||||||
transition: all 0.3s ease;
|
|
||||||
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
|
||||||
position: relative;
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-action-btn:before, .bulk-action-btn:before {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
top: 0;
|
|
||||||
left: -100%;
|
|
||||||
width: 100%;
|
|
||||||
height: 100%;
|
|
||||||
background: linear-gradient(90deg, transparent, rgba(255,255,255,0.2), transparent);
|
|
||||||
transition: left 0.5s;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-action-btn:hover:before, .bulk-action-btn:hover:before {
|
|
||||||
left: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-action-btn:hover, .bulk-action-btn:hover {
|
|
||||||
transform: translateY(-2px);
|
|
||||||
box-shadow: 0 4px 12px rgba(0,0,0,0.15);
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Specific button colors */
|
|
||||||
.btn-move-clients {
|
|
||||||
background-color: #007cba;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-move-clients:hover {
|
|
||||||
background-color: #005a8b !important;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-purge-users {
|
|
||||||
background-color: #dc3545;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-purge-users:hover {
|
|
||||||
background-color: #c82333 !important;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Server list action buttons */
|
|
||||||
.field-server_actions {
|
|
||||||
min-width: 160px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.field-server_actions .server-action-btn {
|
|
||||||
padding: 5px 8px;
|
|
||||||
font-size: 11px;
|
|
||||||
gap: 4px;
|
|
||||||
margin: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Server statistics section */
|
|
||||||
.server-stats-section {
|
|
||||||
background-color: #e8f4fd;
|
|
||||||
border: 1px solid #bee5eb;
|
|
||||||
border-radius: 6px;
|
|
||||||
padding: 12px;
|
|
||||||
margin: 15px 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-stats-grid {
|
|
||||||
display: flex;
|
|
||||||
gap: 20px;
|
|
||||||
flex-wrap: wrap;
|
|
||||||
align-items: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-item {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 5px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-label {
|
|
||||||
color: #495057;
|
|
||||||
font-weight: 500;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-value {
|
|
||||||
color: #007cba;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Tip section styling */
|
|
||||||
.tip-section {
|
|
||||||
background-color: rgba(255, 243, 205, 0.8);
|
|
||||||
border-left: 4px solid #ffc107;
|
|
||||||
border-radius: 4px;
|
|
||||||
padding: 12px;
|
|
||||||
margin-top: 15px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tip-section small {
|
|
||||||
color: #856404;
|
|
||||||
line-height: 1.4;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Loading states */
|
|
||||||
.server-action-btn.loading {
|
|
||||||
pointer-events: none;
|
|
||||||
opacity: 0.7;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-action-btn.loading:after {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
width: 16px;
|
|
||||||
height: 16px;
|
|
||||||
margin: auto;
|
|
||||||
border: 2px solid transparent;
|
|
||||||
border-top-color: #ffffff;
|
|
||||||
border-radius: 50%;
|
|
||||||
animation: spin 1s linear infinite;
|
|
||||||
}
|
|
||||||
|
|
||||||
@keyframes spin {
|
|
||||||
0% { transform: rotate(0deg); }
|
|
||||||
100% { transform: rotate(360deg); }
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Responsive design */
|
|
||||||
@media (max-width: 768px) {
|
|
||||||
.bulk-actions-section {
|
|
||||||
padding: 15px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-action-btn, .bulk-action-btn {
|
|
||||||
width: 100%;
|
|
||||||
justify-content: center;
|
|
||||||
margin-bottom: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-stats-grid {
|
|
||||||
flex-direction: column;
|
|
||||||
align-items: flex-start;
|
|
||||||
gap: 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.field-server_actions > div {
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
.field-server_actions .server-action-btn {
|
|
||||||
width: 100%;
|
|
||||||
justify-content: center;
|
|
||||||
margin: 2px 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (max-width: 480px) {
|
|
||||||
.bulk-actions-section h3 {
|
|
||||||
font-size: 16px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-action-btn, .bulk-action-btn {
|
|
||||||
font-size: 13px;
|
|
||||||
padding: 8px 12px;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Dark mode support */
|
|
||||||
@media (prefers-color-scheme: dark) {
|
|
||||||
.bulk-actions-section {
|
|
||||||
background: linear-gradient(135deg, #2d3748 0%, #4a5568 100%);
|
|
||||||
border-color: #4a5568;
|
|
||||||
color: #e2e8f0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.bulk-actions-section h3 {
|
|
||||||
color: #63b3ed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.bulk-actions-section p {
|
|
||||||
color: #a0aec0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-stats-section {
|
|
||||||
background-color: #2d3748;
|
|
||||||
border-color: #4a5568;
|
|
||||||
color: #e2e8f0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-label {
|
|
||||||
color: #a0aec0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.stat-value {
|
|
||||||
color: #63b3ed;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,342 +0,0 @@
|
|||||||
/* Custom styles for VPN admin interface */
|
|
||||||
|
|
||||||
/* Quick action buttons in server list */
|
|
||||||
.quick-actions .button {
|
|
||||||
display: inline-block;
|
|
||||||
padding: 4px 8px;
|
|
||||||
margin: 0 2px;
|
|
||||||
font-size: 11px;
|
|
||||||
line-height: 1.2;
|
|
||||||
text-decoration: none;
|
|
||||||
border: 1px solid #ccc;
|
|
||||||
border-radius: 3px;
|
|
||||||
background: linear-gradient(to bottom, #f8f8f8, #e8e8e8);
|
|
||||||
color: #333;
|
|
||||||
cursor: pointer;
|
|
||||||
white-space: nowrap;
|
|
||||||
min-width: 60px;
|
|
||||||
text-align: center;
|
|
||||||
box-shadow: 0 1px 2px rgba(0,0,0,0.1);
|
|
||||||
transition: all 0.2s ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.quick-actions .button:hover {
|
|
||||||
background: linear-gradient(to bottom, #e8e8e8, #d8d8d8);
|
|
||||||
border-color: #bbb;
|
|
||||||
color: #000;
|
|
||||||
text-decoration: none;
|
|
||||||
box-shadow: 0 2px 4px rgba(0,0,0,0.15);
|
|
||||||
}
|
|
||||||
|
|
||||||
.quick-actions .button:active {
|
|
||||||
background: linear-gradient(to bottom, #d8d8d8, #e8e8e8);
|
|
||||||
box-shadow: inset 0 1px 2px rgba(0,0,0,0.2);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Sync button - blue theme */
|
|
||||||
.quick-actions .button[href*="/sync/"] {
|
|
||||||
background: linear-gradient(to bottom, #4a90e2, #357abd);
|
|
||||||
border-color: #2968a3;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.quick-actions .button[href*="/sync/"]:hover {
|
|
||||||
background: linear-gradient(to bottom, #357abd, #2968a3);
|
|
||||||
border-color: #1f5582;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Move clients button - orange theme */
|
|
||||||
.quick-actions .button[href*="/move-clients/"] {
|
|
||||||
background: linear-gradient(to bottom, #f39c12, #e67e22);
|
|
||||||
border-color: #d35400;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
.quick-actions .button[href*="/move-clients/"]:hover {
|
|
||||||
background: linear-gradient(to bottom, #e67e22, #d35400);
|
|
||||||
border-color: #bf4f36;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Status indicators improvements */
|
|
||||||
.server-status-ok {
|
|
||||||
color: #27ae60;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-status-error {
|
|
||||||
color: #e74c3c;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-status-warning {
|
|
||||||
color: #f39c12;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Better spacing for list display */
|
|
||||||
.admin-object-tools {
|
|
||||||
margin-bottom: 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Improve readability of pre-formatted status */
|
|
||||||
.changelist-results pre {
|
|
||||||
font-size: 11px;
|
|
||||||
margin: 0;
|
|
||||||
padding: 2px 4px;
|
|
||||||
background: #f8f8f8;
|
|
||||||
border: 1px solid #ddd;
|
|
||||||
border-radius: 3px;
|
|
||||||
max-width: 200px;
|
|
||||||
overflow: hidden;
|
|
||||||
text-overflow: ellipsis;
|
|
||||||
white-space: nowrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Server admin compact styles */
|
|
||||||
.server-stats {
|
|
||||||
max-width: 120px;
|
|
||||||
min-width: 90px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-activity {
|
|
||||||
max-width: 140px;
|
|
||||||
min-width: 100px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-status {
|
|
||||||
max-width: 160px;
|
|
||||||
min-width: 120px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-comment {
|
|
||||||
max-width: 200px;
|
|
||||||
min-width: 100px;
|
|
||||||
word-wrap: break-word;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Compact server display elements */
|
|
||||||
.changelist-results .server-stats div,
|
|
||||||
.changelist-results .server-activity div,
|
|
||||||
.changelist-results .server-status div {
|
|
||||||
line-height: 1.3;
|
|
||||||
margin: 1px 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Status indicator colors */
|
|
||||||
.status-online {
|
|
||||||
color: #16a34a !important;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-error {
|
|
||||||
color: #dc2626 !important;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-warning {
|
|
||||||
color: #f97316 !important;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
.status-unavailable {
|
|
||||||
color: #f97316 !important;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Activity indicators */
|
|
||||||
.activity-high {
|
|
||||||
color: #16a34a !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.activity-medium {
|
|
||||||
color: #eab308 !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.activity-low {
|
|
||||||
color: #f97316 !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.activity-none {
|
|
||||||
color: #dc2626 !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* User stats indicators */
|
|
||||||
.users-active {
|
|
||||||
color: #16a34a !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.users-medium {
|
|
||||||
color: #eab308 !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.users-low {
|
|
||||||
color: #f97316 !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.users-none {
|
|
||||||
color: #9ca3af !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Table cell width constraints for better layout */
|
|
||||||
table.changelist-results th:nth-child(1), /* Name */
|
|
||||||
table.changelist-results td:nth-child(1) {
|
|
||||||
width: 180px;
|
|
||||||
max-width: 180px;
|
|
||||||
}
|
|
||||||
|
|
||||||
table.changelist-results th:nth-child(3), /* Comment */
|
|
||||||
table.changelist-results td:nth-child(3) {
|
|
||||||
width: 200px;
|
|
||||||
max-width: 200px;
|
|
||||||
}
|
|
||||||
|
|
||||||
table.changelist-results th:nth-child(4), /* User Stats */
|
|
||||||
table.changelist-results td:nth-child(4) {
|
|
||||||
width: 120px;
|
|
||||||
max-width: 120px;
|
|
||||||
}
|
|
||||||
|
|
||||||
table.changelist-results th:nth-child(5), /* Activity */
|
|
||||||
table.changelist-results td:nth-child(5) {
|
|
||||||
width: 140px;
|
|
||||||
max-width: 140px;
|
|
||||||
}
|
|
||||||
|
|
||||||
table.changelist-results th:nth-child(6), /* Status */
|
|
||||||
table.changelist-results td:nth-child(6) {
|
|
||||||
width: 160px;
|
|
||||||
max-width: 160px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Ensure text doesn't overflow in server admin */
|
|
||||||
.changelist-results td {
|
|
||||||
overflow: hidden;
|
|
||||||
text-overflow: ellipsis;
|
|
||||||
white-space: nowrap;
|
|
||||||
vertical-align: top;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Allow wrapping for multi-line server info displays */
|
|
||||||
.changelist-results td .server-stats,
|
|
||||||
.changelist-results td .server-activity,
|
|
||||||
.changelist-results td .server-status {
|
|
||||||
white-space: normal;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Server type icons */
|
|
||||||
.server-type-outline {
|
|
||||||
color: #3b82f6;
|
|
||||||
}
|
|
||||||
|
|
||||||
.server-type-wireguard {
|
|
||||||
color: #10b981;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Tooltip styles for truncated text */
|
|
||||||
[title] {
|
|
||||||
cursor: help;
|
|
||||||
border-bottom: 1px dotted #999;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Form improvements for move clients page */
|
|
||||||
.form-row.field-box {
|
|
||||||
border: 1px solid #ddd;
|
|
||||||
border-radius: 4px;
|
|
||||||
padding: 10px;
|
|
||||||
margin: 10px 0;
|
|
||||||
background: #f9f9f9;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-row.field-box label {
|
|
||||||
font-weight: bold;
|
|
||||||
color: #333;
|
|
||||||
display: block;
|
|
||||||
margin-bottom: 5px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.form-row.field-box .readonly {
|
|
||||||
padding: 5px;
|
|
||||||
background: white;
|
|
||||||
border: 1px solid #ddd;
|
|
||||||
border-radius: 3px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.help {
|
|
||||||
background: #e8f4fd;
|
|
||||||
border: 1px solid #b8daff;
|
|
||||||
border-radius: 4px;
|
|
||||||
padding: 15px;
|
|
||||||
margin: 20px 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.help h3 {
|
|
||||||
margin-top: 0;
|
|
||||||
color: #0066cc;
|
|
||||||
}
|
|
||||||
|
|
||||||
.help ul {
|
|
||||||
margin-bottom: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.help li {
|
|
||||||
margin-bottom: 5px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Make user statistics section wider */
|
|
||||||
.field-user_statistics_summary {
|
|
||||||
width: 100% !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.field-user_statistics_summary .readonly {
|
|
||||||
max-width: none !important;
|
|
||||||
width: 100% !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.field-user_statistics_summary .user-management-section {
|
|
||||||
width: 100% !important;
|
|
||||||
max-width: none !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Wider fieldset for statistics */
|
|
||||||
.wide {
|
|
||||||
width: 100% !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wide .form-row {
|
|
||||||
width: 100% !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Server status button styles */
|
|
||||||
.check-status-btn {
|
|
||||||
transition: all 0.2s ease;
|
|
||||||
white-space: nowrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.check-status-btn:hover {
|
|
||||||
opacity: 0.8;
|
|
||||||
transform: scale(1.05);
|
|
||||||
}
|
|
||||||
|
|
||||||
.check-status-btn:disabled {
|
|
||||||
opacity: 0.6;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Make admin tables more responsive */
|
|
||||||
.changelist-results table {
|
|
||||||
width: 100%;
|
|
||||||
table-layout: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Improve button spacing */
|
|
||||||
.btn-sm-custom {
|
|
||||||
margin: 0 2px;
|
|
||||||
display: inline-block;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Hide xray-subscriptions tab if it appears */
|
|
||||||
#xray-subscriptions-tab,
|
|
||||||
a[href="#xray-subscriptions-tab"],
|
|
||||||
li:has(a[href="#xray-subscriptions-tab"]) {
|
|
||||||
display: none !important;
|
|
||||||
}
|
|
||||||
@@ -1,203 +0,0 @@
|
|||||||
// static/admin/js/generate_uuid.js
|
|
||||||
|
|
||||||
function generateLink(button) {
|
|
||||||
let row = button.closest('tr');
|
|
||||||
let inputField = row.querySelector('input[name$="link"]');
|
|
||||||
|
|
||||||
if (inputField) {
|
|
||||||
inputField.value = generateRandomString(16);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function generateRandomString(length) {
|
|
||||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
|
|
||||||
let result = '';
|
|
||||||
for (let i = 0; i < length; i++) {
|
|
||||||
result += chars.charAt(Math.floor(Math.random() * chars.length));
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
// OutlineServer JSON Configuration Functionality
|
|
||||||
document.addEventListener('DOMContentLoaded', function() {
|
|
||||||
|
|
||||||
// JSON Import functionality
|
|
||||||
const importJsonBtn = document.getElementById('import-json-btn');
|
|
||||||
const importJsonTextarea = document.getElementById('import-json-config');
|
|
||||||
|
|
||||||
if (importJsonBtn && importJsonTextarea) {
|
|
||||||
// Auto-fill on paste event
|
|
||||||
importJsonTextarea.addEventListener('paste', function(e) {
|
|
||||||
// Small delay to let paste complete
|
|
||||||
setTimeout(() => {
|
|
||||||
tryAutoFillFromJson();
|
|
||||||
}, 100);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Manual import button
|
|
||||||
importJsonBtn.addEventListener('click', function() {
|
|
||||||
tryAutoFillFromJson();
|
|
||||||
});
|
|
||||||
|
|
||||||
function tryAutoFillFromJson() {
|
|
||||||
try {
|
|
||||||
const jsonText = importJsonTextarea.value.trim();
|
|
||||||
if (!jsonText) {
|
|
||||||
alert('Please enter JSON configuration');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const config = JSON.parse(jsonText);
|
|
||||||
|
|
||||||
// Validate required fields
|
|
||||||
if (!config.apiUrl || !config.certSha256) {
|
|
||||||
alert('Invalid JSON format. Required fields: apiUrl, certSha256');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse apiUrl to extract components
|
|
||||||
const url = new URL(config.apiUrl);
|
|
||||||
|
|
||||||
// Fill form fields
|
|
||||||
const adminUrlField = document.getElementById('id_admin_url');
|
|
||||||
const adminCertField = document.getElementById('id_admin_access_cert');
|
|
||||||
const clientHostnameField = document.getElementById('id_client_hostname');
|
|
||||||
const clientPortField = document.getElementById('id_client_port');
|
|
||||||
const nameField = document.getElementById('id_name');
|
|
||||||
const commentField = document.getElementById('id_comment');
|
|
||||||
|
|
||||||
if (adminUrlField) adminUrlField.value = config.apiUrl;
|
|
||||||
if (adminCertField) adminCertField.value = config.certSha256;
|
|
||||||
|
|
||||||
// Use provided hostname or extract from URL
|
|
||||||
const hostname = config.clientHostname || config.hostnameForAccessKeys || url.hostname;
|
|
||||||
if (clientHostnameField) clientHostnameField.value = hostname;
|
|
||||||
|
|
||||||
// Use provided port or extract from various sources
|
|
||||||
const clientPort = config.clientPort || config.portForNewAccessKeys || url.port || '1257';
|
|
||||||
if (clientPortField) clientPortField.value = clientPort;
|
|
||||||
|
|
||||||
// Generate server name if not provided and field is empty
|
|
||||||
if (nameField && !nameField.value) {
|
|
||||||
const serverName = config.serverName || config.name || `Outline-${hostname}`;
|
|
||||||
nameField.value = serverName;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fill comment if provided and field exists
|
|
||||||
if (commentField && config.comment) {
|
|
||||||
commentField.value = config.comment;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clear the JSON input
|
|
||||||
importJsonTextarea.value = '';
|
|
||||||
|
|
||||||
// Show success message
|
|
||||||
showSuccessMessage('✅ Configuration imported successfully!');
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
alert('Invalid JSON format: ' + error.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Copy to clipboard functionality
|
|
||||||
window.copyToClipboard = function(elementId) {
|
|
||||||
const element = document.getElementById(elementId);
|
|
||||||
if (element) {
|
|
||||||
const text = element.textContent || element.innerText;
|
|
||||||
|
|
||||||
if (navigator.clipboard && window.isSecureContext) {
|
|
||||||
navigator.clipboard.writeText(text).then(() => {
|
|
||||||
showCopySuccess();
|
|
||||||
}).catch(err => {
|
|
||||||
fallbackCopyTextToClipboard(text);
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
fallbackCopyTextToClipboard(text);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
function fallbackCopyTextToClipboard(text) {
|
|
||||||
const textArea = document.createElement('textarea');
|
|
||||||
textArea.value = text;
|
|
||||||
textArea.style.position = 'fixed';
|
|
||||||
textArea.style.left = '-999999px';
|
|
||||||
textArea.style.top = '-999999px';
|
|
||||||
document.body.appendChild(textArea);
|
|
||||||
textArea.focus();
|
|
||||||
textArea.select();
|
|
||||||
|
|
||||||
try {
|
|
||||||
document.execCommand('copy');
|
|
||||||
showCopySuccess();
|
|
||||||
} catch (err) {
|
|
||||||
console.error('Failed to copy text: ', err);
|
|
||||||
}
|
|
||||||
|
|
||||||
document.body.removeChild(textArea);
|
|
||||||
}
|
|
||||||
|
|
||||||
function showCopySuccess() {
|
|
||||||
showSuccessMessage('📋 Copied to clipboard!');
|
|
||||||
}
|
|
||||||
|
|
||||||
function showSuccessMessage(message) {
|
|
||||||
const alertHtml = `
|
|
||||||
<div class="alert alert-success alert-dismissible" style="margin: 1rem 0;">
|
|
||||||
${message}
|
|
||||||
<button type="button" class="close" aria-label="Close" onclick="this.parentElement.remove()">
|
|
||||||
<span aria-hidden="true">×</span>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
`;
|
|
||||||
|
|
||||||
// Try to find a container for the message
|
|
||||||
const container = document.querySelector('.card-body') || document.querySelector('#content-main');
|
|
||||||
if (container) {
|
|
||||||
container.insertAdjacentHTML('afterbegin', alertHtml);
|
|
||||||
}
|
|
||||||
|
|
||||||
setTimeout(() => {
|
|
||||||
const alert = document.querySelector('.alert-success');
|
|
||||||
if (alert) alert.remove();
|
|
||||||
}, 5000);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sync server button - handle both static and dynamic buttons
|
|
||||||
document.addEventListener('click', async function(e) {
|
|
||||||
if (e.target && (e.target.id === 'sync-server-btn' || e.target.matches('[id="sync-server-btn"]'))) {
|
|
||||||
const syncBtn = e.target;
|
|
||||||
const serverId = syncBtn.dataset.serverId;
|
|
||||||
const csrfToken = document.querySelector('[name=csrfmiddlewaretoken]').value;
|
|
||||||
|
|
||||||
const originalText = syncBtn.textContent;
|
|
||||||
syncBtn.textContent = '⏳ Syncing...';
|
|
||||||
syncBtn.disabled = true;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await fetch(`/admin/vpn/outlineserver/${serverId}/sync/`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/x-www-form-urlencoded',
|
|
||||||
'X-CSRFToken': csrfToken
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const data = await response.json();
|
|
||||||
|
|
||||||
if (data.success) {
|
|
||||||
showSuccessMessage(`✅ ${data.message}`);
|
|
||||||
setTimeout(() => window.location.reload(), 2000);
|
|
||||||
} else {
|
|
||||||
alert('Sync failed: ' + data.error);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
alert('Network error: ' + error.message);
|
|
||||||
} finally {
|
|
||||||
syncBtn.textContent = originalText;
|
|
||||||
syncBtn.disabled = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
// Server status check functionality for admin
|
|
||||||
document.addEventListener('DOMContentLoaded', function() {
|
|
||||||
// Add event listeners to all check status buttons
|
|
||||||
document.querySelectorAll('.check-status-btn').forEach(button => {
|
|
||||||
button.addEventListener('click', async function(e) {
|
|
||||||
e.preventDefault();
|
|
||||||
|
|
||||||
const serverId = this.dataset.serverId;
|
|
||||||
const serverName = this.dataset.serverName;
|
|
||||||
const serverType = this.dataset.serverType;
|
|
||||||
const originalText = this.textContent;
|
|
||||||
const originalColor = this.style.background;
|
|
||||||
|
|
||||||
// Show loading state
|
|
||||||
this.textContent = '⏳ Checking...';
|
|
||||||
this.style.background = '#6c757d';
|
|
||||||
this.disabled = true;
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Try AJAX request first
|
|
||||||
const response = await fetch(`/admin/vpn/server/${serverId}/check-status/`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/x-www-form-urlencoded',
|
|
||||||
'X-CSRFToken': getCookie('csrftoken')
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json();
|
|
||||||
|
|
||||||
if (data.success) {
|
|
||||||
// Update button based on status
|
|
||||||
if (data.status === 'online') {
|
|
||||||
this.textContent = '✅ Online';
|
|
||||||
this.style.background = '#28a745';
|
|
||||||
} else if (data.status === 'offline') {
|
|
||||||
this.textContent = '❌ Offline';
|
|
||||||
this.style.background = '#dc3545';
|
|
||||||
} else if (data.status === 'error') {
|
|
||||||
this.textContent = '⚠️ Error';
|
|
||||||
this.style.background = '#fd7e14';
|
|
||||||
} else {
|
|
||||||
this.textContent = '❓ Unknown';
|
|
||||||
this.style.background = '#6c757d';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Show additional info if available
|
|
||||||
if (data.message) {
|
|
||||||
this.title = data.message;
|
|
||||||
}
|
|
||||||
|
|
||||||
} else {
|
|
||||||
throw new Error(data.error || 'Failed to check status');
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error checking server status:', error);
|
|
||||||
|
|
||||||
// Fallback: show basic server info
|
|
||||||
this.textContent = `📊 ${serverType}`;
|
|
||||||
this.style.background = '#17a2b8';
|
|
||||||
this.title = `Server: ${serverName} (${serverType}) - Status check failed: ${error.message}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reset after 5 seconds in all cases
|
|
||||||
setTimeout(() => {
|
|
||||||
this.textContent = originalText;
|
|
||||||
this.style.background = originalColor;
|
|
||||||
this.title = '';
|
|
||||||
this.disabled = false;
|
|
||||||
}, 5000);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// Helper function to get CSRF token
|
|
||||||
function getCookie(name) {
|
|
||||||
let cookieValue = null;
|
|
||||||
if (document.cookie && document.cookie !== '') {
|
|
||||||
const cookies = document.cookie.split(';');
|
|
||||||
for (let i = 0; i < cookies.length; i++) {
|
|
||||||
const cookie = cookies[i].trim();
|
|
||||||
if (cookie.substring(0, name.length + 1) === (name + '=')) {
|
|
||||||
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return cookieValue;
|
|
||||||
}
|
|
||||||
@@ -1,289 +0,0 @@
|
|||||||
// Xray Inbound Auto-Fill Helper
|
|
||||||
console.log('Xray inbound helper script loaded');
|
|
||||||
|
|
||||||
// Protocol configurations based on Xray documentation
|
|
||||||
const protocolConfigs = {
|
|
||||||
'vless': {
|
|
||||||
port: 443,
|
|
||||||
network: 'tcp',
|
|
||||||
security: 'tls',
|
|
||||||
description: 'VLESS - Lightweight protocol with UUID authentication'
|
|
||||||
},
|
|
||||||
'vmess': {
|
|
||||||
port: 443,
|
|
||||||
network: 'ws',
|
|
||||||
security: 'tls',
|
|
||||||
description: 'VMess - V2Ray protocol with encryption and authentication'
|
|
||||||
},
|
|
||||||
'trojan': {
|
|
||||||
port: 443,
|
|
||||||
network: 'tcp',
|
|
||||||
security: 'tls',
|
|
||||||
description: 'Trojan - TLS-based protocol mimicking HTTPS traffic'
|
|
||||||
},
|
|
||||||
'shadowsocks': {
|
|
||||||
port: 8388,
|
|
||||||
network: 'tcp',
|
|
||||||
security: 'none',
|
|
||||||
ss_method: 'aes-256-gcm',
|
|
||||||
description: 'Shadowsocks - SOCKS5 proxy with encryption'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Initialize when DOM is ready
|
|
||||||
document.addEventListener('DOMContentLoaded', function() {
|
|
||||||
console.log('DOM ready, initializing Xray helper');
|
|
||||||
|
|
||||||
// Add help text and generate buttons
|
|
||||||
addHelpText();
|
|
||||||
addGenerateButtons();
|
|
||||||
|
|
||||||
// Watch for protocol field changes
|
|
||||||
const protocolField = document.getElementById('id_protocol');
|
|
||||||
if (protocolField) {
|
|
||||||
protocolField.addEventListener('change', function() {
|
|
||||||
handleProtocolChange(this.value);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Auto-fill on initial load if new inbound
|
|
||||||
if (protocolField.value && isNewInbound()) {
|
|
||||||
handleProtocolChange(protocolField.value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
function isNewInbound() {
|
|
||||||
// Check if this is a new inbound (no port value set)
|
|
||||||
const portField = document.getElementById('id_port');
|
|
||||||
return !portField || !portField.value;
|
|
||||||
}
|
|
||||||
|
|
||||||
function handleProtocolChange(protocol) {
|
|
||||||
if (!protocol || !protocolConfigs[protocol]) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const config = protocolConfigs[protocol];
|
|
||||||
|
|
||||||
// Only auto-fill for new inbounds to avoid overwriting user data
|
|
||||||
if (isNewInbound()) {
|
|
||||||
console.log('Auto-filling fields for new', protocol, 'inbound');
|
|
||||||
autoFillFields(protocol, config);
|
|
||||||
showMessage(`Auto-filled ${protocol.toUpperCase()} configuration`, 'info');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function autoFillFields(protocol, config) {
|
|
||||||
// Fill basic fields only if they're empty
|
|
||||||
fillIfEmpty('id_port', config.port);
|
|
||||||
fillIfEmpty('id_network', config.network);
|
|
||||||
fillIfEmpty('id_security', config.security);
|
|
||||||
|
|
||||||
// Protocol-specific fields
|
|
||||||
if (config.ss_method && protocol === 'shadowsocks') {
|
|
||||||
fillIfEmpty('id_ss_method', config.ss_method);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate helpful JSON configs
|
|
||||||
generateJsonConfigs(protocol, config);
|
|
||||||
}
|
|
||||||
|
|
||||||
function fillIfEmpty(fieldId, value) {
|
|
||||||
const field = document.getElementById(fieldId);
|
|
||||||
if (field && !field.value && value !== undefined) {
|
|
||||||
field.value = value;
|
|
||||||
field.dispatchEvent(new Event('change', { bubbles: true }));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function generateJsonConfigs(protocol, config) {
|
|
||||||
// Generate stream settings
|
|
||||||
const streamField = document.getElementById('id_stream_settings');
|
|
||||||
if (streamField && !streamField.value) {
|
|
||||||
const streamSettings = getStreamSettings(protocol, config.network);
|
|
||||||
if (streamSettings) {
|
|
||||||
streamField.value = JSON.stringify(streamSettings, null, 2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate sniffing settings
|
|
||||||
const sniffingField = document.getElementById('id_sniffing_settings');
|
|
||||||
if (sniffingField && !sniffingField.value) {
|
|
||||||
const sniffingSettings = {
|
|
||||||
enabled: true,
|
|
||||||
destOverride: ['http', 'tls'],
|
|
||||||
metadataOnly: false
|
|
||||||
};
|
|
||||||
sniffingField.value = JSON.stringify(sniffingSettings, null, 2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function getStreamSettings(protocol, network) {
|
|
||||||
const settings = {};
|
|
||||||
|
|
||||||
switch (network) {
|
|
||||||
case 'ws':
|
|
||||||
settings.wsSettings = {
|
|
||||||
path: '/ws',
|
|
||||||
headers: {
|
|
||||||
Host: 'example.com'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
break;
|
|
||||||
case 'grpc':
|
|
||||||
settings.grpcSettings = {
|
|
||||||
serviceName: 'GunService'
|
|
||||||
};
|
|
||||||
break;
|
|
||||||
case 'h2':
|
|
||||||
settings.httpSettings = {
|
|
||||||
host: ['example.com'],
|
|
||||||
path: '/path'
|
|
||||||
};
|
|
||||||
break;
|
|
||||||
case 'tcp':
|
|
||||||
settings.tcpSettings = {
|
|
||||||
header: {
|
|
||||||
type: 'none'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
break;
|
|
||||||
case 'kcp':
|
|
||||||
settings.kcpSettings = {
|
|
||||||
mtu: 1350,
|
|
||||||
tti: 50,
|
|
||||||
uplinkCapacity: 5,
|
|
||||||
downlinkCapacity: 20,
|
|
||||||
congestion: false,
|
|
||||||
readBufferSize: 2,
|
|
||||||
writeBufferSize: 2,
|
|
||||||
header: {
|
|
||||||
type: 'none'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
return Object.keys(settings).length > 0 ? settings : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function addHelpText() {
|
|
||||||
// Add help text to complex fields
|
|
||||||
addFieldHelp('id_stream_settings',
|
|
||||||
'Transport settings: TCP (none), WebSocket (path/host), gRPC (serviceName), etc. Format: JSON');
|
|
||||||
|
|
||||||
addFieldHelp('id_sniffing_settings',
|
|
||||||
'Traffic sniffing for routing: enabled, destOverride ["http","tls"], metadataOnly');
|
|
||||||
|
|
||||||
addFieldHelp('id_tls_cert_file',
|
|
||||||
'TLS certificate file path (required for TLS security). Example: /path/to/cert.pem');
|
|
||||||
|
|
||||||
addFieldHelp('id_tls_key_file',
|
|
||||||
'TLS private key file path (required for TLS security). Example: /path/to/key.pem');
|
|
||||||
|
|
||||||
addFieldHelp('id_protocol',
|
|
||||||
'VLESS: lightweight + UUID | VMess: V2Ray encrypted | Trojan: HTTPS-like | Shadowsocks: SOCKS5');
|
|
||||||
|
|
||||||
addFieldHelp('id_network',
|
|
||||||
'Transport: tcp (direct), ws (WebSocket), grpc (HTTP/2), h2 (HTTP/2), kcp (mKCP)');
|
|
||||||
|
|
||||||
addFieldHelp('id_security',
|
|
||||||
'Encryption: none (no TLS), tls (standard TLS), reality (advanced steganography)');
|
|
||||||
}
|
|
||||||
|
|
||||||
function addFieldHelp(fieldId, helpText) {
|
|
||||||
const field = document.getElementById(fieldId);
|
|
||||||
if (!field) return;
|
|
||||||
|
|
||||||
const helpDiv = document.createElement('div');
|
|
||||||
helpDiv.className = 'help';
|
|
||||||
helpDiv.style.cssText = 'font-size: 11px; color: #666; margin-top: 2px; line-height: 1.3;';
|
|
||||||
helpDiv.textContent = helpText;
|
|
||||||
|
|
||||||
field.parentNode.appendChild(helpDiv);
|
|
||||||
}
|
|
||||||
|
|
||||||
function showMessage(message, type = 'info') {
|
|
||||||
const messageDiv = document.createElement('div');
|
|
||||||
messageDiv.className = `alert alert-${type}`;
|
|
||||||
messageDiv.style.cssText = `
|
|
||||||
position: fixed;
|
|
||||||
top: 20px;
|
|
||||||
right: 20px;
|
|
||||||
z-index: 9999;
|
|
||||||
padding: 12px 20px;
|
|
||||||
border-radius: 4px;
|
|
||||||
background: ${type === 'success' ? '#d4edda' : '#cce7ff'};
|
|
||||||
border: 1px solid ${type === 'success' ? '#c3e6cb' : '#b8daff'};
|
|
||||||
color: ${type === 'success' ? '#155724' : '#004085'};
|
|
||||||
font-weight: 500;
|
|
||||||
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
|
|
||||||
`;
|
|
||||||
messageDiv.textContent = message;
|
|
||||||
|
|
||||||
document.body.appendChild(messageDiv);
|
|
||||||
|
|
||||||
setTimeout(() => {
|
|
||||||
messageDiv.remove();
|
|
||||||
}, 3000);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helper functions for generating values
|
|
||||||
function generateRandomString(length = 8) {
|
|
||||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
|
|
||||||
let result = '';
|
|
||||||
for (let i = 0; i < length; i++) {
|
|
||||||
result += chars.charAt(Math.floor(Math.random() * chars.length));
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
function generateShortId() {
|
|
||||||
return Math.random().toString(16).substr(2, 8);
|
|
||||||
}
|
|
||||||
|
|
||||||
function suggestPort(protocol) {
|
|
||||||
const ports = {
|
|
||||||
'vless': [443, 8443, 2053, 2083],
|
|
||||||
'vmess': [443, 80, 8080, 8443],
|
|
||||||
'trojan': [443, 8443, 2087],
|
|
||||||
'shadowsocks': [8388, 1080, 8080]
|
|
||||||
};
|
|
||||||
const protocolPorts = ports[protocol] || [443];
|
|
||||||
return protocolPorts[Math.floor(Math.random() * protocolPorts.length)];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add generate buttons to fields
|
|
||||||
function addGenerateButtons() {
|
|
||||||
console.log('Adding generate buttons');
|
|
||||||
|
|
||||||
// Add tag generator
|
|
||||||
addGenerateButton('id_tag', '🎲', () => `inbound-${generateShortId()}`);
|
|
||||||
|
|
||||||
// Add port suggestion based on protocol
|
|
||||||
addGenerateButton('id_port', '🎯', () => {
|
|
||||||
const protocol = document.getElementById('id_protocol')?.value;
|
|
||||||
return suggestPort(protocol);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function addGenerateButton(fieldId, icon, generator) {
|
|
||||||
const field = document.getElementById(fieldId);
|
|
||||||
if (!field || field.nextElementSibling?.classList.contains('generate-btn')) return;
|
|
||||||
|
|
||||||
const button = document.createElement('button');
|
|
||||||
button.type = 'button';
|
|
||||||
button.className = 'generate-btn btn btn-sm btn-secondary';
|
|
||||||
button.innerHTML = icon;
|
|
||||||
button.title = 'Generate value';
|
|
||||||
button.style.cssText = 'margin-left: 5px; padding: 2px 6px; font-size: 12px;';
|
|
||||||
|
|
||||||
button.addEventListener('click', () => {
|
|
||||||
const value = generator();
|
|
||||||
field.value = value;
|
|
||||||
showMessage(`Generated: ${value}`, 'success');
|
|
||||||
field.dispatchEvent(new Event('change', { bubbles: true }));
|
|
||||||
});
|
|
||||||
|
|
||||||
field.parentNode.insertBefore(button, field.nextSibling);
|
|
||||||
}
|
|
||||||
1289
static/index.html
Normal file
1289
static/index.html
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,854 +0,0 @@
|
|||||||
from django.contrib import admin
|
|
||||||
from django.utils.html import format_html
|
|
||||||
from django.urls import path, reverse
|
|
||||||
from django.shortcuts import redirect
|
|
||||||
from django.contrib import messages
|
|
||||||
from django.utils import timezone
|
|
||||||
from django import forms
|
|
||||||
from django.contrib.admin.widgets import FilteredSelectMultiple
|
|
||||||
from .models import BotSettings, TelegramMessage, AccessRequest
|
|
||||||
from .localization import MessageLocalizer
|
|
||||||
from vpn.models import User
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class BotSettingsAdminForm(forms.ModelForm):
|
|
||||||
"""Custom form for BotSettings with Telegram admin selection"""
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = BotSettings
|
|
||||||
fields = '__all__'
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
# Show all users for telegram_admins selection
|
|
||||||
if 'telegram_admins' in self.fields:
|
|
||||||
self.fields['telegram_admins'].queryset = User.objects.all().order_by('username')
|
|
||||||
self.fields['telegram_admins'].help_text = (
|
|
||||||
"Select users who will have admin access in the bot. "
|
|
||||||
"Users will get admin rights when they connect to the bot with their Telegram account."
|
|
||||||
)
|
|
||||||
|
|
||||||
def clean_telegram_admins(self):
|
|
||||||
"""Validate that selected admins have telegram_user_id or telegram_username"""
|
|
||||||
admins = self.cleaned_data.get('telegram_admins')
|
|
||||||
# No validation needed - admins can be selected even without telegram connection
|
|
||||||
# They will get admin rights when they connect via bot
|
|
||||||
return admins
|
|
||||||
|
|
||||||
|
|
||||||
class AccessRequestAdminForm(forms.ModelForm):
|
|
||||||
"""Custom form for AccessRequest with existing user selection"""
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = AccessRequest
|
|
||||||
fields = '__all__'
|
|
||||||
widgets = {
|
|
||||||
'selected_subscription_groups': FilteredSelectMultiple(
|
|
||||||
verbose_name='Subscription Groups',
|
|
||||||
is_stacked=False
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
# Rename the field for better UI
|
|
||||||
if 'selected_existing_user' in self.fields:
|
|
||||||
self.fields['selected_existing_user'].label = 'Link to existing user'
|
|
||||||
self.fields['selected_existing_user'].empty_label = "— Create new user —"
|
|
||||||
self.fields['selected_existing_user'].help_text = "Select an existing user without Telegram to link, or leave empty to create new user"
|
|
||||||
# Get users without telegram_user_id
|
|
||||||
from vpn.models import User
|
|
||||||
self.fields['selected_existing_user'].queryset = User.objects.filter(
|
|
||||||
telegram_user_id__isnull=True
|
|
||||||
).order_by('username')
|
|
||||||
|
|
||||||
# Configure subscription group fields
|
|
||||||
if 'selected_subscription_groups' in self.fields:
|
|
||||||
from vpn.models_xray import SubscriptionGroup
|
|
||||||
self.fields['selected_subscription_groups'].queryset = SubscriptionGroup.objects.filter(
|
|
||||||
is_active=True
|
|
||||||
).order_by('name')
|
|
||||||
self.fields['selected_subscription_groups'].label = 'Subscription Groups'
|
|
||||||
self.fields['selected_subscription_groups'].help_text = 'Select subscription groups to assign to this user'
|
|
||||||
|
|
||||||
|
|
||||||
@admin.register(BotSettings)
|
|
||||||
class BotSettingsAdmin(admin.ModelAdmin):
|
|
||||||
form = BotSettingsAdminForm
|
|
||||||
list_display = ('__str__', 'enabled', 'bot_token_display', 'admin_count_display', 'updated_at')
|
|
||||||
fieldsets = (
|
|
||||||
('Bot Configuration', {
|
|
||||||
'fields': ('bot_token', 'enabled', 'bot_status_display'),
|
|
||||||
'description': 'Configure bot settings and view current status'
|
|
||||||
}),
|
|
||||||
('Admin Management', {
|
|
||||||
'fields': ('telegram_admins', 'admin_info_display'),
|
|
||||||
'description': 'Select users with linked Telegram accounts who will have admin access in the bot'
|
|
||||||
}),
|
|
||||||
('Connection Settings', {
|
|
||||||
'fields': ('api_base_url', 'connection_timeout', 'use_proxy', 'proxy_url'),
|
|
||||||
'classes': ('collapse',)
|
|
||||||
}),
|
|
||||||
('Timestamps', {
|
|
||||||
'fields': ('created_at', 'updated_at'),
|
|
||||||
'classes': ('collapse',)
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
readonly_fields = ('created_at', 'updated_at', 'bot_status_display', 'admin_info_display')
|
|
||||||
filter_horizontal = ('telegram_admins',)
|
|
||||||
|
|
||||||
def bot_token_display(self, obj):
|
|
||||||
"""Mask bot token for security"""
|
|
||||||
if obj.bot_token:
|
|
||||||
token = obj.bot_token
|
|
||||||
if len(token) > 10:
|
|
||||||
return f"{token[:6]}...{token[-4:]}"
|
|
||||||
return "Token set"
|
|
||||||
return "No token set"
|
|
||||||
bot_token_display.short_description = "Bot Token"
|
|
||||||
|
|
||||||
def admin_count_display(self, obj):
|
|
||||||
"""Display count of Telegram admins"""
|
|
||||||
count = obj.telegram_admins.count()
|
|
||||||
if count == 0:
|
|
||||||
return "No admins"
|
|
||||||
elif count == 1:
|
|
||||||
return "1 admin"
|
|
||||||
else:
|
|
||||||
return f"{count} admins"
|
|
||||||
admin_count_display.short_description = "Telegram Admins"
|
|
||||||
|
|
||||||
def admin_info_display(self, obj):
|
|
||||||
"""Display detailed admin information"""
|
|
||||||
if not obj.pk:
|
|
||||||
return "Save settings first to manage admins"
|
|
||||||
|
|
||||||
admins = obj.telegram_admins.all()
|
|
||||||
|
|
||||||
if not admins.exists():
|
|
||||||
html = '<div style="background: #fff3cd; padding: 10px; border-radius: 4px; border-left: 4px solid #ffc107;">'
|
|
||||||
html += '<p style="margin: 0; color: #856404;"><strong>⚠️ No Telegram admins configured</strong></p>'
|
|
||||||
html += '<p style="margin: 5px 0 0 0; color: #856404;">Select users above to give them admin access in the Telegram bot.</p>'
|
|
||||||
html += '</div>'
|
|
||||||
else:
|
|
||||||
html = '<div style="background: #d4edda; padding: 10px; border-radius: 4px; border-left: 4px solid #28a745;">'
|
|
||||||
html += f'<p style="margin: 0; color: #155724;"><strong>✅ {admins.count()} Telegram admin(s) configured</strong></p>'
|
|
||||||
html += '<div style="margin-top: 8px;">'
|
|
||||||
|
|
||||||
for admin in admins:
|
|
||||||
html += '<div style="background: white; margin: 4px 0; padding: 6px 10px; border-radius: 3px; border: 1px solid #c3e6cb;">'
|
|
||||||
html += f'<strong>{admin.username}</strong>'
|
|
||||||
|
|
||||||
if admin.telegram_username:
|
|
||||||
html += f' (@{admin.telegram_username})'
|
|
||||||
|
|
||||||
html += f' <small style="color: #6c757d;">ID: {admin.telegram_user_id}</small>'
|
|
||||||
|
|
||||||
if admin.first_name or admin.last_name:
|
|
||||||
name_parts = []
|
|
||||||
if admin.first_name:
|
|
||||||
name_parts.append(admin.first_name)
|
|
||||||
if admin.last_name:
|
|
||||||
name_parts.append(admin.last_name)
|
|
||||||
html += f'<br><small style="color: #6c757d;">Name: {" ".join(name_parts)}</small>'
|
|
||||||
|
|
||||||
html += '</div>'
|
|
||||||
|
|
||||||
html += '</div>'
|
|
||||||
html += '<p style="margin: 8px 0 0 0; color: #155724; font-size: 12px;">These users will receive notifications about new access requests and can approve/reject them directly in Telegram.</p>'
|
|
||||||
html += '</div>'
|
|
||||||
|
|
||||||
return format_html(html)
|
|
||||||
admin_info_display.short_description = "Admin Configuration"
|
|
||||||
|
|
||||||
def bot_status_display(self, obj):
|
|
||||||
"""Display bot status with control buttons"""
|
|
||||||
from .bot import TelegramBotManager
|
|
||||||
import os
|
|
||||||
from django.conf import settings as django_settings
|
|
||||||
|
|
||||||
manager = TelegramBotManager()
|
|
||||||
|
|
||||||
# Check if lock file exists - only reliable indicator
|
|
||||||
lock_dir = os.path.join(getattr(django_settings, 'BASE_DIR', '/tmp'), 'telegram_bot_locks')
|
|
||||||
lock_path = os.path.join(lock_dir, 'telegram_bot.lock')
|
|
||||||
is_running = os.path.exists(lock_path)
|
|
||||||
|
|
||||||
if is_running:
|
|
||||||
status_html = '<span style="color: green; font-weight: bold;">🟢 Bot is RUNNING</span>'
|
|
||||||
else:
|
|
||||||
status_html = '<span style="color: red; font-weight: bold;">🔴 Bot is STOPPED</span>'
|
|
||||||
|
|
||||||
# Add control buttons
|
|
||||||
status_html += '<br><br>'
|
|
||||||
if is_running:
|
|
||||||
status_html += f'<a class="button" href="{reverse("admin:telegram_bot_stop_bot")}">Stop Bot</a> '
|
|
||||||
status_html += f'<a class="button" href="{reverse("admin:telegram_bot_restart_bot")}">Restart Bot</a>'
|
|
||||||
else:
|
|
||||||
if obj.enabled and obj.bot_token:
|
|
||||||
status_html += f'<a class="button" href="{reverse("admin:telegram_bot_start_bot")}">Start Bot</a>'
|
|
||||||
else:
|
|
||||||
status_html += '<span style="color: gray;">Configure bot token and enable bot to start</span>'
|
|
||||||
|
|
||||||
return format_html(status_html)
|
|
||||||
bot_status_display.short_description = "Bot Status"
|
|
||||||
|
|
||||||
def get_urls(self):
|
|
||||||
urls = super().get_urls()
|
|
||||||
custom_urls = [
|
|
||||||
path('start-bot/', self.start_bot, name='telegram_bot_start_bot'),
|
|
||||||
path('stop-bot/', self.stop_bot, name='telegram_bot_stop_bot'),
|
|
||||||
path('restart-bot/', self.restart_bot, name='telegram_bot_restart_bot'),
|
|
||||||
]
|
|
||||||
return custom_urls + urls
|
|
||||||
|
|
||||||
def start_bot(self, request):
    """Start the telegram bot"""
    # Surface the outcome through the admin messages framework, then bounce
    # back to the singleton settings page either way.
    try:
        from .bot import TelegramBotManager
        TelegramBotManager().start()
        messages.success(request, "Bot started successfully!")
    except Exception as exc:
        messages.error(request, f"Failed to start bot: {exc}")
        logger.error(f"Failed to start bot: {exc}")
    return redirect('admin:telegram_bot_botsettings_change', object_id=1)
|
|
||||||
|
|
||||||
def stop_bot(self, request):
    """Stop the telegram bot"""
    # Mirror of start_bot: report success/failure via admin messages and
    # return to the singleton settings page.
    try:
        from .bot import TelegramBotManager
        TelegramBotManager().stop()
        messages.success(request, "Bot stopped successfully!")
    except Exception as exc:
        messages.error(request, f"Failed to stop bot: {exc}")
        logger.error(f"Failed to stop bot: {exc}")
    return redirect('admin:telegram_bot_botsettings_change', object_id=1)
|
|
||||||
|
|
||||||
def restart_bot(self, request):
    """Restart the telegram bot"""
    # Same shape as start_bot/stop_bot; any failure is logged and shown
    # to the admin user instead of propagating.
    try:
        from .bot import TelegramBotManager
        TelegramBotManager().restart()
        messages.success(request, "Bot restarted successfully!")
    except Exception as exc:
        messages.error(request, f"Failed to restart bot: {exc}")
        logger.error(f"Failed to restart bot: {exc}")
    return redirect('admin:telegram_bot_botsettings_change', object_id=1)
|
|
||||||
|
|
||||||
def has_add_permission(self, request):
    """Allow creating the settings row only while none exists (singleton)."""
    settings_row_exists = BotSettings.objects.exists()
    return not settings_row_exists
|
|
||||||
|
|
||||||
def has_delete_permission(self, request, obj=None):
    """The settings row is a singleton; deleting it is never allowed."""
    return False
|
|
||||||
|
|
||||||
|
|
||||||
@admin.register(TelegramMessage)
class TelegramMessageAdmin(admin.ModelAdmin):
    """Read-only audit log of messages exchanged with the Telegram bot.

    Rows are created automatically by the bot; the admin only allows
    viewing, filtering, and (for superusers) cleanup deletion.
    """
    list_display = (
        'created_at',
        'direction_display',
        'user_display',
        'language_display',
        'message_preview',
        'linked_user'
    )
    list_filter = (
        'direction',
        'created_at',
        ('linked_user', admin.EmptyFieldListFilter),
    )
    search_fields = (
        'telegram_username',
        'telegram_first_name',
        'telegram_last_name',
        'message_text',
        'telegram_user_id'
    )
    # Everything is read-only: messages are immutable log records.
    readonly_fields = (
        'direction',
        'telegram_user_id',
        'telegram_username',
        'telegram_first_name',
        'telegram_last_name',
        'chat_id',
        'message_id',
        'message_text',
        'raw_data_display',
        'created_at',
        'linked_user',
        'user_language'
    )

    fieldsets = (
        ('Message Info', {
            'fields': (
                'direction',
                'message_text',
                'created_at'
            )
        }),
        ('Telegram User', {
            'fields': (
                'telegram_user_id',
                'telegram_username',
                'telegram_first_name',
                'telegram_last_name',
            )
        }),
        ('Technical Details', {
            'fields': (
                'chat_id',
                'message_id',
                'linked_user',
                'raw_data_display'
            ),
            'classes': ('collapse',)
        })
    )

    ordering = ['-created_at']
    list_per_page = 50
    date_hierarchy = 'created_at'

    def direction_display(self, obj):
        """Display direction with icon"""
        # Only two directions exist on the model: 'incoming' and everything
        # else is treated as outgoing.
        if obj.direction == 'incoming':
            return format_html('<span style="color: blue;">⬇️ Incoming</span>')
        else:
            return format_html('<span style="color: green;">⬆️ Outgoing</span>')
    direction_display.short_description = "Direction"

    def user_display(self, obj):
        """Display user info"""
        display = obj.display_name
        if obj.telegram_user_id:
            display += f" (ID: {obj.telegram_user_id})"
        return display
    user_display.short_description = "Telegram User"

    def language_display(self, obj):
        """Display user language"""
        # Falls back to the raw language code (or 'Unknown') for anything
        # other than the two mapped languages.
        lang_map = {'ru': '🇷🇺 RU', 'en': '🇺🇸 EN'}
        return lang_map.get(obj.user_language, obj.user_language or 'Unknown')
    language_display.short_description = "Language"

    def message_preview(self, obj):
        """Show message preview"""
        # Truncate long messages so the changelist stays readable.
        if len(obj.message_text) > 100:
            return obj.message_text[:100] + "..."
        return obj.message_text
    message_preview.short_description = "Message"

    def raw_data_display(self, obj):
        """Display raw data as formatted JSON"""
        import json
        if obj.raw_data:
            formatted = json.dumps(obj.raw_data, indent=2, ensure_ascii=False)
            # format_html escapes `formatted`, so raw payloads cannot inject HTML.
            return format_html('<pre style="max-width: 800px; overflow: auto;">{}</pre>', formatted)
        return "No raw data"
    raw_data_display.short_description = "Raw Data"

    def has_add_permission(self, request):
        # Messages are created automatically by bot
        return False

    def has_change_permission(self, request, obj=None):
        # Messages are read-only
        return False

    def has_delete_permission(self, request, obj=None):
        # Allow deletion for cleanup
        return request.user.is_superuser

    def get_actions(self, request):
        """Add custom actions"""
        # Despite the docstring, this only *removes* the bulk-delete action
        # for non-superusers, matching has_delete_permission above.
        actions = super().get_actions(request)
        if not request.user.is_superuser:
            # Remove delete action for non-superusers
            if 'delete_selected' in actions:
                del actions['delete_selected']
        return actions
|
|
||||||
|
|
||||||
|
|
||||||
@admin.register(AccessRequest)
class AccessRequestAdmin(admin.ModelAdmin):
    """Admin workflow for reviewing and approving Telegram access requests.

    Approval either links the request to an existing VPN user or creates a
    new one, assigns the selected subscription groups, and notifies the
    requester via Telegram.

    NOTE(review): this class defines ``save_model`` TWICE. In Python the
    second definition silently replaces the first, so the first one (the
    "existing user linking" variant) is dead code. Confirm which behavior is
    intended and merge/remove one of them.
    """
    form = AccessRequestAdminForm
    list_display = (
        'created_at',
        'user_display',
        'approved_display',
        'language_display',
        'desired_username_display',
        'message_preview',
        'created_user',
        'processed_by'
    )
    list_filter = (
        'approved',
        'created_at',
        'processed_at',
        ('processed_by', admin.EmptyFieldListFilter),
    )
    search_fields = (
        'telegram_username',
        'telegram_first_name',
        'telegram_last_name',
        'telegram_user_id',
        'message_text'
    )
    # Telegram-supplied data and processing results are immutable; admins
    # edit only the approval status, comment, and user-creation choices.
    readonly_fields = (
        'telegram_user_id',
        'telegram_username',
        'telegram_first_name',
        'telegram_last_name',
        'message_text',
        'chat_id',
        'created_at',
        'first_message',
        'processed_at',
        'processed_by',
        'created_user',
        'user_language'
    )

    fieldsets = (
        ('Request Info', {
            'fields': (
                'approved',
                'admin_comment',
                'created_at',
                'processed_at',
                'processed_by'
            )
        }),
        ('User Creation', {
            'fields': (
                'selected_existing_user',
                'desired_username',
            ),
            'description': 'Choose existing user to link OR specify username for new user'
        }),
        ('VPN Access Configuration', {
            'fields': (
                'selected_subscription_groups',
            ),
            'description': 'Select subscription groups to assign to the user'
        }),
        ('Telegram User', {
            'fields': (
                'telegram_user_id',
                'telegram_username',
                'telegram_first_name',
                'telegram_last_name',
            )
        }),
        ('Message Details', {
            'fields': (
                'message_text',
                'chat_id',
                'first_message'
            ),
            'classes': ('collapse',)
        }),
        ('Processing Results', {
            'fields': (
                'created_user',
            )
        })
    )

    ordering = ['-created_at']
    list_per_page = 50
    date_hierarchy = 'created_at'
    actions = ['approve_requests']

    def user_display(self, obj):
        """Display user info"""
        return obj.display_name
    user_display.short_description = "Telegram User"

    def approved_display(self, obj):
        """Display approved status with colors"""
        if obj.approved:
            return format_html('<span style="color: green; font-weight: bold;">✅ Approved</span>')
        else:
            return format_html('<span style="color: orange; font-weight: bold;">🔄 Pending</span>')
    approved_display.short_description = "Status"

    def message_preview(self, obj):
        """Show message preview"""
        if len(obj.message_text) > 100:
            return obj.message_text[:100] + "..."
        return obj.message_text
    message_preview.short_description = "Message"

    def desired_username_display(self, obj):
        """Display desired username"""
        if obj.desired_username:
            return obj.desired_username
        else:
            # Show (in gray italics) the same fallback _create_user_from_request
            # would use, so the admin can see what would be created.
            fallback = obj.telegram_username or obj.telegram_first_name or f"tg_{obj.telegram_user_id}"
            return format_html('<span style="color: gray; font-style: italic;">{}</span>', fallback)
    desired_username_display.short_description = "Desired Username"

    def language_display(self, obj):
        """Display user language with flag"""
        lang_map = {'ru': '🇷🇺 RU', 'en': '🇺🇸 EN'}
        return lang_map.get(obj.user_language, obj.user_language or 'Unknown')
    language_display.short_description = "Language"

    def approve_requests(self, request, queryset):
        """Approve selected access requests"""
        # Bulk admin action: for each still-pending request, create/link a
        # user, mark the request approved, and notify the requester.
        # Failures are collected and reported per-request, not aborting the batch.
        pending_requests = queryset.filter(approved=False)
        count = 0
        errors = []

        for access_request in pending_requests:
            try:
                logger.info(f"Approving request {access_request.id} from user {access_request.telegram_user_id}")
                user = self._create_user_from_request(access_request, request.user)

                if user:
                    access_request.approved = True
                    access_request.processed_by = request.user
                    access_request.processed_at = timezone.now()
                    access_request.created_user = user
                    access_request.save()

                    logger.info(f"Successfully approved request {access_request.id}, created user {user.username}")

                    # Send notification to user
                    self._send_approval_notification(access_request)
                    count += 1
                else:
                    errors.append(f"Failed to create user for {access_request.display_name}")

            except Exception as e:
                error_msg = f"Failed to approve request from {access_request.display_name}: {e}"
                logger.error(error_msg)
                errors.append(error_msg)

        if count:
            messages.success(request, f"Successfully approved {count} request(s)")

        if errors:
            for error in errors:
                messages.error(request, error)

    approve_requests.short_description = "✅ Approve selected requests"

    # NOTE(review): dead code — this first save_model is shadowed by the
    # second save_model defined further down in this class and is never bound.
    def save_model(self, request, obj, form, change):
        """Override save to handle existing user linking"""
        super().save_model(request, obj, form, change)

        # If approved and existing user was selected, link them
        if obj.approved and obj.selected_existing_user and not obj.created_user:
            try:
                # Link telegram data to selected user
                obj.selected_existing_user.telegram_user_id = obj.telegram_user_id
                obj.selected_existing_user.telegram_username = obj.telegram_username
                obj.selected_existing_user.telegram_first_name = obj.telegram_first_name or ""
                obj.selected_existing_user.telegram_last_name = obj.telegram_last_name or ""
                obj.selected_existing_user.save()

                # Update the request to reference the linked user
                obj.created_user = obj.selected_existing_user
                obj.processed_by = request.user
                obj.processed_at = timezone.now()
                obj.save()

                # Assign VPN access to the linked user
                try:
                    self._assign_vpn_access(obj.selected_existing_user, obj)
                except Exception as e:
                    logger.error(f"Failed to assign VPN access: {e}")
                    messages.warning(request, f"User linked but VPN access assignment failed: {e}")

                # Send notification
                self._send_approval_notification(obj)

                messages.success(request, f"Successfully linked Telegram user to existing user {obj.selected_existing_user.username}")
                logger.info(f"Linked Telegram user {obj.telegram_user_id} to existing user {obj.selected_existing_user.username}")

            except Exception as e:
                messages.error(request, f"Failed to link existing user: {e}")
                logger.error(f"Failed to link existing user: {e}")

    def _create_user_from_request(self, access_request, admin_user):
        """Create User from AccessRequest or link to existing user"""
        # Resolution order:
        #   1. a user already linked to this telegram_user_id,
        #   2. the user explicitly selected by the admin,
        #   3. an unlinked user matching the telegram username,
        #   4. a brand-new user with a unique, sanitized username.
        # NOTE(review): `admin_user` is accepted but never used in this body —
        # callers set processed_by themselves; confirm before removing the parameter.
        from vpn.models import User
        import secrets
        import string

        try:
            # Check if user already exists by telegram_user_id
            existing_user = User.objects.filter(telegram_user_id=access_request.telegram_user_id).first()
            if existing_user:
                logger.info(f"User already exists: {existing_user.username}")
                return existing_user

            # Check if admin selected an existing user to link
            if access_request.selected_existing_user:
                selected_user = access_request.selected_existing_user
                logger.info(f"Linking Telegram user {access_request.telegram_user_id} to selected existing user {selected_user.username}")

                # Link telegram data to selected user
                selected_user.telegram_user_id = access_request.telegram_user_id
                selected_user.telegram_username = access_request.telegram_username
                selected_user.telegram_first_name = access_request.telegram_first_name or ""
                selected_user.telegram_last_name = access_request.telegram_last_name or ""
                selected_user.save()

                # Assign VPN access to the linked user
                try:
                    self._assign_vpn_access(selected_user, access_request)
                except Exception as e:
                    # Linking succeeded; VPN assignment failure is logged but non-fatal.
                    logger.error(f"Failed to assign VPN access to user {selected_user.username}: {e}")

                return selected_user

            # Check if we can link to existing user by telegram_username
            if access_request.telegram_username:
                existing_user_by_username = User.objects.filter(
                    telegram_username__iexact=access_request.telegram_username,
                    telegram_user_id__isnull=True  # Not yet linked to Telegram
                ).first()

                if existing_user_by_username:
                    # Link telegram data to existing user
                    logger.info(f"Linking Telegram @{access_request.telegram_username} to existing user {existing_user_by_username.username}")
                    existing_user_by_username.telegram_user_id = access_request.telegram_user_id
                    existing_user_by_username.telegram_username = access_request.telegram_username
                    existing_user_by_username.telegram_first_name = access_request.telegram_first_name or ""
                    existing_user_by_username.telegram_last_name = access_request.telegram_last_name or ""
                    existing_user_by_username.save()

                    # Assign VPN access to the linked user
                    try:
                        self._assign_vpn_access(existing_user_by_username, access_request)
                    except Exception as e:
                        logger.error(f"Failed to assign VPN access to user {existing_user_by_username.username}: {e}")

                    return existing_user_by_username

            # Use desired_username if provided, otherwise fallback to Telegram data
            username = access_request.desired_username
            if not username:
                # Fallback to telegram_username, first_name or user_id
                username = access_request.telegram_username or access_request.telegram_first_name or f"tg_{access_request.telegram_user_id}"

            # Clean username (remove special characters)
            username = ''.join(c for c in username if c.isalnum() or c in '_-').lower()
            if not username:
                # Sanitizing can leave an empty string (e.g. emoji-only names);
                # fall back to a synthetic id-based name.
                username = f"tg_{access_request.telegram_user_id}"

            # Make sure username is unique
            original_username = username
            counter = 1
            while User.objects.filter(username=username).exists():
                username = f"{original_username}_{counter}"
                counter += 1

            # Create new user since no existing user found to link
            # Generate random password
            password = ''.join(secrets.choice(string.ascii_letters + string.digits) for _ in range(12))

            logger.info(f"Creating new user with username: {username}")

            # Create user
            user = User.objects.create_user(
                username=username,
                password=password,
                first_name=access_request.telegram_first_name or "",
                last_name=access_request.telegram_last_name or "",
                telegram_user_id=access_request.telegram_user_id,
                telegram_username=access_request.telegram_username or "",
                telegram_first_name=access_request.telegram_first_name or "",
                telegram_last_name=access_request.telegram_last_name or "",
                is_active=True
            )

            logger.info(f"Successfully created user {user.username} (ID: {user.id}) from Telegram request {access_request.id}")

            # Assign VPN access (inbounds and subscription groups)
            try:
                self._assign_vpn_access(user, access_request)
            except Exception as e:
                logger.error(f"Failed to assign VPN access to user {user.username}: {e}")
                # Continue even if VPN assignment fails - user is already created

            return user

        except Exception as e:
            logger.error(f"Error creating user from request {access_request.id}: {e}")
            raise

    def _assign_vpn_access(self, user, access_request):
        """Assign selected subscription groups to the user"""
        # Idempotent: get_or_create avoids duplicate subscriptions and a
        # previously deactivated subscription is simply re-activated.
        try:
            from vpn.models_xray import UserSubscription

            # Assign subscription groups
            group_count = 0
            for subscription_group in access_request.selected_subscription_groups.all():
                user_subscription, created = UserSubscription.objects.get_or_create(
                    user=user,
                    subscription_group=subscription_group,
                    defaults={'active': True}
                )
                if created:
                    logger.info(f"Assigned subscription group '{subscription_group.name}' to user {user.username}")
                    group_count += 1
                else:
                    # Ensure it's active if it already existed
                    if not user_subscription.active:
                        user_subscription.active = True
                        user_subscription.save()
                        logger.info(f"Re-activated subscription group '{subscription_group.name}' for user {user.username}")
                        group_count += 1

            logger.info(f"Successfully assigned {group_count} subscription groups to user {user.username}")

        except Exception as e:
            logger.error(f"Error assigning VPN access to user {user.username}: {e}")
            raise

    def _send_approval_notification(self, access_request):
        """Send approval notification via Telegram"""
        # Fire-and-forget: a one-shot Bot instance is created (independent of
        # the running bot manager) and the send runs in a daemon thread so the
        # admin request is never blocked. All failures are logged, not raised.
        try:
            from .models import BotSettings
            from telegram import Bot
            import asyncio

            settings = BotSettings.get_settings()
            if not settings.enabled or not settings.bot_token:
                logger.warning("Bot not configured, skipping notification")
                return

            # Create a simple Bot instance for sending notification
            # This bypasses the need for the running bot manager
            async def send_notification():
                try:
                    # Create bot with custom request settings
                    from telegram.request import HTTPXRequest

                    request_kwargs = {
                        'connection_pool_size': 1,
                        'read_timeout': settings.connection_timeout,
                        'write_timeout': settings.connection_timeout,
                        'connect_timeout': settings.connection_timeout,
                    }

                    if settings.use_proxy and settings.proxy_url:
                        request_kwargs['proxy'] = settings.proxy_url

                    request = HTTPXRequest(**request_kwargs)
                    bot = Bot(token=settings.bot_token, request=request)

                    # Send localized approval message with new keyboard
                    from telegram import ReplyKeyboardMarkup, KeyboardButton
                    language = access_request.user_language or 'en'

                    # Get localized texts
                    message = MessageLocalizer.get_message('approval_notification', language)
                    access_btn_text = MessageLocalizer.get_button_text('access', language)

                    # Create keyboard with Access button
                    keyboard = [[KeyboardButton(access_btn_text)]]
                    reply_markup = ReplyKeyboardMarkup(
                        keyboard,
                        resize_keyboard=True,
                        one_time_keyboard=False
                    )

                    await bot.send_message(
                        chat_id=access_request.telegram_user_id,
                        text=message,
                        reply_markup=reply_markup
                    )

                    logger.info(f"Sent approval notification to {access_request.telegram_user_id}")

                except Exception as e:
                    logger.error(f"Failed to send Telegram message: {e}")
                finally:
                    try:
                        # Clean up bot connection
                        # NOTE(review): if HTTPXRequest(...) itself raised,
                        # `request` is unbound here; the bare except below
                        # masks the resulting NameError — confirm and tidy.
                        await request.shutdown()
                    except:
                        pass

            # Run in thread to avoid blocking admin interface
            import threading

            def run_async_notification():
                try:
                    asyncio.run(send_notification())
                except Exception as e:
                    logger.error(f"Error in notification thread: {e}")

            thread = threading.Thread(target=run_async_notification, daemon=True)
            thread.start()

        except Exception as e:
            logger.error(f"Failed to send approval notification: {e}")

    def has_add_permission(self, request):
        # Requests are created by bot
        return False

    def has_change_permission(self, request, obj=None):
        # Allow changing only status and comment
        return True

    # NOTE(review): second save_model — this definition shadows the earlier
    # one above, so only this "auto-create user on approval" behavior runs.
    def save_model(self, request, obj, form, change):
        """Automatically handle approval and user creation"""
        # Check if this is a change to approved
        was_approved = False

        # If desired_username was changed and is empty, set default from Telegram data
        if change and 'desired_username' in form.changed_data and not obj.desired_username:
            obj.desired_username = obj.telegram_username or obj.telegram_first_name or f"tg_{obj.telegram_user_id}"

        if change and 'approved' in form.changed_data and obj.approved:
            # Set processed_by and processed_at
            if not obj.processed_by:
                obj.processed_by = request.user
            if not obj.processed_at:
                obj.processed_at = timezone.now()
            was_approved = True

        # If approved and no user created yet, create user
        if was_approved and not obj.created_user:
            try:
                logger.info(f"Auto-creating user for approved request {obj.id}")
                user = self._create_user_from_request(obj, request.user)
                if user:
                    obj.created_user = user
                    messages.success(request, f"User '{user.username}' created successfully!")
                    logger.info(f"Auto-created user {user.username} for request {obj.id}")

                    # Send approval notification
                    self._send_approval_notification(obj)
                else:
                    messages.error(request, f"Failed to create user for approved request {obj.id}")
            except Exception as e:
                messages.error(request, f"Error creating user: {e}")
                logger.error(f"Error auto-creating user for request {obj.id}: {e}")

        # Save the object
        super().save_model(request, obj, form, change)
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,75 +0,0 @@
|
|||||||
from django.apps import AppConfig
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class TelegramBotConfig(AppConfig):
    """App config that auto-starts the Telegram bot for the main web process.

    ``ready()`` is invoked once per Django process; a series of guards keeps
    the bot from being launched by management commands, test runs, or Celery
    worker/beat processes.
    """
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'telegram_bot'

    def ready(self):
        """Called when Django starts - attempt to auto-start bot if enabled"""
        import sys
        import os

        # Skip auto-start in various scenarios.  Each entry is truthy when
        # the current process should NOT run the bot.
        skip_conditions = [
            # Management commands
            'migrate' in sys.argv,
            'makemigrations' in sys.argv,
            'collectstatic' in sys.argv,
            'shell' in sys.argv,
            'test' in sys.argv,
            # Celery processes
            'celery' in sys.argv,
            'worker' in sys.argv,
            'beat' in sys.argv,
            # Environment variables that indicate worker/beat processes
            # (os.environ.get returns None when unset, which is falsy).
            os.environ.get('CELERY_WORKER_NAME'),
            os.environ.get('CELERY_BEAT'),
            # Process name detection (substring match across all argv entries)
            any('celery' in arg.lower() for arg in sys.argv),
            any('worker' in arg.lower() for arg in sys.argv),
            any('beat' in arg.lower() for arg in sys.argv),
        ]

        if any(skip_conditions):
            logger.info(f"Skipping Telegram bot auto-start in process: {' '.join(sys.argv)}")
            return

        # Additional process detection by checking if we're in main process
        try:
            # Check if this is the main Django process (not a worker)
            current_process = os.environ.get('DJANGO_SETTINGS_MODULE')
            if not current_process:
                logger.info("Skipping bot auto-start: not in main Django process")
                return
        except Exception:
            # FIX: was a bare `except:` which also swallowed SystemExit /
            # KeyboardInterrupt; narrowed to Exception, behavior otherwise kept.
            pass

        # Delay import to avoid circular imports
        try:
            from .bot import TelegramBotManager
            import threading
            import time

            def delayed_autostart():
                # Wait a bit for Django to fully initialize
                time.sleep(2)
                try:
                    manager = TelegramBotManager()
                    if manager.auto_start_if_enabled():
                        logger.info("Telegram bot auto-started successfully")
                    else:
                        logger.info("Telegram bot auto-start skipped (disabled or already running)")
                except Exception as e:
                    logger.error(f"Failed to auto-start Telegram bot: {e}")

            logger.info("Starting Telegram bot auto-start thread")
            # Start in background thread to not block Django startup
            thread = threading.Thread(target=delayed_autostart, daemon=True)
            thread.start()

        except Exception as e:
            logger.error(f"Error setting up Telegram bot auto-start: {e}")
|
|
||||||
1897
telegram_bot/bot.py
1897
telegram_bot/bot.py
File diff suppressed because it is too large
Load Diff
@@ -1,267 +0,0 @@
|
|||||||
"""
|
|
||||||
Message localization for Telegram bot
|
|
||||||
"""
|
|
||||||
from typing import Optional, Dict, Any
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Translation dictionaries
|
|
||||||
MESSAGES = {
|
|
||||||
'en': {
|
|
||||||
'help_text': "📋 Welcome! Use buttons below to navigate.\n\n📊 Access - View your VPN subscriptions\n\nFor support contact administrator.",
|
|
||||||
'access_request_created': "Access request created, please wait.",
|
|
||||||
'new_user_welcome': "Welcome! To get access to VPN services, please request access using the button below.",
|
|
||||||
'pending_request_msg': "Your access request is pending approval. Please wait for administrator to review it.",
|
|
||||||
'choose_subscription': "**Choose subscription option:**",
|
|
||||||
'all_in_one_desc': "🌍 **All-in-one** - Get all subscriptions in one link",
|
|
||||||
'group_desc': "**Group** - Get specific group subscription",
|
|
||||||
'select_option': "Select an option below:",
|
|
||||||
'no_subscriptions': "❌ You don't have any active Xray subscriptions.\n\nPlease contact administrator for access.",
|
|
||||||
'group_subscription': "**Group: {group_name}**",
|
|
||||||
'subscription_link': "**🔗 Subscription Link:**",
|
|
||||||
'web_portal': "**🌐 Web Portal:**",
|
|
||||||
'tap_to_copy': "_Tap the subscription link to copy it. Use it in your Xray client._",
|
|
||||||
'all_in_one_subscription': "🌍 **All-in-one Subscription**",
|
|
||||||
'your_access_includes': "**Your Access Includes:**",
|
|
||||||
'universal_subscription_link': "**🔗 Universal Subscription Link:**",
|
|
||||||
'all_subscriptions_note': "_This link includes all your active subscriptions. Tap to copy._",
|
|
||||||
'error_loading_subscriptions': "❌ Error loading subscriptions. Please try again later.",
|
|
||||||
'error_loading_group': "❌ Error loading group subscription. Please try again later.",
|
|
||||||
'received_content': "Received your {message_type}. An administrator will review it.",
|
|
||||||
'approval_notification': "✅ Access approved!",
|
|
||||||
'content_types': {
|
|
||||||
'photo': 'photo',
|
|
||||||
'document': 'document',
|
|
||||||
'voice': 'voice',
|
|
||||||
'video': 'video',
|
|
||||||
'content': 'content'
|
|
||||||
},
|
|
||||||
'guide_title': "📖 **VPN Setup Guide**",
|
|
||||||
'guide_choose_platform': "Select your device platform:",
|
|
||||||
'web_portal_description': "_Web portal shows your access list on one convenient page with some statistics._",
|
|
||||||
'servers_in_group': "🔒 **Servers in group:**",
|
|
||||||
|
|
||||||
# Admin messages
|
|
||||||
'admin_new_request_notification': "🔔 **New Access Request**\n\n👤 **User:** {user_info}\n📱 **Telegram:** {telegram_info}\n📅 **Date:** {date}\n\n💬 **Message:** {message}",
|
|
||||||
'admin_access_requests_title': "📋 **Pending Access Requests**",
|
|
||||||
'admin_no_pending_requests': "✅ No pending access requests",
|
|
||||||
'admin_request_item': "👤 **{user_info}**\n📅 {date}\n💬 _{message_preview}_",
|
|
||||||
'admin_choose_subscription_groups': "📦 **Choose Subscription Groups for {user_info}:**\n\nSelect groups to assign to this user:",
|
|
||||||
'admin_approval_success': "✅ **Request Approved!**\n\n👤 User: {user_info}\n📦 Groups: {groups}\n\nUser has been notified and given access.",
|
|
||||||
'admin_rejection_success': "❌ **Request Rejected**\n\n👤 User: {user_info}\n\nUser has been notified.",
|
|
||||||
'admin_request_already_processed': "⚠️ This request has already been processed by another admin.",
|
|
||||||
'admin_error_processing': "❌ Error processing request: {error}",
|
|
||||||
|
|
||||||
'android_guide': "🤖 **Android Setup Guide**\n\n**Step 1: Install the app**\nDownload V2RayTUN from Google Play:\nhttps://play.google.com/store/apps/details?id=com.v2raytun.android\n\n**Step 2: Add subscription**\n• Open the app\n• Tap the **+** button in the top right corner\n• Paste your subscription link from the bot\n• The app will automatically load all VPN servers\n\n**Step 3: Connect**\n• Choose a server from the list\n• Tap **Connect**\n• All your traffic will now go through VPN\n\n**💡 Useful settings:**\n• In settings, enable direct access for banking apps and local sites\n• You can choose specific apps to use VPN while others use direct connection\n\n**🔄 If VPN stops working:**\nTap the refresh icon next to the server list to update your subscription.",
|
|
||||||
'ios_guide': " **iOS Setup Guide**\n\n**Step 1: Install the app**\nDownload V2RayTUN from App Store:\nhttps://apps.apple.com/us/app/v2raytun/id6476628951\n\n**Step 2: Add subscription**\n• Open the app\n• Tap the **+** button in the top right corner\n• Paste your subscription link from the bot\n• The app will automatically load all VPN servers\n\n**Step 3: Connect**\n• Choose a server from the list\n• Tap **Connect**\n• All your traffic will now go through VPN\n\n**⚠️ Note for iOS users:**\nCurrently, only VLESS protocol works reliably on iOS. Other protocols may have connectivity issues.\n\n**💡 Useful settings:**\n• In settings, enable direct access for banking apps and local sites to improve performance\n\n**🔄 If VPN stops working:**\nTap the refresh icon next to the server list to update your subscription.",
|
|
||||||
'buttons': {
|
|
||||||
'access': "🌍 Get access",
|
|
||||||
'guide': "📖 Guide",
|
|
||||||
'android': "🤖 Android",
|
|
||||||
'ios': " iOS",
|
|
||||||
'web_portal': "🌐 Web Portal",
|
|
||||||
'all_in_one': "🌍 All-in-one",
|
|
||||||
'back': "⬅️ Back",
|
|
||||||
'group_prefix': "Group: ",
|
|
||||||
'request_access': "🔑 Request Access",
|
|
||||||
# Admin buttons
|
|
||||||
'access_requests': "📋 Access Requests",
|
|
||||||
'approve': "✅ Approve",
|
|
||||||
'reject': "❌ Reject",
|
|
||||||
'details': "👁 Details",
|
|
||||||
'confirm_approval': "✅ Confirm Approval",
|
|
||||||
'confirm_rejection': "❌ Confirm Rejection",
|
|
||||||
'cancel': "🚫 Cancel"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'ru': {
|
|
||||||
'help_text': "📋 Добро пожаловать! Используйте кнопки ниже для навигации.\n\n📊 Доступ - Просмотр VPN подписок\n\nДля поддержки обратитесь к администратору.",
|
|
||||||
'access_request_created': "Запрос на доступ создан, ожидайте.",
|
|
||||||
'new_user_welcome': "Добро пожаловать! Для получения доступа к VPN сервисам, пожалуйста запросите доступ с помощью кнопки ниже.",
|
|
||||||
'pending_request_msg': "Ваш запрос на доступ ожидает одобрения. Пожалуйста, дождитесь рассмотрения администратором.",
|
|
||||||
'choose_subscription': "**Выберите вариант подписки:**",
|
|
||||||
'all_in_one_desc': "🌍 **Все в одном** - Получить все подписки в одной ссылке",
|
|
||||||
'group_desc': "**Группа** - Получить подписку на группу",
|
|
||||||
'select_option': "Выберите вариант ниже:",
|
|
||||||
'no_subscriptions': "❌ У вас нет активных Xray подписок.\n\nОбратитесь к администратору для получения доступа.",
|
|
||||||
'group_subscription': "**Группа: {group_name}**",
|
|
||||||
'subscription_link': "**🔗 **",
|
|
||||||
'web_portal': "**🌐 Веб-портал пользователя:**",
|
|
||||||
'tap_to_copy': "_Нажмите на ссылку чтобы скопировать. Используйте в вашем Xray клиенте как подписку._",
|
|
||||||
'all_in_one_subscription': "🌍 **Подписка «Все в одном»**",
|
|
||||||
'your_access_includes': "**Ваш доступ включает:**",
|
|
||||||
'universal_subscription_link': "**🔗 Универсальная ссылка на подписку:**",
|
|
||||||
'all_subscriptions_note': "_Эта ссылка включает все ваши активные подписки. Нажмите чтобы скопировать._",
|
|
||||||
'error_loading_subscriptions': "❌ Ошибка загрузки подписок. Попробуйте позже.",
|
|
||||||
'error_loading_group': "❌ Ошибка загрузки подписки группы. Попробуйте позже.",
|
|
||||||
'received_content': "Получен ваш {message_type}. Администратор его рассмотрит.",
|
|
||||||
'approval_notification': "✅ Доступ одобрен!",
|
|
||||||
'content_types': {
|
|
||||||
'photo': 'фото',
|
|
||||||
'document': 'документ',
|
|
||||||
'voice': 'голосовое сообщение',
|
|
||||||
'video': 'видео',
|
|
||||||
'content': 'контент'
|
|
||||||
},
|
|
||||||
'guide_title': "📖 **Руководство по настройке VPN**",
|
|
||||||
'guide_choose_platform': "Выберите платформу вашего устройства:",
|
|
||||||
'web_portal_description': "_Веб-портал показывает список ваших доступов на одной удобной странице с некоторой статистикой._",
|
|
||||||
'servers_in_group': "🔒 **Серверы в группе:**",
|
|
||||||
|
|
||||||
# Admin messages
|
|
||||||
'admin_new_request_notification': "🔔 **Новый запрос на доступ**\n\n👤 **Пользователь:** {user_info}\n📱 **Telegram:** {telegram_info}\n📅 **Дата:** {date}\n\n💬 **Сообщение:** {message}",
|
|
||||||
'admin_access_requests_title': "📋 **Ожидающие запросы на доступ**",
|
|
||||||
'admin_no_pending_requests': "✅ Нет ожидающих запросов на доступ",
|
|
||||||
'admin_request_item': "👤 **{user_info}**\n📅 {date}\n💬 _{message_preview}_",
|
|
||||||
'admin_choose_subscription_groups': "📦 **Выберите группы подписки для {user_info}:**\n\nВыберите группы для назначения этому пользователю:",
|
|
||||||
'admin_approval_success': "✅ **Запрос одобрен!**\n\n👤 Пользователь: {user_info}\n📦 Группы: {groups}\n\nПользователь уведомлен и получил доступ.",
|
|
||||||
'admin_rejection_success': "❌ **Запрос отклонен**\n\n👤 Пользователь: {user_info}\n\nПользователь уведомлен.",
|
|
||||||
'admin_request_already_processed': "⚠️ Этот запрос уже обработан другим администратором.",
|
|
||||||
'admin_error_processing': "❌ Ошибка обработки запроса: {error}",
|
|
||||||
|
|
||||||
'android_guide': "🤖 **Руководство для Android**\n\n**Шаг 1: Установите приложение**\nСкачайте V2RayTUN из Google Play:\nhttps://play.google.com/store/apps/details?id=com.v2raytun.android\n\n**Шаг 2: Добавьте подписку**\n• Откройте приложение\n• Нажмите кнопку **+** в правом верхнем углу\n• Вставьте ссылку на подписку из бота\n• Приложение автоматически загрузит список VPN серверов\n\n**Шаг 3: Подключитесь**\n• Выберите сервер из списка\n• Нажмите **Подключиться**\n• Весь ваш трафик будет проходить через VPN\n\n**💡 Полезные настройки:**\n• В настройках включите прямой доступ для банковских приложений и местных сайтов\n• Вы можете выбрать конкретные приложения для использования VPN, в то время как остальные будут работать напрямую\n\n**🔄 Если VPN перестал работать:**\nНажмите иконку обновления рядом со списком серверов для обновления подписки.",
|
|
||||||
'ios_guide': " **Руководство для iOS**\n\n**Шаг 1: Установите приложение**\nСкачайте V2RayTUN из App Store:\nhttps://apps.apple.com/us/app/v2raytun/id6476628951\n\n**Шаг 2: Добавьте подписку**\n• Откройте приложение\n• Нажмите кнопку **+** в правом верхнем углу\n• Вставьте ссылку на подписку из бота\n• Приложение автоматически загрузит список VPN серверов\n\n**Шаг 3: Подключитесь**\n• Выберите сервер из списка\n• Нажмите **Подключиться**\n• Весь ваш трафик будет проходить через VPN\n\n**⚠️ Важно для пользователей iOS:**\nВ настоящее время на iOS стабильно работает только протокол VLESS. Другие протоколы могут иметь проблемы с подключением.\n\n**💡 Полезные настройки:**\n• В настройках включите прямой доступ для банковских приложений и местных сайтов для улучшения производительности\n\n**🔄 Если VPN перестал работать:**\nНажмите иконку обновления рядом со списком серверов для обновления подписки.",
|
|
||||||
'buttons': {
|
|
||||||
'access': "🌍 Получить VPN",
|
|
||||||
'guide': "📖 Гайд",
|
|
||||||
'android': "🤖 Android",
|
|
||||||
'ios': " iOS",
|
|
||||||
'web_portal': "🌐 Веб-портал",
|
|
||||||
'all_in_one': "🌍 Все в одном",
|
|
||||||
'back': "⬅️ Назад",
|
|
||||||
'group_prefix': "Группа: ",
|
|
||||||
'request_access': "🔑 Запросить доступ",
|
|
||||||
# Admin buttons
|
|
||||||
'access_requests': "📋 Запросы на доступ",
|
|
||||||
'approve': "✅ Одобрить",
|
|
||||||
'reject': "❌ Отклонить",
|
|
||||||
'details': "👁 Подробности",
|
|
||||||
'confirm_approval': "✅ Подтвердить одобрение",
|
|
||||||
'confirm_rejection': "❌ Подтвердить отклонение",
|
|
||||||
'cancel': "🚫 Отмена"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class MessageLocalizer:
|
|
||||||
"""Class for bot message localization"""
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_user_language(telegram_user) -> str:
|
|
||||||
"""
|
|
||||||
Determines user language from Telegram language_code
|
|
||||||
|
|
||||||
Args:
|
|
||||||
telegram_user: Telegram user object
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: Language code ('ru' or 'en')
|
|
||||||
"""
|
|
||||||
if not telegram_user:
|
|
||||||
return 'en'
|
|
||||||
|
|
||||||
language_code = getattr(telegram_user, 'language_code', None)
|
|
||||||
|
|
||||||
if not language_code:
|
|
||||||
return 'en'
|
|
||||||
|
|
||||||
# Support Russian and English
|
|
||||||
if language_code.startswith('ru'):
|
|
||||||
return 'ru'
|
|
||||||
else:
|
|
||||||
return 'en'
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_message(key: str, language: str = 'en', **kwargs) -> str:
|
|
||||||
"""
|
|
||||||
Gets localized message
|
|
||||||
|
|
||||||
Args:
|
|
||||||
key: Message key
|
|
||||||
language: Language code
|
|
||||||
**kwargs: Formatting parameters
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: Localized message
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
# Fallback to English if language not supported
|
|
||||||
if language not in MESSAGES:
|
|
||||||
language = 'en'
|
|
||||||
|
|
||||||
message = MESSAGES[language].get(key, MESSAGES['en'].get(key, f"Missing translation: {key}"))
|
|
||||||
|
|
||||||
# Format with parameters
|
|
||||||
if kwargs:
|
|
||||||
try:
|
|
||||||
message = message.format(**kwargs)
|
|
||||||
except (KeyError, ValueError) as e:
|
|
||||||
logger.warning(f"Error formatting message {key}: {e}")
|
|
||||||
|
|
||||||
return message
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error getting message {key} for language {language}: {e}")
|
|
||||||
return f"Error: {key}"
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_button_text(button_key: str, language: str = 'en') -> str:
|
|
||||||
"""
|
|
||||||
Gets button text
|
|
||||||
|
|
||||||
Args:
|
|
||||||
button_key: Button key
|
|
||||||
language: Language code
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: Button text
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
if language not in MESSAGES:
|
|
||||||
language = 'en'
|
|
||||||
|
|
||||||
buttons = MESSAGES[language].get('buttons', {})
|
|
||||||
return buttons.get(button_key, MESSAGES['en']['buttons'].get(button_key, button_key))
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error getting button text {button_key} for language {language}: {e}")
|
|
||||||
return button_key
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def get_content_type_name(content_type: str, language: str = 'en') -> str:
|
|
||||||
"""
|
|
||||||
Gets localized content type name
|
|
||||||
|
|
||||||
Args:
|
|
||||||
content_type: Content type
|
|
||||||
language: Language code
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: Localized name
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
if language not in MESSAGES:
|
|
||||||
language = 'en'
|
|
||||||
|
|
||||||
content_types = MESSAGES[language].get('content_types', {})
|
|
||||||
return content_types.get(content_type, content_type)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error getting content type {content_type} for language {language}: {e}")
|
|
||||||
return content_type
|
|
||||||
|
|
||||||
# Convenience functions for use in code
|
|
||||||
def get_localized_message(telegram_user, message_key: str, **kwargs) -> str:
|
|
||||||
"""Get localized message for user"""
|
|
||||||
language = MessageLocalizer.get_user_language(telegram_user)
|
|
||||||
return MessageLocalizer.get_message(message_key, language, **kwargs)
|
|
||||||
|
|
||||||
def get_localized_button(telegram_user, button_key: str) -> str:
|
|
||||||
"""Get localized button text for user"""
|
|
||||||
language = MessageLocalizer.get_user_language(telegram_user)
|
|
||||||
return MessageLocalizer.get_button_text(button_key, language)
|
|
||||||
|
|
||||||
def get_user_language(telegram_user) -> str:
|
|
||||||
"""Get user language"""
|
|
||||||
return MessageLocalizer.get_user_language(telegram_user)
|
|
||||||
@@ -1,99 +0,0 @@
|
|||||||
import logging
|
|
||||||
import signal
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
from django.core.management.base import BaseCommand
|
|
||||||
from django.utils import timezone
|
|
||||||
from telegram_bot.models import BotSettings, BotStatus
|
|
||||||
from telegram_bot.bot import TelegramBotManager
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
|
||||||
help = 'Run the Telegram bot'
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
self.bot_manager = None
|
|
||||||
self.running = False
|
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
|
||||||
parser.add_argument(
|
|
||||||
'--force',
|
|
||||||
action='store_true',
|
|
||||||
help='Force start even if bot is disabled in settings',
|
|
||||||
)
|
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
"""Main command handler"""
|
|
||||||
# Set up signal handlers
|
|
||||||
signal.signal(signal.SIGINT, self.signal_handler)
|
|
||||||
signal.signal(signal.SIGTERM, self.signal_handler)
|
|
||||||
|
|
||||||
# Check settings
|
|
||||||
settings = BotSettings.get_settings()
|
|
||||||
|
|
||||||
if not settings.bot_token:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.ERROR('Bot token is not configured. Please configure it in the admin panel.')
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
if not settings.enabled and not options['force']:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.WARNING('Bot is disabled in settings. Use --force to override.')
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Initialize bot manager
|
|
||||||
self.bot_manager = TelegramBotManager()
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Start the bot
|
|
||||||
self.stdout.write(self.style.SUCCESS('Starting Telegram bot...'))
|
|
||||||
self.bot_manager.start()
|
|
||||||
self.running = True
|
|
||||||
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.SUCCESS(f'Bot is running. Press Ctrl+C to stop.')
|
|
||||||
)
|
|
||||||
|
|
||||||
# Keep the main thread alive
|
|
||||||
while self.running:
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
# Check if bot is still running
|
|
||||||
if not self.bot_manager.is_running:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.ERROR('Bot stopped unexpectedly. Check logs for errors.')
|
|
||||||
)
|
|
||||||
break
|
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
self.stdout.write('\nReceived interrupt signal...')
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.ERROR(f'Error running bot: {e}')
|
|
||||||
)
|
|
||||||
logger.error(f'Error running bot: {e}', exc_info=True)
|
|
||||||
|
|
||||||
# Update status
|
|
||||||
status = BotStatus.get_status()
|
|
||||||
status.is_running = False
|
|
||||||
status.last_error = str(e)
|
|
||||||
status.last_stopped = timezone.now()
|
|
||||||
status.save()
|
|
||||||
|
|
||||||
finally:
|
|
||||||
# Stop the bot
|
|
||||||
if self.bot_manager:
|
|
||||||
self.stdout.write('Stopping bot...')
|
|
||||||
self.bot_manager.stop()
|
|
||||||
self.stdout.write(self.style.SUCCESS('Bot stopped.'))
|
|
||||||
|
|
||||||
def signal_handler(self, signum, frame):
|
|
||||||
"""Handle shutdown signals"""
|
|
||||||
self.stdout.write('\nShutting down gracefully...')
|
|
||||||
self.running = False
|
|
||||||
@@ -1,112 +0,0 @@
|
|||||||
import logging
|
|
||||||
import os
|
|
||||||
from django.core.management.base import BaseCommand
|
|
||||||
from django.utils import timezone
|
|
||||||
from telegram_bot.models import BotSettings, BotStatus
|
|
||||||
from telegram_bot.bot import TelegramBotManager
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
|
||||||
help = 'Check Telegram bot status and optionally start it'
|
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
|
||||||
parser.add_argument(
|
|
||||||
'--auto-start',
|
|
||||||
action='store_true',
|
|
||||||
help='Automatically start bot if enabled in settings',
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
'--sync-status',
|
|
||||||
action='store_true',
|
|
||||||
help='Sync database status with real bot state',
|
|
||||||
)
|
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
"""Check bot status"""
|
|
||||||
try:
|
|
||||||
manager = TelegramBotManager()
|
|
||||||
settings = BotSettings.get_settings()
|
|
||||||
status = BotStatus.get_status()
|
|
||||||
|
|
||||||
# Show current configuration
|
|
||||||
self.stdout.write(f"Bot Configuration:")
|
|
||||||
self.stdout.write(f" Enabled: {settings.enabled}")
|
|
||||||
self.stdout.write(f" Token configured: {'Yes' if settings.bot_token else 'No'}")
|
|
||||||
|
|
||||||
# Show status
|
|
||||||
real_running = manager.is_running
|
|
||||||
db_running = status.is_running
|
|
||||||
|
|
||||||
self.stdout.write(f"\nBot Status:")
|
|
||||||
self.stdout.write(f" Database status: {'Running' if db_running else 'Stopped'}")
|
|
||||||
self.stdout.write(f" Real status: {'Running' if real_running else 'Stopped'}")
|
|
||||||
|
|
||||||
# Check lock file status
|
|
||||||
from django.conf import settings as django_settings
|
|
||||||
lock_dir = os.path.join(getattr(django_settings, 'BASE_DIR', '/tmp'), 'telegram_bot_locks')
|
|
||||||
lock_path = os.path.join(lock_dir, 'telegram_bot.lock')
|
|
||||||
|
|
||||||
if os.path.exists(lock_path):
|
|
||||||
try:
|
|
||||||
with open(lock_path, 'r') as f:
|
|
||||||
lock_pid = f.read().strip()
|
|
||||||
self.stdout.write(f" Lock file: exists (PID: {lock_pid})")
|
|
||||||
except:
|
|
||||||
self.stdout.write(f" Lock file: exists (unreadable)")
|
|
||||||
else:
|
|
||||||
self.stdout.write(f" Lock file: not found")
|
|
||||||
|
|
||||||
if db_running != real_running:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.WARNING("⚠️ Status mismatch detected!")
|
|
||||||
)
|
|
||||||
|
|
||||||
if options['sync_status']:
|
|
||||||
status.is_running = real_running
|
|
||||||
if not real_running:
|
|
||||||
status.last_stopped = timezone.now()
|
|
||||||
status.save()
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.SUCCESS("✅ Status synchronized")
|
|
||||||
)
|
|
||||||
|
|
||||||
# Show timestamps
|
|
||||||
if status.last_started:
|
|
||||||
self.stdout.write(f" Last started: {status.last_started}")
|
|
||||||
if status.last_stopped:
|
|
||||||
self.stdout.write(f" Last stopped: {status.last_stopped}")
|
|
||||||
if status.last_error:
|
|
||||||
self.stdout.write(f" Last error: {status.last_error}")
|
|
||||||
|
|
||||||
# Auto-start if requested
|
|
||||||
if options['auto_start']:
|
|
||||||
if not real_running and settings.enabled and settings.bot_token:
|
|
||||||
self.stdout.write("\nAttempting to start bot...")
|
|
||||||
try:
|
|
||||||
manager.start()
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.SUCCESS("✅ Bot started successfully")
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.ERROR(f"❌ Failed to start bot: {e}")
|
|
||||||
)
|
|
||||||
elif real_running:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.SUCCESS("✅ Bot is already running")
|
|
||||||
)
|
|
||||||
elif not settings.enabled:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.WARNING("⚠️ Bot is disabled in settings")
|
|
||||||
)
|
|
||||||
elif not settings.bot_token:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.ERROR("❌ Bot token not configured")
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.stdout.write(
|
|
||||||
self.style.ERROR(f"❌ Error checking bot status: {e}")
|
|
||||||
)
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-14 11:18
|
|
||||||
|
|
||||||
import django.db.models.deletion
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
initial = True
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='BotSettings',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('bot_token', models.CharField(help_text='Telegram Bot Token from @BotFather', max_length=255)),
|
|
||||||
('enabled', models.BooleanField(default=False, help_text='Enable/Disable the bot')),
|
|
||||||
('welcome_message', models.TextField(default='Hello! Your message has been received. An administrator will review it.', help_text='Message sent when user starts conversation')),
|
|
||||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
|
||||||
('updated_at', models.DateTimeField(auto_now=True)),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'verbose_name': 'Bot Settings',
|
|
||||||
'verbose_name_plural': 'Bot Settings',
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='BotStatus',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('is_running', models.BooleanField(default=False)),
|
|
||||||
('last_started', models.DateTimeField(blank=True, null=True)),
|
|
||||||
('last_stopped', models.DateTimeField(blank=True, null=True)),
|
|
||||||
('last_error', models.TextField(blank=True)),
|
|
||||||
('last_update_id', models.BigIntegerField(blank=True, help_text='Last processed update ID from Telegram', null=True)),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'verbose_name': 'Bot Status',
|
|
||||||
'verbose_name_plural': 'Bot Status',
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='TelegramMessage',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('direction', models.CharField(choices=[('incoming', 'Incoming'), ('outgoing', 'Outgoing')], db_index=True, max_length=10)),
|
|
||||||
('telegram_user_id', models.BigIntegerField(db_index=True)),
|
|
||||||
('telegram_username', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
|
|
||||||
('telegram_first_name', models.CharField(blank=True, max_length=255, null=True)),
|
|
||||||
('telegram_last_name', models.CharField(blank=True, max_length=255, null=True)),
|
|
||||||
('chat_id', models.BigIntegerField(db_index=True)),
|
|
||||||
('message_id', models.BigIntegerField(blank=True, null=True)),
|
|
||||||
('message_text', models.TextField(blank=True)),
|
|
||||||
('raw_data', models.JSONField(blank=True, default=dict, help_text='Full message data from Telegram')),
|
|
||||||
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
|
|
||||||
('linked_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='telegram_messages', to=settings.AUTH_USER_MODEL)),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'verbose_name': 'Telegram Message',
|
|
||||||
'verbose_name_plural': 'Telegram Messages',
|
|
||||||
'ordering': ['-created_at'],
|
|
||||||
'indexes': [models.Index(fields=['-created_at', 'direction'], name='telegram_bo_created_19b81b_idx'), models.Index(fields=['telegram_user_id', '-created_at'], name='telegram_bo_telegra_f71f27_idx')],
|
|
||||||
},
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-14 12:09
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0001_initial'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='api_base_url',
|
|
||||||
field=models.URLField(blank=True, default='https://api.telegram.org', help_text='Telegram API base URL (change for local bot API server)'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='connection_timeout',
|
|
||||||
field=models.IntegerField(default=30, help_text='Connection timeout in seconds'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='proxy_url',
|
|
||||||
field=models.URLField(blank=True, help_text='Proxy URL (e.g., http://proxy:8080 or socks5://proxy:1080)'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='use_proxy',
|
|
||||||
field=models.BooleanField(default=False, help_text='Enable proxy for Telegram API connections'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-14 12:24
|
|
||||||
|
|
||||||
import django.db.models.deletion
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0002_add_connection_settings'),
|
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='AccessRequest',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('telegram_user_id', models.BigIntegerField(db_index=True, help_text='Telegram user ID who made the request')),
|
|
||||||
('telegram_username', models.CharField(blank=True, help_text='Telegram username (without @)', max_length=255, null=True)),
|
|
||||||
('telegram_first_name', models.CharField(blank=True, help_text='First name from Telegram', max_length=255, null=True)),
|
|
||||||
('telegram_last_name', models.CharField(blank=True, help_text='Last name from Telegram', max_length=255, null=True)),
|
|
||||||
('message_text', models.TextField(help_text='The message sent by user when requesting access')),
|
|
||||||
('chat_id', models.BigIntegerField(help_text='Telegram chat ID for sending notifications')),
|
|
||||||
('status', models.CharField(choices=[('pending', 'Pending'), ('approved', 'Approved'), ('rejected', 'Rejected')], db_index=True, default='pending', max_length=20)),
|
|
||||||
('admin_comment', models.TextField(blank=True, help_text='Admin comment for approval/rejection')),
|
|
||||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
|
||||||
('processed_at', models.DateTimeField(blank=True, null=True)),
|
|
||||||
('created_user', models.ForeignKey(blank=True, help_text='User created from this request (when approved)', null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
|
|
||||||
('first_message', models.ForeignKey(blank=True, help_text='First message from this user', null=True, on_delete=django.db.models.deletion.SET_NULL, to='telegram_bot.telegrammessage')),
|
|
||||||
('processed_by', models.ForeignKey(blank=True, help_text='Admin who processed this request', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='processed_requests', to=settings.AUTH_USER_MODEL)),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'verbose_name': 'Access Request',
|
|
||||||
'verbose_name_plural': 'Access Requests',
|
|
||||||
'ordering': ['-created_at'],
|
|
||||||
'indexes': [models.Index(fields=['telegram_user_id'], name='telegram_bo_telegra_e3429d_idx'), models.Index(fields=['status', '-created_at'], name='telegram_bo_status_cf9310_idx'), models.Index(fields=['-created_at'], name='telegram_bo_created_c82a74_idx')],
|
|
||||||
'constraints': [models.UniqueConstraint(fields=('telegram_user_id',), name='unique_telegram_user_request')],
|
|
||||||
},
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-14 13:49
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0003_accessrequest'),
|
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.RemoveIndex(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='telegram_bo_status_cf9310_idx',
|
|
||||||
),
|
|
||||||
migrations.RemoveField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='status',
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='approved',
|
|
||||||
field=models.BooleanField(db_index=True, default=False, help_text='Request approved by administrator'),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='admin_comment',
|
|
||||||
field=models.TextField(blank=True, help_text='Admin comment for approval'),
|
|
||||||
),
|
|
||||||
migrations.AddIndex(
|
|
||||||
model_name='accessrequest',
|
|
||||||
index=models.Index(fields=['approved', '-created_at'], name='telegram_bo_approve_7ae92d_idx'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-14 22:24
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0004_remove_accessrequest_telegram_bo_status_cf9310_idx_and_more'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.DeleteModel(
|
|
||||||
name='BotStatus',
|
|
||||||
),
|
|
||||||
migrations.RemoveField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='welcome_message',
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='help_message',
|
|
||||||
field=models.TextField(default='📋 Available commands:\n/start - Start conversation\n📊 Access - View your VPN subscriptions\n\nFor support contact administrator.', help_text='Help message sent for unrecognized commands'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-14 22:30
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0005_delete_botstatus_remove_botsettings_welcome_message_and_more'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='desired_username',
|
|
||||||
field=models.CharField(blank=True, help_text='Desired username for VPN user (defaults to Telegram username)', max_length=150),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-14 22:54
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0006_accessrequest_desired_username'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.RemoveField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='help_message',
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='user_language',
|
|
||||||
field=models.CharField(default='en', help_text="User's preferred language (en/ru)", max_length=10),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='telegrammessage',
|
|
||||||
name='user_language',
|
|
||||||
field=models.CharField(default='en', help_text="User's preferred language (en/ru)", max_length=10),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
# Generated migration for adding selected_existing_user field
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
|
||||||
('telegram_bot', '0007_remove_botsettings_help_message_and_more'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='selected_existing_user',
|
|
||||||
field=models.ForeignKey(
|
|
||||||
blank=True,
|
|
||||||
help_text='Existing user selected to link with this Telegram account',
|
|
||||||
null=True,
|
|
||||||
on_delete=django.db.models.deletion.SET_NULL,
|
|
||||||
related_name='selected_for_requests',
|
|
||||||
to=settings.AUTH_USER_MODEL
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-15 12:31
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0008_accessrequest_selected_existing_user'),
|
|
||||||
('vpn', '0026_alter_subscriptiongroup_options'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='selected_inbounds',
|
|
||||||
field=models.ManyToManyField(blank=True, help_text='Inbound templates to assign to the user', to='vpn.inbound'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='accessrequest',
|
|
||||||
name='selected_subscription_groups',
|
|
||||||
field=models.ManyToManyField(blank=True, help_text='Subscription groups to assign to the user', to='vpn.subscriptiongroup'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
# Generated by Django 5.1.7 on 2025-08-15 13:00
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('telegram_bot', '0009_accessrequest_selected_inbounds_and_more'),
|
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='botsettings',
|
|
||||||
name='telegram_admins',
|
|
||||||
field=models.ManyToManyField(blank=True, help_text='Users with linked Telegram accounts who will have admin access in the bot', related_name='bot_admin_settings', to=settings.AUTH_USER_MODEL),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user