mirror of https://github.com/house-of-vanity/OutFleet.git
synced 2025-12-16 17:37:51 +00:00

Compare commits (26 commits, SHA1):

656209ee6e, c189562ac2, 856dcc9f44, 5d826545b0, b9f0687788, 2efd5873d5,
c05d2f6223, 7e8831b89e, 78bf75b24e, c6892b1a73, dae787657c, d80ac56b83,
d972f10f83, 42c8016d9c, e4984dd29d, 76afa0797b, 59b8cbb582, 572b5e19c0,
2b5b09a213, 0386ab4976, f59ef73c12, e20c8d69fd, dedb7287f7, d5d6ebdf7b,
0e0e90c946, 8aff8f2fb5
.env.example (new file, 31 lines)

```
# Environment Variables Example for Xray Admin Panel
# Copy this file to .env and modify the values as needed

# Database Configuration
DATABASE_URL=postgresql://xray_admin:password@localhost:5432/xray_admin
XRAY_ADMIN__DATABASE__MAX_CONNECTIONS=20
XRAY_ADMIN__DATABASE__CONNECTION_TIMEOUT=30
XRAY_ADMIN__DATABASE__AUTO_MIGRATE=true

# Web Server Configuration
XRAY_ADMIN__WEB__HOST=0.0.0.0
XRAY_ADMIN__WEB__PORT=8080
XRAY_ADMIN__WEB__JWT_SECRET=your-super-secret-jwt-key-change-this
XRAY_ADMIN__WEB__JWT_EXPIRY=86400

# Telegram Bot Configuration
TELEGRAM_BOT_TOKEN=1234567890:ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghi
XRAY_ADMIN__TELEGRAM__WEBHOOK_URL=https://your-domain.com/telegram/webhook

# Xray Configuration
XRAY_ADMIN__XRAY__DEFAULT_API_PORT=62789
XRAY_ADMIN__XRAY__HEALTH_CHECK_INTERVAL=30

# Logging Configuration
XRAY_ADMIN__LOGGING__LEVEL=info
XRAY_ADMIN__LOGGING__FILE_PATH=./logs/xray-admin.log
XRAY_ADMIN__LOGGING__JSON_FORMAT=false

# Runtime Environment
RUST_ENV=development
ENVIRONMENT=development
```
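The double-underscore separators suggest nested configuration keys. A minimal sketch, assuming the `config` crate listed in Cargo.toml and hypothetical field names (the real structs live in src/config/), of how such variables could map onto a typed settings struct:

```rust
use config::{Config, Environment};
use serde::Deserialize;

// Hypothetical shape used only for illustration.
#[derive(Debug, Deserialize)]
struct DatabaseSettings {
    max_connections: Option<u32>,
    connection_timeout: Option<u64>,
    auto_migrate: Option<bool>,
}

#[derive(Debug, Deserialize)]
struct Settings {
    database: DatabaseSettings,
}

fn load_from_env() -> Result<Settings, config::ConfigError> {
    // XRAY_ADMIN__DATABASE__MAX_CONNECTIONS=20 becomes settings.database.max_connections.
    Config::builder()
        .add_source(Environment::with_prefix("XRAY_ADMIN").separator("__"))
        .build()?
        .try_deserialize()
}
```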
.github/workflows/main.yml (vendored, deleted, 51 lines)

```yaml
name: Docker hub build

on:
  push:
    branches:
      - 'django'

jobs:
  docker:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      -
        name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set outputs
        id: vars
        run: |
          echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
          echo "sha_full=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
          echo "build_date=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_OUTPUT
          echo "branch_name=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
      - name: Check outputs
        run: |
          echo "Short SHA: ${{ steps.vars.outputs.sha_short }}"
          echo "Full SHA: ${{ steps.vars.outputs.sha_full }}"
          echo "Build Date: ${{ steps.vars.outputs.build_date }}"
          echo "Branch: ${{ steps.vars.outputs.branch_name }}"
      -
        name: Build and push
        uses: docker/build-push-action@v5
        with:
          platforms: linux/amd64,linux/arm64
          push: true
          cache-from: type=registry,ref=ultradesu/outfleet:buildcache
          cache-to: type=registry,ref=ultradesu/outfleet:buildcache,mode=max
          build-args: |
            GIT_COMMIT=${{ steps.vars.outputs.sha_full }}
            GIT_COMMIT_SHORT=${{ steps.vars.outputs.sha_short }}
            BUILD_DATE=${{ steps.vars.outputs.build_date }}
            BRANCH_NAME=${{ steps.vars.outputs.branch_name }}
          tags: ultradesu/outfleet:v2,ultradesu/outfleet:${{ steps.vars.outputs.sha_short }}
```
.github/workflows/rust.yml (vendored, new file, 61 lines)

```yaml
name: Rust Docker Build

on:
  push:
    branches:
      - 'RUST'

env:
  REGISTRY: docker.io
  IMAGE_NAME: ultradesu/outfleet

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Extract version from Cargo.toml
        id: extract_version
        run: |
          VERSION=$(grep '^version = ' Cargo.toml | head -1 | sed 's/version = "\(.*\)"/\1/')
          echo "cargo_version=$VERSION" >> $GITHUB_OUTPUT
          echo "Extracted version: $VERSION"

      - name: Set build variables
        id: vars
        run: |
          echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
          echo "sha_full=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
          echo "build_date=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_OUTPUT
          echo "branch_name=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64
          push: true
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            GIT_COMMIT=${{ steps.vars.outputs.sha_full }}
            GIT_COMMIT_SHORT=${{ steps.vars.outputs.sha_short }}
            BUILD_DATE=${{ steps.vars.outputs.build_date }}
            BRANCH_NAME=${{ steps.vars.outputs.branch_name }}
            CARGO_VERSION=${{ steps.extract_version.outputs.cargo_version }}
          tags: |
            ${{ env.IMAGE_NAME }}:rs-${{ steps.extract_version.outputs.cargo_version }}
            ${{ env.IMAGE_NAME }}:rs-${{ steps.extract_version.outputs.cargo_version }}-${{ steps.vars.outputs.sha_short }}
            ${{ env.IMAGE_NAME }}:rust-latest
```
.gitignore (vendored, modified)

```diff
@@ -1,21 +1,10 @@
-db.sqlite3
-debug.log
 *.swp
 *.swo
-*.pyc
-staticfiles/
+/target/
-*.__pycache__.*
+config.toml
-celerybeat-schedule*
 
 # macOS system files
 ._*
 .DS_Store
-
-# Virtual environments
-venv/
-.venv/
-env/
-
-# Temporary files
-/tmp/
-*.tmp
```
.vscode/launch.json (vendored, deleted, 64 lines)

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Django VPN app",
            "type": "debugpy",
            "request": "launch",
            "env": {
                "POSTGRES_PORT": "5433",
                "DJANGO_SETTINGS_MODULE": "mysite.settings",
                "EXTERNAL_ADDRESS": "http://localhost:8000"
            },
            "args": [
                "runserver",
                "0.0.0.0:8000"
            ],
            "django": true,
            "autoStartBrowser": false,
            "program": "${workspaceFolder}/manage.py"
        },
        {
            "name": "Celery Worker",
            "type": "debugpy",
            "request": "launch",
            "module": "celery",
            "args": [
                "-A", "mysite",
                "worker",
                "--loglevel=info"
            ],
            "env": {
                "POSTGRES_PORT": "5433",
                "DJANGO_SETTINGS_MODULE": "mysite.settings"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "Celery Beat",
            "type": "debugpy",
            "request": "launch",
            "module": "celery",
            "args": [
                "-A", "mysite",
                "beat",
                "--loglevel=info"
            ],
            "env": {
                "POSTGRES_PORT": "5433",
                "DJANGO_SETTINGS_MODULE": "mysite.settings"
            },
            "console": "integratedTerminal"
        }
    ],
    "compounds": [
        {
            "name": "Run Django, Celery Worker, and Celery Beat",
            "configurations": [
                "Django VPN app",
                "Celery Worker",
                "Celery Beat"
            ]
        }
    ]
}
```
API.md (new file, 679 lines)

# OutFleet Xray Admin API

Base URL: `http://localhost:8080`

## Overview
Complete API documentation for OutFleet - a web admin panel for managing xray-core VPN proxy servers.

## Base Endpoints

### Health Check
- `GET /health` - Service health check

**Response:**
```json
{
  "status": "ok",
  "service": "xray-admin",
  "version": "0.1.0"
}
```

### User Subscription
- `GET /sub/{user_id}` - Get all user configuration links (subscription endpoint)

**Description:** Returns all VPN configuration links for a specific user, one per line. This endpoint is designed for VPN clients that support subscription URLs for automatic configuration updates.

**Path Parameters:**
- `user_id` (UUID) - The user's unique identifier

**Response:**
- **Content-Type:** `text/plain; charset=utf-8`
- **Success (200):** Base64 encoded string containing configuration URIs (one per line when decoded)
- **Not Found (404):** User doesn't exist
- **No Content:** Returns base64 encoded comment if no configurations available

**Example Response:**
```
dm1lc3M6Ly9leUoySWpvaU1pSXNJbkJ6SWpvaVUyVnlkbVZ5TVNJc0ltRmtaQ0k2SWpFeU55NHdMakF1TVM0eElpd2ljRzl5ZENJNklqUTBNeUlzSWxsa0lqb2lNVEl6TkRVMk56Z3RNVEl6TkMwMU5qYzRMVGxoWW1NdE1USXpORFUyTnpnNVlXSmpJaXdpWVdsa0lqb2lNQ0lzSW5Oamj0SWpvaVlYVjBieUlzSW01bGRDSTZJblJqY0NJc0luUjVjR1VpT2lKdWIyNWxJaXdpYUc5emRDSTZJaUlzSW5CaGRHZ2lPaUlpTEhKMGJITWlPaUowYkhNaUxGTnVhU0k2SWlKOQ0Kdmxlc3M6Ly91dWlkQGhvc3RuYW1lOnBvcnQ/ZW5jcnlwdGlvbj1ub25lJnNlY3VyaXR5PXRscyZ0eXBlPXRjcCZoZWFkZXJUeXBlPW5vbmUjU2VydmVyTmFtZQ0Kc3M6Ly9ZV1Z6TFRJMk5TMW5ZMjFBY0dGemMzZHZjbVE2TVRJNExqQXVNQzR5T2pnd09EQT0jU2VydmVyMg0K
```

**Decoded Example:**
```
vmess://eyJ2IjoiMiIsInBzIjoiU2VydmVyMSIsImFkZCI6IjEyNy4wLjAuMSIsInBvcnQiOiI0NDMiLCJpZCI6IjEyMzQ1Njc4LTEyMzQtNTY3OC05YWJjLTEyMzQ1Njc4OWFiYyIsImFpZCI6IjAiLCJzY3kiOiJhdXRvIiwibmV0IjoidGNwIiwidHlwZSI6Im5vbmUiLCJob3N0IjoiIiwicGF0aCI6IiIsInRscyI6InRscyIsInNuaSI6IiJ9
vless://uuid@hostname:port?encryption=none&security=tls&type=tcp&headerType=none#ServerName
ss://YWVzLTI1Ni1nY21AcGFzc3dvcmQ6MTI3LjAuMC4xOjgwODA=#Server2
```

**Usage:** This endpoint is intended for VPN client applications that support subscription URLs. Users can add this URL to their VPN client to automatically receive all their configurations and get updates when configurations change.
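A minimal client-side sketch, assuming the `reqwest`, `base64`, and `tokio` crates already present in Cargo.toml, of fetching and decoding such a subscription payload (the base URL and user ID are placeholders):

```rust
use base64::Engine;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder user ID; substitute a real one from /api/users.
    let body = reqwest::get("http://localhost:8080/sub/550e8400-e29b-41d4-a716-446655440000")
        .await?
        .text()
        .await?;
    // The endpoint returns base64; decoding yields one configuration URI per line.
    let decoded = base64::engine::general_purpose::STANDARD.decode(body.trim())?;
    for uri in String::from_utf8(decoded)?.lines() {
        println!("{uri}");
    }
    Ok(())
}
```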
## API Endpoints

All API endpoints are prefixed with `/api`.

### Users

#### List Users
- `GET /users?page=1&per_page=20` - Get paginated list of users

#### Search Users
- `GET /users/search?q=john` - Universal search for users

**Search capabilities:**
- By name (partial match, case-insensitive): `?q=john`
- By telegram_id: `?q=123456789`
- By user UUID: `?q=550e8400-e29b-41d4-a716-446655440000`

**Response:** Array of user objects (limited to 100 results)
```json
[
  {
    "id": "uuid",
    "name": "string",
    "comment": "string|null",
    "telegram_id": "number|null",
    "created_at": "timestamp",
    "updated_at": "timestamp"
  }
]
```
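A minimal sketch, assuming the `reqwest`, `serde`, `uuid`, and `tokio` crates from Cargo.toml, of calling this search endpoint and deserializing the array (timestamps kept as plain strings for simplicity; the query value is illustrative):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct User {
    id: uuid::Uuid,
    name: String,
    comment: Option<String>,
    telegram_id: Option<i64>,
    created_at: String,
    updated_at: String,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The endpoint also accepts a telegram_id or a user UUID as the query value.
    let users: Vec<User> = reqwest::get("http://localhost:8080/api/users/search?q=john")
        .await?
        .json()
        .await?;
    println!("found {} users: {users:#?}", users.len());
    Ok(())
}
```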
#### Get User
- `GET /users/{id}` - Get user by ID

#### Create User
- `POST /users` - Create new user
```json
{
  "name": "John Doe",
  "comment": "Admin user",
  "telegram_id": 123456789
}
```

#### Update User
- `PUT /users/{id}` - Update user by ID
```json
{
  "name": "Jane Doe",
  "comment": null,
  "telegram_id": 987654321
}
```

#### Delete User
- `DELETE /users/{id}` - Delete user by ID

#### Get User Access
- `GET /users/{id}/access?include_uris=true` - Get user access to inbounds (optionally with client URIs)

**Query Parameters:**
- `include_uris`: boolean (optional) - Include client configuration URIs in response

**Response (without URIs):**
```json
[
  {
    "id": "uuid",
    "user_id": "uuid",
    "server_inbound_id": "uuid",
    "xray_user_id": "string",
    "level": 0,
    "is_active": true
  }
]
```

**Response (with URIs):**
```json
[
  {
    "id": "uuid",
    "user_id": "uuid",
    "server_inbound_id": "uuid",
    "xray_user_id": "string",
    "level": 0,
    "is_active": true,
    "uri": "vless://uuid@hostname:port?parameters#alias",
    "protocol": "vless",
    "server_name": "Server Name",
    "inbound_tag": "inbound-tag"
  }
]
```

#### Generate Client Configurations
- `GET /users/{user_id}/configs` - Get all client configuration URIs for a user
- `GET /users/{user_id}/access/{inbound_id}/config` - Get specific client configuration URI

**Response:**
```json
{
  "user_id": "uuid",
  "server_name": "string",
  "inbound_tag": "string",
  "protocol": "vmess|vless|trojan|shadowsocks",
  "uri": "protocol://uri_string",
  "qr_code": null
}
```

### Servers

#### List Servers
- `GET /servers` - Get all servers

**Response:**
```json
[
  {
    "id": "uuid",
    "name": "string",
    "hostname": "string",
    "grpc_hostname": "string",
    "grpc_port": 2053,
    "status": "online|offline|error|unknown",
    "default_certificate_id": "uuid|null",
    "created_at": "timestamp",
    "updated_at": "timestamp",
    "has_credentials": true
  }
]
```

#### Get Server
- `GET /servers/{id}` - Get server by ID

#### Create Server
- `POST /servers` - Create new server

**Request:**
```json
{
  "name": "Server Name",
  "hostname": "server.example.com",
  "grpc_hostname": "192.168.1.100", // optional, defaults to hostname
  "grpc_port": 2053, // optional, defaults to 2053
  "api_credentials": "optional credentials",
  "default_certificate_id": "uuid" // optional
}
```

#### Update Server
- `PUT /servers/{id}` - Update server

**Request:** (all fields optional)
```json
{
  "name": "New Server Name",
  "hostname": "new.server.com",
  "grpc_hostname": "192.168.1.200",
  "grpc_port": 2054,
  "api_credentials": "new credentials",
  "status": "online",
  "default_certificate_id": "uuid"
}
```

#### Delete Server
- `DELETE /servers/{id}` - Delete server

#### Test Server Connection
- `POST /servers/{id}/test` - Test connection to server

**Response:**
```json
{
  "connected": true,
  "endpoint": "192.168.1.100:2053"
}
```

#### Get Server Statistics
- `GET /servers/{id}/stats` - Get server statistics

### Server Inbounds

#### List Server Inbounds
- `GET /servers/{server_id}/inbounds` - Get all inbounds for a server

**Response:**
```json
[
  {
    "id": "uuid",
    "server_id": "uuid",
    "template_id": "uuid",
    "template_name": "string",
    "tag": "string",
    "port_override": 8080,
    "certificate_id": "uuid|null",
    "variable_values": {},
    "is_active": true,
    "created_at": "timestamp",
    "updated_at": "timestamp"
  }
]
```

#### Get Server Inbound
- `GET /servers/{server_id}/inbounds/{inbound_id}` - Get specific inbound

#### Create Server Inbound
- `POST /servers/{server_id}/inbounds` - Create new inbound for server

**Request:**
```json
{
  "template_id": "uuid",
  "port": 8080,
  "certificate_id": "uuid", // optional
  "is_active": true
}
```

#### Update Server Inbound
- `PUT /servers/{server_id}/inbounds/{inbound_id}` - Update inbound

**Request:** (all fields optional)
```json
{
  "tag": "new-tag",
  "port_override": 8081,
  "certificate_id": "uuid",
  "variable_values": {"domain": "example.com"},
  "is_active": false
}
```

#### Delete Server Inbound
- `DELETE /servers/{server_id}/inbounds/{inbound_id}` - Delete inbound

### User-Inbound Management

#### Add User to Inbound
- `POST /servers/{server_id}/inbounds/{inbound_id}/users` - Grant user access to inbound

**Request:**
```json
{
  "user_id": "uuid", // optional, will create new user if not provided
  "name": "username",
  "comment": "User description", // optional
  "telegram_id": 123456789, // optional
  "level": 0 // optional, defaults to 0
}
```

#### Remove User from Inbound
- `DELETE /servers/{server_id}/inbounds/{inbound_id}/users/{email}` - Remove user access

#### Get Inbound Client Configurations
- `GET /servers/{server_id}/inbounds/{inbound_id}/configs` - Get all client configuration URIs for an inbound

**Response:**
```json
[
  {
    "user_id": "uuid",
    "server_name": "string",
    "inbound_tag": "string",
    "protocol": "vmess|vless|trojan|shadowsocks",
    "uri": "protocol://uri_string",
    "qr_code": null
  }
]
```

### Certificates

#### List Certificates
- `GET /certificates` - Get all certificates

#### Get Certificate
- `GET /certificates/{id}` - Get certificate by ID

#### Create Certificate
- `POST /certificates` - Create new certificate

**Request:**
```json
{
  "name": "Certificate Name",
  "cert_type": "self_signed|letsencrypt",
  "domain": "example.com",
  "auto_renew": true,
  "certificate_pem": "-----BEGIN CERTIFICATE-----...",
  "private_key": "-----BEGIN PRIVATE KEY-----..."
}
```

#### Update Certificate
- `PUT /certificates/{id}` - Update certificate

**Request:** (all fields optional)
```json
{
  "name": "New Certificate Name",
  "auto_renew": false
}
```

#### Delete Certificate
- `DELETE /certificates/{id}` - Delete certificate

#### Get Certificate Details
- `GET /certificates/{id}/details` - Get detailed certificate information

#### Get Expiring Certificates
- `GET /certificates/expiring` - Get certificates that are expiring soon

### Templates

#### List Templates
- `GET /templates` - Get all inbound templates

#### Get Template
- `GET /templates/{id}` - Get template by ID

#### Create Template
- `POST /templates` - Create new inbound template

**Request:**
```json
{
  "name": "Template Name",
  "protocol": "vmess|vless|trojan|shadowsocks",
  "default_port": 8080,
  "requires_tls": true,
  "config_template": "JSON template string"
}
```

#### Update Template
- `PUT /templates/{id}` - Update template

**Request:** (all fields optional)
```json
{
  "name": "New Template Name",
  "description": "Template description",
  "default_port": 8081,
  "base_settings": {},
  "stream_settings": {},
  "requires_tls": false,
  "requires_domain": true,
  "variables": [],
  "is_active": true
}
```

#### Delete Template
- `DELETE /templates/{id}` - Delete template

## Response Format

### User Object
```json
{
  "id": "uuid",
  "name": "string",
  "comment": "string|null",
  "telegram_id": "number|null",
  "created_at": "timestamp",
  "updated_at": "timestamp"
}
```

### Users List Response
```json
{
  "users": [UserObject],
  "total": 100,
  "page": 1,
  "per_page": 20
}
```

### Server Object
```json
{
  "id": "uuid",
  "name": "string",
  "hostname": "string",
  "grpc_hostname": "string",
  "grpc_port": 2053,
  "status": "online|offline|error|unknown",
  "default_certificate_id": "uuid|null",
  "created_at": "timestamp",
  "updated_at": "timestamp",
  "has_credentials": true
}
```

### Server Inbound Object
```json
{
  "id": "uuid",
  "server_id": "uuid",
  "template_id": "uuid",
  "template_name": "string",
  "tag": "string",
  "port_override": 8080,
  "certificate_id": "uuid|null",
  "variable_values": {},
  "is_active": true,
  "created_at": "timestamp",
  "updated_at": "timestamp"
}
```

### Certificate Object
```json
{
  "id": "uuid",
  "name": "string",
  "cert_type": "self_signed|letsencrypt",
  "domain": "string",
  "auto_renew": true,
  "expires_at": "timestamp|null",
  "created_at": "timestamp",
  "updated_at": "timestamp"
}
```

### Template Object
```json
{
  "id": "uuid",
  "name": "string",
  "description": "string|null",
  "protocol": "vmess|vless|trojan|shadowsocks",
  "default_port": 8080,
  "base_settings": {},
  "stream_settings": {},
  "requires_tls": true,
  "requires_domain": false,
  "variables": [],
  "is_active": true,
  "created_at": "timestamp",
  "updated_at": "timestamp"
}
```

### Inbound User Object
```json
{
  "id": "uuid",
  "user_id": "uuid",
  "server_inbound_id": "uuid",
  "xray_user_id": "string",
  "password": "string|null",
  "level": 0,
  "is_active": true,
  "created_at": "timestamp",
  "updated_at": "timestamp"
}
```

## Status Codes
- `200` - Success
- `201` - Created
- `204` - No Content (successful deletion)
- `400` - Bad Request (invalid data)
- `404` - Not Found
- `409` - Conflict (duplicate data, e.g. telegram_id)
- `500` - Internal Server Error

## Error Response Format
```json
{
  "error": "Error message description",
  "code": "ERROR_CODE",
  "details": "Additional error details"
}
```
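A minimal sketch, assuming `axum` and `serde` from Cargo.toml and a hypothetical `ApiError` type, of how a body with this shape could be returned from a handler (the actual error handling in src/web/ may differ):

```rust
use axum::{http::StatusCode, response::{IntoResponse, Response}, Json};
use serde::Serialize;

#[derive(Serialize)]
struct ErrorBody {
    error: String,
    code: String,
    details: String,
}

// Hypothetical wrapper used only for illustration.
struct ApiError {
    status: StatusCode,
    body: ErrorBody,
}

impl IntoResponse for ApiError {
    fn into_response(self) -> Response {
        // Serializes the body as JSON and attaches the status code.
        (self.status, Json(self.body)).into_response()
    }
}
```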
## Telegram Bot Integration

OutFleet includes a Telegram bot for user management and configuration access.

### User Management Endpoints

#### List User Requests
- `GET /api/user-requests` - Get all user access requests
- `GET /api/user-requests?status=pending` - Get pending requests only

**Response:**
```json
{
  "items": [
    {
      "id": "uuid",
      "user_id": "uuid|null",
      "telegram_id": 123456789,
      "telegram_username": "username",
      "telegram_first_name": "John",
      "telegram_last_name": "Doe",
      "full_name": "John Doe",
      "telegram_link": "@username",
      "status": "pending|approved|declined",
      "request_message": "Access request message",
      "response_message": "Admin response",
      "processed_by_user_id": "uuid|null",
      "processed_at": "timestamp|null",
      "created_at": "timestamp",
      "updated_at": "timestamp"
    }
  ],
  "total": 50,
  "page": 1,
  "per_page": 20
}
```

#### Get User Request
- `GET /api/user-requests/{id}` - Get specific user request

#### Approve User Request
- `POST /api/user-requests/{id}/approve` - Approve user access request

**Request:**
```json
{
  "response_message": "Welcome! Your access has been approved."
}
```

**Response:** Updated user request object

**Side Effects:**
- Creates a new user account
- Sends Telegram notification with main menu to the user

#### Decline User Request
- `POST /api/user-requests/{id}/decline` - Decline user access request

**Request:**
```json
{
  "response_message": "Sorry, your request has been declined."
}
```

**Response:** Updated user request object

**Side Effects:**
- Sends Telegram notification to the user

#### Delete User Request
- `DELETE /api/user-requests/{id}` - Delete user request

### Telegram Bot Configuration

#### Get Telegram Status
- `GET /api/telegram/status` - Get bot status and configuration

**Response:**
```json
{
  "is_running": true,
  "config": {
    "id": "uuid",
    "name": "Bot Name",
    "bot_token": "masked",
    "is_active": true,
    "created_at": "timestamp",
    "updated_at": "timestamp"
  }
}
```

#### Create/Update Telegram Config
- `POST /api/telegram/config` - Create new bot configuration
- `PUT /api/telegram/config/{id}` - Update bot configuration

**Request:**
```json
{
  "name": "OutFleet Bot",
  "bot_token": "bot_token_from_botfather",
  "is_active": true
}
```

#### Telegram Admin Management
- `GET /api/telegram/admins` - Get all Telegram admins
- `POST /api/telegram/admins/{user_id}` - Add user as Telegram admin
- `DELETE /api/telegram/admins/{user_id}` - Remove user from Telegram admins

### Telegram Bot Features

#### User Flow
1. **Request Access**: Users send `/start` to the bot and request VPN access
2. **Admin Approval**: Admins receive notifications and can approve/decline via Telegram or web interface
3. **Configuration Access**: Approved users get access to:
   - **🔗 Subscription Link**: Personal subscription URL (`/sub/{user_id}`)
   - **⚙️ My Configs**: Individual configuration management
   - **💬 Support**: Contact support

#### Admin Features
- **📋 User Requests**: View and manage pending access requests
- **📊 Statistics**: View system statistics
- **📢 Broadcast**: Send messages to all users
- **Approval Workflow**: Approve/decline requests with server selection

#### Subscription Link Integration
When users click "🔗 Subscription Link" in the Telegram bot, they receive:
- Personal subscription URL: `{BASE_URL}/sub/{user_id}`
- Instructions in their preferred language (Russian/English)
- Automatic updates when configurations change

**Environment Variables:**
- `BASE_URL` - Base URL for subscription links (default: `http://localhost:8080`)

### Bot Commands
- `/start` - Start bot and show main menu
- `/requests` - [Admin] View pending user requests
- `/stats` - [Admin] Show system statistics
- `/broadcast <message>` - [Admin] Send message to all users
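A minimal sketch, assuming the `teloxide` crate from Cargo.toml, of how this command set could be declared with the `BotCommands` derive (the real bot module may structure this differently):

```rust
use teloxide::utils::command::BotCommands;

#[derive(BotCommands, Clone)]
#[command(rename_rule = "lowercase", description = "OutFleet bot commands:")]
enum Command {
    #[command(description = "start bot and show main menu")]
    Start,
    #[command(description = "[admin] view pending user requests")]
    Requests,
    #[command(description = "[admin] show system statistics")]
    Stats,
    #[command(description = "[admin] send message to all users")]
    Broadcast(String),
}
```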
Cargo.lock (generated, new file, 5630 lines). File diff suppressed because it is too large.
Cargo.toml (new file, 77 lines)

```toml
[package]
name = "xray-admin"
version = "0.1.3"
edition = "2021"

[dependencies]
# Async runtime
tokio = { version = "1.0", features = ["full"] }
tokio-cron-scheduler = "0.10"

# Serialization/deserialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_yaml = "0.9"
toml = "0.8"

# Configuration
config = "0.14"
clap = { version = "4.0", features = ["derive", "env"] }

# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }

# Utilities
anyhow = "1.0"
thiserror = "1.0"

# Validation
validator = { version = "0.18", features = ["derive"] }

# URL parsing
url = "2.5"

# Database and ORM
sea-orm = { version = "1.0", features = ["sqlx-postgres", "runtime-tokio-rustls", "macros", "with-chrono", "with-uuid"] }
sea-orm-migration = "1.0"

# Additional utilities
uuid = { version = "1.0", features = ["v4", "v5", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
async-trait = "0.1"
log = "0.4"
urlencoding = "2.1"
rand = "0.8"

# Web server
axum = { version = "0.7", features = ["macros", "json"] }
tower = "0.4"
tower-http = { version = "0.5", features = ["cors", "fs"] }
hyper = { version = "1.0", features = ["full"] }

# Xray integration
xray-core = "0.2.1"  # gRPC client for Xray
tonic = "0.12"       # gRPC client/server framework
prost = "0.13"       # Protocol Buffers implementation
rcgen = { version = "0.12", features = ["pem"] }  # For self-signed certificates
time = "0.3"         # For certificate date/time handling
base64 = "0.21"      # For PEM to DER conversion

# ACME/Let's Encrypt support
instant-acme = "0.8"  # ACME client for Let's Encrypt
reqwest = { version = "0.11", features = ["json", "rustls-tls"] }  # HTTP client for Cloudflare API
rustls = { version = "0.23", features = ["aws-lc-rs"] }  # TLS library with aws-lc-rs crypto provider
ring = "0.17"  # Crypto for ACME
pem = "3.0"    # PEM format support

# Telegram bot support
teloxide = { version = "0.13", features = ["macros"] }

[dev-dependencies]
tempfile = "3.0"
tokio-test = "0.4"
wiremock = "0.6"
axum-test = "14.0"
serial_test = "3.0"
mockall = "0.12"
```
Dockerfile (modified, @@ -1,40 +1,85 @@)

Removed (Python/Alpine image):

```dockerfile
FROM python:3-alpine

# Build arguments
ARG GIT_COMMIT="development"
ARG GIT_COMMIT_SHORT="dev"
ARG BUILD_DATE="unknown"
ARG BRANCH_NAME="unknown"

# Environment variables from build args
ENV GIT_COMMIT=${GIT_COMMIT}
ENV GIT_COMMIT_SHORT=${GIT_COMMIT_SHORT}
ENV BUILD_DATE=${BUILD_DATE}
ENV BRANCH_NAME=${BRANCH_NAME}

WORKDIR /app

# Install system dependencies first (this layer will be cached)
RUN apk update && apk add git curl unzip

# Copy and install Python dependencies (this layer will be cached when requirements.txt doesn't change)
COPY ./requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Install Xray-core
RUN XRAY_VERSION=$(curl -s https://api.github.com/repos/XTLS/Xray-core/releases/latest | sed -n 's/.*"tag_name": "\([^"]*\)".*/\1/p') && \
    curl -L -o /tmp/xray.zip "https://github.com/XTLS/Xray-core/releases/download/${XRAY_VERSION}/Xray-linux-64.zip" && \
    cd /tmp && unzip xray.zip && \
    ls -la /tmp/ && \
    find /tmp -name "xray" -type f && \
    cp xray /usr/local/bin/xray && \
    chmod +x /usr/local/bin/xray && \
    rm -rf /tmp/xray.zip /tmp/xray

# Copy the rest of the application code (this layer will change frequently)
COPY . .

# Run collectstatic
RUN python manage.py collectstatic --noinput

CMD [ "python", "./manage.py", "runserver", "0.0.0.0:8000" ]
```

Added (multi-stage Rust build):

```dockerfile
# Use cargo-chef for dependency caching
FROM lukemathwalker/cargo-chef:0.1.68-rust-1.90-slim AS chef
WORKDIR /app

# Install system dependencies needed for building
RUN apt-get update && apt-get install -y \
    pkg-config \
    libssl-dev \
    protobuf-compiler \
    && rm -rf /var/lib/apt/lists/*

# Recipe preparation stage
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json

# Dependency building stage
FROM chef AS builder

# Build arguments
ARG GIT_COMMIT="development"
ARG GIT_COMMIT_SHORT="dev"
ARG BUILD_DATE="unknown"
ARG BRANCH_NAME="unknown"
ARG CARGO_VERSION="0.1.0"

# Environment variables from build args
ENV GIT_COMMIT=${GIT_COMMIT}
ENV GIT_COMMIT_SHORT=${GIT_COMMIT_SHORT}
ENV BUILD_DATE=${BUILD_DATE}
ENV BRANCH_NAME=${BRANCH_NAME}
ENV CARGO_VERSION=${CARGO_VERSION}

# Copy recipe and build dependencies (cached layer)
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json

# Copy source and build application
COPY . .
RUN cargo build --release --locked

# Runtime stage - minimal Ubuntu image for glibc compatibility
FROM ubuntu:24.04 AS runtime

# Build arguments (needed for runtime stage)
ARG GIT_COMMIT="development"
ARG GIT_COMMIT_SHORT="dev"
ARG BUILD_DATE="unknown"
ARG BRANCH_NAME="unknown"
ARG CARGO_VERSION="0.1.0"

# Environment variables from build args
ENV GIT_COMMIT=${GIT_COMMIT}
ENV GIT_COMMIT_SHORT=${GIT_COMMIT_SHORT}
ENV BUILD_DATE=${BUILD_DATE}
ENV BRANCH_NAME=${BRANCH_NAME}
ENV CARGO_VERSION=${CARGO_VERSION}

WORKDIR /app

# Install minimal runtime dependencies
RUN apt-get update && apt-get install -y \
    ca-certificates \
    libssl3 \
    libprotobuf32 \
    && rm -rf /var/lib/apt/lists/* \
    && apt-get clean

# Copy the binary from builder
COPY --from=builder /app/target/release/xray-admin /app/xray-admin

# Copy static files
COPY --from=builder /app/static ./static

# Copy config file
COPY config.docker.toml ./config.toml

# Create non-root user for security
RUN groupadd -r outfleet && useradd -r -g outfleet -s /bin/false outfleet
RUN chown -R outfleet:outfleet /app
USER outfleet

EXPOSE 8081

CMD ["/app/xray-admin", "--host", "0.0.0.0"]
```
LICENSE (deleted, 13 lines)

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004

Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>

Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. You just DO WHAT THE FUCK YOU WANT TO.
LLM_PROJECT_CONTEXT.md (new file, 132 lines)

# LLM Project Context - Xray Admin Panel

## Project Overview
Rust-based administration panel for managing xray-core VPN proxy servers. Uses real gRPC integration with the xray-core library for server communication.

## Current Architecture

### Core Technologies
- **Language**: Rust (edition 2021)
- **Web Framework**: Axum with tower-http
- **Database**: PostgreSQL with Sea-ORM
- **Xray Integration**: xray-core 0.2.1 library with real gRPC communication
- **Frontend**: Vanilla HTML/CSS/JS with toast notifications

### Module Structure
```
src/
├── config/     # Configuration management (args, env, file)
├── database/   # Sea-ORM entities, repositories, migrations
├── services/   # Business logic (xray gRPC client, certificates)
├── web/        # Axum handlers and routes
└── main.rs     # Application entry point
```

## Key Features Implemented

### 1. Database Entities
- **Users**: Basic user management
- **Servers**: Xray server definitions with gRPC endpoints
- **Certificates**: TLS certificates with PEM storage (binary format)
- **InboundTemplates**: Reusable inbound configurations
- **ServerInbounds**: Template bindings to servers with ports/certificates

### 2. Xray gRPC Integration
**Location**: `src/services/xray/client.rs`
- Real xray-core library integration (NOT mock/CLI)
- Methods: `add_inbound_with_certificate()`, `remove_inbound()`, `get_stats()`
- **CRITICAL**: TLS certificate configuration via streamSettings with proper protobuf messages
- Supports VLESS, VMess, Trojan, Shadowsocks protocols

### 3. Certificate Management
**Location**: `src/database/entities/certificate.rs`
- Self-signed certificate generation using rcgen
- Binary storage (cert_data, key_data as Vec<u8>)
- PEM conversion methods: `certificate_pem()`, `private_key_pem()`
- Separate endpoints: `/certificates/{id}` (basic) and `/certificates/{id}/details` (with PEM)
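A minimal sketch, assuming rcgen 0.12 from Cargo.toml, of how a self-signed certificate could be generated and handed back as binary PEM matching the storage described above (the project's actual helpers may differ):

```rust
use rcgen::generate_simple_self_signed;

// Returns (cert_data, key_data) as bytes, mirroring the binary PEM storage.
fn make_self_signed(domain: &str) -> Result<(Vec<u8>, Vec<u8>), rcgen::Error> {
    let cert = generate_simple_self_signed(vec![domain.to_string()])?;
    let cert_pem = cert.serialize_pem()?;
    let key_pem = cert.serialize_private_key_pem();
    Ok((cert_pem.into_bytes(), key_pem.into_bytes()))
}
```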
### 4. Template-Based Architecture
Templates define reusable inbound configurations that can be bound to servers with:
- Port overrides
- Certificate assignments
- Active/inactive states

## Current Status & Issues

### ✅ Working Features
- Complete CRUD for all entities
- Real xray gRPC communication with TLS certificate support
- Toast notification system (absolute positioning)
- Modal-based editing interface
- Password masking in database URL logging
- Certificate details display with PEM content

### 🔧 Recent Fixes
- **StreamConfig Integration**: Fixed TLS certificate configuration in xray gRPC calls
- **Certificate Display**: Added `/certificates/{id}/details` endpoint for PEM viewing
- **Active/Inactive Management**: Inbounds automatically added/removed from xray when toggled

### ⚠️ Current Issue
User reported certificate details still showing "Not available" - this was just fixed with the new `/certificates/{id}/details` endpoint.

## API Structure

### Endpoints
```
/api/users/*                    # User management
/api/servers/*                  # Server management
/api/servers/{id}/inbounds/*    # Server inbound management
/api/certificates/*             # Certificate management (basic)
/api/certificates/{id}/details  # Certificate details with PEM
/api/templates/*                # Template management
```

## Configuration
- **Default port**: 8080 (user tested on 8082)
- **Database**: PostgreSQL with auto-migration
- **Environment variables**: XRAY_ADMIN__* prefix
- **Config file**: config.toml support

## Testing Commands
```bash
# Run application
cargo run -- --host 0.0.0.0 --port 8082

# Test xray integration
xray api lsi --server 100.91.97.36:10085

# Check compilation
cargo check
```

## Key Implementation Details

### Xray TLS Configuration
**Location**: `src/services/xray/client.rs:185-194`
```rust
let stream_config = StreamConfig {
    protocol_name: "tcp".to_string(),
    security_type: "tls".to_string(),
    security_settings: vec![tls_message],
    // ... other fields
};
```

### Certificate Data Flow
1. User creates certificate via web interface
2. PEM data stored as binary in database (cert_data, key_data)
3. When creating inbound, certificate fetched and converted back to PEM
4. PEM passed to xray gRPC client for TLS configuration

### Database Migrations
Auto-migration enabled by default. All entities use UUID primary keys with timestamps.
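A minimal sketch, assuming Sea-ORM 1.0 from Cargo.toml, of what an entity with a UUID primary key and timestamps could look like; field names follow the User object in API.md and are assumptions, since the real entities in src/database/entities/ may differ:

```rust
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "users")]
pub struct Model {
    // UUIDs are generated by the application, not auto-incremented by the database.
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    pub name: String,
    pub comment: Option<String>,
    pub telegram_id: Option<i64>,
    pub created_at: DateTimeUtc,
    pub updated_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
```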
## Development Notes
- **User prefers English in code/comments**
- **No emoji usage unless explicitly requested**
- **Prefer editing existing files over creating new ones**
- **Real xray-core integration required** (user specifically asked not to abandon it)
- **Application tested with actual xray server at 100.91.97.36:10085**

## Last Working State
All features implemented and compiling. StreamConfig properly configured for TLS certificate transmission to xray servers. Certificate viewing endpoint fixed for PEM display.
README.md (deleted, 58 lines)

<p align="center">
<h1 align="center">OutFleet: Master Your OutLine VPN</h1>

<p align="center">
Streamline OutLine VPN experience. OutFleet offers centralized key control for many servers, users and always-updated Dynamic Access Keys instead of ss:// links
<br/>
<br/>
<a href="https://github.com/house-of-vanity/outfleet/issues">Request Feature</a>
</p>
</p>

<img width="1282" height="840" alt="image" src="https://github.com/user-attachments/assets/3b66f928-853b-4af0-8968-1eacb2c16a1c" />

## About The Project

### Key Features

* Centralized Key Management
  Administer user keys from one unified dashboard. Add, delete, and allocate users to specific servers effortlessly.

* Distribute ssconf:// links that are always up-to-date with your current server configurations. Eliminate the need for manual link updates.

### Why OutFleet?
Tired of juggling multiple home servers and the headache of individually managing users on each? OutFleet was born out of the frustration of not finding a suitable tool for efficiently managing a bunch of home servers.

## Built With

Django, Postgres SQL and hassle-free deployment using Kubernetes or docker-compose

### Installation

#### Docker compose
Docker deploy is easy:
```
docker-compose up -d
```

#### Kubernetes
I use ArgoCD for deployment. [Take a look](https://gt.hexor.cy/ab/homelab/src/branch/main/k8s/apps/vpn) to `outfleet.yaml` file for manifests.

#### Setup sslocal service on Windows
Shadowsocks servers can be used directly with **sslocal**. For automatic and regular password updates, you can create a Task Scheduler job to rotate the passwords when they change, as OutFleet manages the passwords automatically.
You may run script in Admin PowerShell to create Task for autorun **sslocal** and update connection details automatically using Outfleet API
```PowerShell
Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass -Force; Invoke-Expression (Invoke-WebRequest -Uri "https://raw.githubusercontent.com/house-of-vanity/OutFleet/refs/heads/master/tools/windows-helper.ps1" -UseBasicParsing).Content
```
[Firefox Plugin Proxy Switcher and Manager](https://addons.mozilla.org/en-US/firefox/addon/proxy-switcher-and-manager/) && [Chrome plugin Proxy Switcher and Manager](https://chromewebstore.google.com/detail/proxy-switcher-and-manage/onnfghpihccifgojkpnnncpagjcdbjod)

Keep in mind that all user keys are stored in a single **config.yaml** file. If this file is lost, user keys will remain on the servers, but OutFleet will lose the ability to manage them. Handle with extreme caution and use backups.

## Authors

* **UltraDesu** - *Humble amateur developer* - [UltraDesu](https://github.com/house-of-vanity) - *Author*
* **Contributors**
  * @Sanapach
SECURITY.md (deleted, 21 lines)

# Security Policy

## Supported Versions

Use this section to tell people about which versions of your project are
currently being supported with security updates.

| Version | Supported          |
| ------- | ------------------ |
| 5.1.x   | :white_check_mark: |
| 5.0.x   | :x:                |
| 4.0.x   | :white_check_mark: |
| < 4.0   | :x:                |

## Reporting a Vulnerability

Use this section to tell people how to report a vulnerability.

Tell them where to go, how often they can expect to get an update on a
reported vulnerability, what to expect if the vulnerability is accepted or
declined, etc.
URI.md (new file, 151 lines)

# Xray Client URI Generation

## VMess URI Format

VMess URIs use two formats:

### 1. Query Parameter Format
```
vmess://uuid@hostname:port?parameters#alias
```

**Parameters:**
- `encryption=auto` - Encryption method
- `security=tls|none` - Security layer (TLS or none)
- `sni=domain` - Server Name Indication for TLS
- `fp=chrome|firefox|safari` - TLS fingerprint
- `type=ws|tcp|grpc|http` - Transport type
- `path=/path` - WebSocket/HTTP path
- `host=domain` - Host header for WebSocket

**Example:**
```
vmess://2c981164-9b93-4bca-94ff-b78d3f8498d7@v2ray.codefyinc.com:443?encryption=auto&security=tls&sni=example.com&fp=chrome&type=ws&path=/ws&host=v2ray.codefyinc.com#MyServer
```

### 2. Base64 JSON Format
```
vmess://base64(json_config)#alias
```

**JSON Structure:**
```json
{
  "v": "2",
  "ps": "Server Name",
  "add": "hostname",
  "port": "443",
  "id": "uuid",
  "aid": "0",
  "scy": "auto",
  "net": "ws",
  "type": "none",
  "host": "domain",
  "path": "/path",
  "tls": "tls",
  "sni": "domain",
  "alpn": "",
  "fp": "chrome"
}
```
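A minimal sketch, assuming `serde_json` and `base64` from Cargo.toml, of assembling the base64 JSON form of a VMess URI (the field values and function name are placeholders for illustration):

```rust
use base64::Engine;
use serde_json::json;

fn vmess_uri(alias: &str, host: &str, port: u16, uuid: &str) -> String {
    // Build the JSON config described above, then base64-encode it.
    // The alias in the fragment should be URL-encoded if it contains special characters.
    let config = json!({
        "v": "2", "ps": alias, "add": host, "port": port.to_string(),
        "id": uuid, "aid": "0", "scy": "auto", "net": "tcp",
        "type": "none", "host": "", "path": "", "tls": "tls", "sni": ""
    });
    let encoded = base64::engine::general_purpose::STANDARD.encode(config.to_string());
    format!("vmess://{encoded}#{alias}")
}
```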
## VLESS URI Format

```
vless://uuid@hostname:port?parameters#alias
```

**Key Parameters:**
- `encryption=none` - VLESS uses no encryption
- `security=tls|reality|none` - Security layer
- `type=ws|tcp|grpc|http|httpupgrade|xhttp` - Transport type
- `flow=xtls-rprx-vision` - Flow control (for XTLS)
- `headerType=none|http` - Header type for TCP
- `mode=auto|gun|stream-one` - Transport mode
- `serviceName=name` - gRPC service name
- `authority=domain` - gRPC authority
- `spx=/path` - Split HTTP path (for xhttp)

**REALITY Parameters:**
- `pbk=public_key` - Public key
- `sid=short_id` - Short ID
- `fp=chrome|firefox|safari` - TLS fingerprint
- `sni=domain` - Server Name Indication

**Examples:**
```
vless://uuid@server.com:443?type=tcp&security=none&headerType=none#Basic
vless://uuid@server.com:443?type=ws&security=tls&path=/ws&host=example.com#WebSocket
vless://uuid@server.com:443?type=grpc&security=reality&serviceName=grpcService&pbk=key&sid=id#gRPC-Reality
```

## Generation Algorithm

1. **UUID**: Use `inbound_users.xray_user_id`
2. **Hostname**: From `servers.hostname`
3. **Port**: From `server_inbounds.port_override` or template default
4. **Transport**: From inbound template `stream_settings`
5. **Security**: Based on certificate configuration
6. **Path**: From WebSocket stream settings
7. **Alias**: User name + server name
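A minimal sketch, assuming the `urlencoding` crate from Cargo.toml, of assembling a VLESS URI from the fields listed in the algorithm above; the input struct is hypothetical and the real generator in the codebase may differ:

```rust
// Hypothetical inputs gathered per steps 1-7 above.
struct UriParts<'a> {
    xray_user_id: &'a str, // inbound_users.xray_user_id
    hostname: &'a str,     // servers.hostname
    port: u16,             // port_override or template default
    transport: &'a str,    // e.g. "ws", taken from stream_settings
    security: &'a str,     // "tls" or "none", based on certificate configuration
    path: &'a str,         // WebSocket path
    alias: &'a str,        // user name + server name
}

fn vless_uri(p: &UriParts) -> String {
    // Path and alias are percent-encoded to survive special characters.
    format!(
        "vless://{}@{}:{}?encryption=none&security={}&type={}&path={}#{}",
        p.xray_user_id,
        p.hostname,
        p.port,
        p.security,
        p.transport,
        urlencoding::encode(p.path),
        urlencoding::encode(p.alias),
    )
}
```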
## Shadowsocks URI Format

```
ss://password@hostname:port?parameters#alias
```

**Parameters:**
- `encryption=none` - Usually none for modern configs
- `security=tls|reality|none` - Security layer
- `type=ws|tcp|grpc|xhttp` - Transport type
- `path=/path` - WebSocket/HTTP path
- `host=domain` - Host header
- `mode=auto|gun|stream-one` - Transport mode
- `headerType=none|http` - Header type for TCP
- `flow=xtls-rprx-vision` - Flow control (for REALITY)
- `pbk=key` - Public key (for REALITY)
- `sid=id` - Short ID (for REALITY)

**Example:**
```
ss://my-password@server.com:443?type=ws&security=tls&path=/ws&host=example.com#MyServer
```

## Trojan URI Format

```
trojan://password@hostname:port?parameters#alias
```

**Parameters:**
- `security=tls|reality|none` - Security layer
- `type=ws|tcp|grpc` - Transport type
- `sni=domain` - Server Name Indication
- `fp=chrome|firefox|randomized` - TLS fingerprint
- `flow=xtls-rprx-vision` - Flow control
- `allowInsecure=1` - Allow insecure connections
- `headerType=http|none` - Header type for TCP
- `mode=gun` - gRPC mode
- `serviceName=name` - gRPC service name

**WebSocket Parameters:**
- `path=/path` - WebSocket path
- `host=domain` - Host header
- `alpn=http/1.1|h2` - ALPN protocols

**Examples:**
```
trojan://password@server.com:443?type=tcp&security=tls&sni=example.com#Basic
trojan://password@server.com:443?type=ws&security=tls&path=/ws&host=example.com&sni=example.com#WebSocket
trojan://password@server.com:443?type=grpc&security=tls&serviceName=grpcService&mode=gun&sni=example.com#gRPC
```

## Implementation Notes

- VMess requires `aid=0` for modern clients
- VLESS doesn't use `aid` parameter
- Shadowsocks uses password instead of UUID
- Base64 encoding required for VMess JSON format
- URL encoding needed for special characters in parameters
- REALITY parameters: `pbk`, `sid`, `fp`, `sni`
Deleted file, @@ -1,5 +0,0 @@

```yaml
platforms:
  - name: amd64
    architecture: amd64
  - name: arm64
    architecture: arm64
```
Deleted file, @@ -1,15 +0,0 @@

```sql
-- Check how many records have no acl_link_id
SELECT COUNT(*) as total_without_link
FROM vpn_accesslog
WHERE acl_link_id IS NULL OR acl_link_id = '';

-- Check the total number of records
SELECT COUNT(*) as total_records FROM vpn_accesslog;

-- Show the distribution by date (latest records without links)
SELECT DATE(timestamp) as date, COUNT(*) as count
FROM vpn_accesslog
WHERE acl_link_id IS NULL OR acl_link_id = ''
GROUP BY DATE(timestamp)
ORDER BY date DESC
LIMIT 10;
```
@@ -1,35 +0,0 @@
-- OPTION 1: Delete ALL records without acl_link_id
-- CAUTION! This will delete all old logs
DELETE FROM vpn_accesslog
WHERE acl_link_id IS NULL OR acl_link_id = '';

-- OPTION 2: Delete records without acl_link_id that are older than 30 days
-- A safer option
DELETE FROM vpn_accesslog
WHERE (acl_link_id IS NULL OR acl_link_id = '')
AND timestamp < NOW() - INTERVAL 30 DAY;

-- OPTION 3: Delete records without acl_link_id that are older than 7 days
-- An even more conservative approach
DELETE FROM vpn_accesslog
WHERE (acl_link_id IS NULL OR acl_link_id = '')
AND timestamp < NOW() - INTERVAL 7 DAY;

-- OPTION 4: Keep only the most recent 1000 records without links (for history)
DELETE FROM vpn_accesslog
WHERE (acl_link_id IS NULL OR acl_link_id = '')
AND id NOT IN (
    SELECT id FROM (
        SELECT id FROM vpn_accesslog
        WHERE acl_link_id IS NULL OR acl_link_id = ''
        ORDER BY timestamp DESC
        LIMIT 1000
    ) AS recent_logs
);

-- OPTION 5: Staged deletion (for large databases)
-- Delete 10000 records at a time
DELETE FROM vpn_accesslog
WHERE (acl_link_id IS NULL OR acl_link_id = '')
AND timestamp < NOW() - INTERVAL 30 DAY
LIMIT 10000;
17
config.docker.toml
Normal file
@@ -0,0 +1,17 @@
[database]
url = "postgres://postgres:postgres@postgres:5432/outfleet"

[web]
host = "0.0.0.0"
port = 8081

[telegram]
enabled = false
admin_chat_ids = []
allowed_users = []

[xray]
config_path = "./templates"

[log]
level = "debug"
@@ -1,102 +0,0 @@
|
|||||||
services:
|
|
||||||
web_ui:
|
|
||||||
image: outfleet:local
|
|
||||||
container_name: outfleet-web
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
ports:
|
|
||||||
- "8000:8000"
|
|
||||||
environment:
|
|
||||||
- POSTGRES_HOST=postgres
|
|
||||||
- POSTGRES_USER=postgres
|
|
||||||
- POSTGRES_PASSWORD=postgres
|
|
||||||
- EXTERNAL_ADDRESS=http://127.0.0.1:8000
|
|
||||||
- CELERY_BROKER_URL=redis://redis:6379/0
|
|
||||||
depends_on:
|
|
||||||
postgres:
|
|
||||||
condition: service_healthy
|
|
||||||
redis:
|
|
||||||
condition: service_healthy
|
|
||||||
volumes:
|
|
||||||
- .:/app
|
|
||||||
working_dir: /app
|
|
||||||
command: >
|
|
||||||
sh -c "sleep 1 &&
|
|
||||||
python manage.py makemigrations &&
|
|
||||||
python manage.py migrate &&
|
|
||||||
python manage.py create_admin &&
|
|
||||||
python manage.py runserver 0.0.0.0:8000"
|
|
||||||
|
|
||||||
worker:
|
|
||||||
image: outfleet:local
|
|
||||||
container_name: outfleet-worker
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
environment:
|
|
||||||
- POSTGRES_HOST=postgres
|
|
||||||
- POSTGRES_USER=postgres
|
|
||||||
- POSTGRES_PASSWORD=postgres
|
|
||||||
- CELERY_BROKER_URL=redis://redis:6379/0
|
|
||||||
depends_on:
|
|
||||||
postgres:
|
|
||||||
condition: service_healthy
|
|
||||||
redis:
|
|
||||||
condition: service_healthy
|
|
||||||
volumes:
|
|
||||||
- .:/app
|
|
||||||
working_dir: /app
|
|
||||||
command: >
|
|
||||||
sh -c "sleep 3 && celery -A mysite worker"
|
|
||||||
|
|
||||||
beat:
|
|
||||||
image: outfleet:local
|
|
||||||
container_name: outfleet-beat
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
environment:
|
|
||||||
- POSTGRES_HOST=postgres
|
|
||||||
- POSTGRES_USER=postgres
|
|
||||||
- POSTGRES_PASSWORD=postgres
|
|
||||||
- CELERY_BROKER_URL=redis://redis:6379/0
|
|
||||||
depends_on:
|
|
||||||
postgres:
|
|
||||||
condition: service_healthy
|
|
||||||
redis:
|
|
||||||
condition: service_healthy
|
|
||||||
volumes:
|
|
||||||
- .:/app
|
|
||||||
working_dir: /app
|
|
||||||
command: >
|
|
||||||
sh -c "sleep 3 && celery -A mysite beat"
|
|
||||||
|
|
||||||
postgres:
|
|
||||||
image: postgres:15
|
|
||||||
container_name: postgres
|
|
||||||
environment:
|
|
||||||
POSTGRES_USER: postgres
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
POSTGRES_DB: outfleet
|
|
||||||
ports:
|
|
||||||
- "5432:5432"
|
|
||||||
volumes:
|
|
||||||
- postgres_data:/var/lib/postgresql/data
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "pg_isready -U postgres"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 5
|
|
||||||
|
|
||||||
redis:
|
|
||||||
image: redis:7
|
|
||||||
container_name: redis
|
|
||||||
ports:
|
|
||||||
- "6379:6379"
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "redis-cli", "ping"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 3
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
postgres_data:
|
|
||||||
|
|
||||||
45
docker-compose.yml
Normal file
@@ -0,0 +1,45 @@
services:
  admin:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: outfleet-admin
    ports:
      - "8081:8081"
    environment:
      - DATABASE_URL=postgres://postgres:postgres@postgres:5432/outfleet
      - RUST_LOG=info
      - XRAY_ADMIN__WEB__PORT=8081
    depends_on:
      postgres:
        condition: service_healthy

  postgres:
    image: postgres:15
    container_name: postgres
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: outfleet
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 10s
      timeout: 5s
      retries: 5

  xray:
    image: teddysun/xray:25.8.3
    container_name: xray-server
    ports:
      - "10085:10085"
    volumes:
      - ./xray-config.json:/etc/xray/config.json
    command: ["xray", "-c", "/etc/xray/config.json"]
    restart: unless-stopped

volumes:
  postgres_data:
22
manage.py
@@ -1,22 +0,0 @@
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Run administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
@@ -1,3 +0,0 @@
from .celery import app as celery_app

__all__ = ('celery_app',)
@@ -1,16 +0,0 @@
"""
ASGI config for mysite project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/5.1/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')

application = get_asgi_application()
@@ -1,40 +0,0 @@
|
|||||||
import logging
|
|
||||||
import os
|
|
||||||
|
|
||||||
from celery import Celery
|
|
||||||
from celery import shared_task
|
|
||||||
from celery.schedules import crontab
|
|
||||||
|
|
||||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
app = Celery('mysite')
|
|
||||||
|
|
||||||
app.conf.beat_schedule = {
|
|
||||||
'periodical_servers_sync': {
|
|
||||||
'task': 'sync_all_servers',
|
|
||||||
'schedule': crontab(minute=0, hour='*/3'), # Every 3 hours
|
|
||||||
},
|
|
||||||
'cleanup_old_task_logs': {
|
|
||||||
'task': 'cleanup_task_logs',
|
|
||||||
'schedule': crontab(hour=2, minute=0), # Daily at 2 AM
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
app.config_from_object('django.conf:settings', namespace='CELERY')
|
|
||||||
|
|
||||||
# Additional celery settings for better logging and performance
|
|
||||||
app.conf.update(
|
|
||||||
# Keep detailed results for debugging
|
|
||||||
result_expires=3600, # 1 hour
|
|
||||||
task_always_eager=False,
|
|
||||||
task_eager_propagates=True,
|
|
||||||
# Improve task tracking
|
|
||||||
task_track_started=True,
|
|
||||||
task_send_sent_event=True,
|
|
||||||
# Clean up settings
|
|
||||||
result_backend_cleanup_interval=300, # Clean up every 5 minutes
|
|
||||||
)
|
|
||||||
|
|
||||||
app.autodiscover_tasks()
|
|
||||||
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
from django.conf import settings
|
|
||||||
import subprocess
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
def version_info(request):
|
|
||||||
"""Add version information to template context"""
|
|
||||||
|
|
||||||
git_commit = getattr(settings, 'GIT_COMMIT', None)
|
|
||||||
git_commit_short = getattr(settings, 'GIT_COMMIT_SHORT', None)
|
|
||||||
build_date = getattr(settings, 'BUILD_DATE', None)
|
|
||||||
|
|
||||||
if not git_commit or git_commit == 'development':
|
|
||||||
try:
|
|
||||||
base_dir = getattr(settings, 'BASE_DIR', Path(__file__).resolve().parent.parent)
|
|
||||||
result = subprocess.run(['git', 'rev-parse', 'HEAD'],
|
|
||||||
capture_output=True, text=True, cwd=base_dir, timeout=5)
|
|
||||||
if result.returncode == 0:
|
|
||||||
git_commit = result.stdout.strip()
|
|
||||||
git_commit_short = git_commit[:7]
|
|
||||||
|
|
||||||
date_result = subprocess.run(['git', 'log', '-1', '--format=%ci'],
|
|
||||||
capture_output=True, text=True, cwd=base_dir, timeout=5)
|
|
||||||
if date_result.returncode == 0:
|
|
||||||
build_date = date_result.stdout.strip()
|
|
||||||
except (subprocess.TimeoutExpired, subprocess.SubprocessError, FileNotFoundError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
if not git_commit:
|
|
||||||
git_commit = 'development'
|
|
||||||
if not git_commit_short:
|
|
||||||
git_commit_short = 'dev'
|
|
||||||
if not build_date:
|
|
||||||
build_date = 'unknown'
|
|
||||||
|
|
||||||
return {
|
|
||||||
'VERSION_INFO': {
|
|
||||||
'git_commit': git_commit,
|
|
||||||
'git_commit_short': git_commit_short,
|
|
||||||
'build_date': build_date,
|
|
||||||
'is_development': git_commit_short == 'dev'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,22 +0,0 @@
from django.contrib.auth import authenticate, login
from django.utils.deprecation import MiddlewareMixin

class RequestLogger:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        print(f"Original: {request.build_absolute_uri()}")
        print(f"Path : {request.path}")

        response = self.get_response(request)

        return response


class AutoLoginMiddleware(MiddlewareMixin):
    def process_request(self, request):
        if not request.user.is_authenticated:
            user = authenticate(username='admin', password='admin')
            if user:
                login(request, user)
@@ -1,233 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
import os
|
|
||||||
import environ
|
|
||||||
from django.core.management.utils import get_random_secret_key
|
|
||||||
|
|
||||||
|
|
||||||
ENV = environ.Env(
|
|
||||||
DEBUG=(bool, False)
|
|
||||||
)
|
|
||||||
|
|
||||||
environ.Env.read_env()
|
|
||||||
|
|
||||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
|
||||||
SECRET_KEY=ENV('SECRET_KEY', default='django-insecure-change-me-in-production')
|
|
||||||
TIME_ZONE = ENV('TIMEZONE', default='Asia/Nicosia')
|
|
||||||
EXTERNAL_ADDRESS = ENV('EXTERNAL_ADDRESS', default='https://example.org')
|
|
||||||
|
|
||||||
CELERY_BROKER_URL = ENV('CELERY_BROKER_URL', default='redis://localhost:6379/0')
|
|
||||||
CELERY_RESULT_BACKEND = 'django-db'
|
|
||||||
CELERY_TIMEZONE = ENV('TIMEZONE', default='Asia/Nicosia')
|
|
||||||
CELERY_ACCEPT_CONTENT = ['json']
|
|
||||||
CELERY_TASK_SERIALIZER = 'json'
|
|
||||||
CELERY_RESULT_SERIALIZER = 'json'
|
|
||||||
CELERY_RESULT_EXTENDED = True
|
|
||||||
|
|
||||||
# Celery Beat Schedule
|
|
||||||
from celery.schedules import crontab
|
|
||||||
CELERY_BEAT_SCHEDULE = {
|
|
||||||
'update-user-statistics': {
|
|
||||||
'task': 'update_user_statistics',
|
|
||||||
'schedule': crontab(minute='*/5'), # Every 5 minutes
|
|
||||||
},
|
|
||||||
'cleanup-task-logs': {
|
|
||||||
'task': 'cleanup_task_logs',
|
|
||||||
'schedule': crontab(hour=2, minute=0), # Daily at 2 AM
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
AUTH_USER_MODEL = "vpn.User"
|
|
||||||
|
|
||||||
DEBUG = ENV('DEBUG')
|
|
||||||
|
|
||||||
ALLOWED_HOSTS = ENV.list('ALLOWED_HOSTS', default=["*"])
|
|
||||||
|
|
||||||
CORS_ALLOW_ALL_ORIGINS = True
|
|
||||||
CORS_ALLOW_CREDENTIALS = True
|
|
||||||
CSRF_TRUSTED_ORIGINS = ENV.list('CSRF_TRUSTED_ORIGINS', default=[])
|
|
||||||
|
|
||||||
STATIC_ROOT = BASE_DIR / "staticfiles"
|
|
||||||
|
|
||||||
LOGIN_REDIRECT_URL = '/'
|
|
||||||
|
|
||||||
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
|
||||||
|
|
||||||
LOGGING = {
|
|
||||||
'version': 1,
|
|
||||||
'disable_existing_loggers': False,
|
|
||||||
'formatters': {
|
|
||||||
'verbose': {
|
|
||||||
'format': '[{asctime}] {levelname} {name} {message}',
|
|
||||||
'style': '{',
|
|
||||||
},
|
|
||||||
'simple': {
|
|
||||||
'format': '{levelname} {message}',
|
|
||||||
'style': '{',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'handlers': {
|
|
||||||
'console': {
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'class': 'logging.StreamHandler',
|
|
||||||
'formatter': 'verbose',
|
|
||||||
},
|
|
||||||
'file': {
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'class': 'logging.FileHandler',
|
|
||||||
'filename': os.path.join(BASE_DIR, 'debug.log'),
|
|
||||||
'formatter': 'verbose',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'loggers': {
|
|
||||||
'django': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'INFO',
|
|
||||||
'propagate': True,
|
|
||||||
},
|
|
||||||
'vpn': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
'telegram_bot': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'DEBUG',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
'requests': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'INFO',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
'urllib3': {
|
|
||||||
'handlers': ['console'],
|
|
||||||
'level': 'INFO',
|
|
||||||
'propagate': False,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
INSTALLED_APPS = [
|
|
||||||
'jazzmin',
|
|
||||||
'django.contrib.admin',
|
|
||||||
'django.contrib.auth',
|
|
||||||
'django.contrib.contenttypes',
|
|
||||||
'django.contrib.sessions',
|
|
||||||
'django.contrib.messages',
|
|
||||||
'django.contrib.staticfiles',
|
|
||||||
'polymorphic',
|
|
||||||
'corsheaders',
|
|
||||||
'django_celery_results',
|
|
||||||
'django_celery_beat',
|
|
||||||
'vpn',
|
|
||||||
'telegram_bot',
|
|
||||||
]
|
|
||||||
|
|
||||||
MIDDLEWARE = [
|
|
||||||
'django.middleware.security.SecurityMiddleware',
|
|
||||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
|
||||||
'django.contrib.sessions.middleware.SessionMiddleware',
|
|
||||||
'django.middleware.common.CommonMiddleware',
|
|
||||||
'django.middleware.csrf.CsrfViewMiddleware',
|
|
||||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
|
||||||
'mysite.middleware.AutoLoginMiddleware',
|
|
||||||
'django.contrib.messages.middleware.MessageMiddleware',
|
|
||||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
|
||||||
'corsheaders.middleware.CorsMiddleware',
|
|
||||||
|
|
||||||
]
|
|
||||||
|
|
||||||
ROOT_URLCONF = 'mysite.urls'
|
|
||||||
|
|
||||||
GIT_COMMIT = ENV('GIT_COMMIT', default='development')
|
|
||||||
GIT_COMMIT_SHORT = ENV('GIT_COMMIT_SHORT', default='dev')
|
|
||||||
BUILD_DATE = ENV('BUILD_DATE', default='unknown')
|
|
||||||
|
|
||||||
TEMPLATES = [
|
|
||||||
{
|
|
||||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
|
||||||
'DIRS': [
|
|
||||||
os.path.join(BASE_DIR, 'templates'),
|
|
||||||
os.path.join(BASE_DIR, 'vpn', 'templates')
|
|
||||||
],
|
|
||||||
'APP_DIRS': True,
|
|
||||||
'OPTIONS': {
|
|
||||||
'context_processors': [
|
|
||||||
'django.template.context_processors.debug',
|
|
||||||
'django.template.context_processors.request',
|
|
||||||
'django.contrib.auth.context_processors.auth',
|
|
||||||
'django.contrib.messages.context_processors.messages',
|
|
||||||
'mysite.context_processors.version_info',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
WSGI_APPLICATION = 'mysite.wsgi.application'
|
|
||||||
|
|
||||||
|
|
||||||
# Database
|
|
||||||
# https://docs.djangoproject.com/en/5.1/ref/settings/#databases
|
|
||||||
|
|
||||||
# CREATE USER outfleet WITH PASSWORD 'password';
|
|
||||||
# GRANT ALL PRIVILEGES ON DATABASE outfleet TO outfleet;
|
|
||||||
# ALTER DATABASE outfleet OWNER TO outfleet;
|
|
||||||
|
|
||||||
DATABASES = {
|
|
||||||
'sqlite': {
|
|
||||||
'ENGINE': 'django.db.backends.sqlite3',
|
|
||||||
'NAME': BASE_DIR / 'db.sqlite3',
|
|
||||||
},
|
|
||||||
'default': {
|
|
||||||
'ENGINE': 'django.db.backends.postgresql',
|
|
||||||
'NAME': ENV('POSTGRES_DB', default="outfleet"),
|
|
||||||
'USER': ENV('POSTGRES_USER', default="outfleet"),
|
|
||||||
'PASSWORD': ENV('POSTGRES_PASSWORD', default="outfleet"),
|
|
||||||
'HOST': ENV('POSTGRES_HOST', default='localhost'),
|
|
||||||
'PORT': ENV('POSTGRES_PORT', default='5432'),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Password validation
|
|
||||||
# https://docs.djangoproject.com/en/5.1/ref/settings/#auth-password-validators
|
|
||||||
|
|
||||||
AUTH_PASSWORD_VALIDATORS = [
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
# Internationalization
|
|
||||||
# https://docs.djangoproject.com/en/5.1/topics/i18n/
|
|
||||||
|
|
||||||
LANGUAGE_CODE = 'en-us'
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
USE_I18N = True
|
|
||||||
|
|
||||||
USE_TZ = True
|
|
||||||
|
|
||||||
|
|
||||||
# Static files (CSS, JavaScript, Images)
|
|
||||||
# https://docs.djangoproject.com/en/5.1/howto/static-files/
|
|
||||||
|
|
||||||
STATIC_URL = '/static/'
|
|
||||||
STATICFILES_DIRS = [
|
|
||||||
BASE_DIR / 'static',
|
|
||||||
]
|
|
||||||
# Default primary key field type
|
|
||||||
# https://docs.djangoproject.com/en/5.1/ref/settings/#default-auto-field
|
|
||||||
|
|
||||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
|
||||||
@@ -1,30 +0,0 @@
"""
URL configuration for mysite project.

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/5.1/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.views.generic import RedirectView
from vpn.views import shadowsocks, userFrontend, userPortal, xray_subscription

urlpatterns = [
    path('admin/', admin.site.urls),
    path('ss/<path:link>', shadowsocks, name='shadowsocks'),
    path('dynamic/<path:link>', shadowsocks, name='shadowsocks'),
    path('xray/<str:user_hash>', xray_subscription, name='xray_subscription'),
    path('stat/<path:user_hash>', userFrontend, name='userFrontend'),
    path('u/<path:user_hash>', userPortal, name='userPortal'),
    path('', RedirectView.as_view(url='/admin/', permanent=False)),
]
@@ -1,16 +0,0 @@
"""
WSGI config for mysite project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/5.1/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')

application = get_wsgi_application()
@@ -1,22 +0,0 @@
django-environ==0.12.0
Django==5.1.7
celery==5.4.0
django-jazzmin==3.0.1
django-polymorphic==3.1.0
django-cors-headers==4.5.0
django-celery-results==2.5.1
git+https://github.com/celery/django-celery-beat#egg=django-celery-beat
requests==2.32.3
PyYaml==6.0.2
Markdown==3.7
outline-vpn-api==6.3.0
Redis==5.2.1
whitenoise==6.9.0
psycopg2-binary==2.9.10
setuptools==75.2.0
shortuuid==1.0.13
cryptography==45.0.5
acme>=2.0.0
cloudflare>=4.3.1
josepy>=2.0.0
python-telegram-bot==21.10
67
src/config/args.rs
Normal file
@@ -0,0 +1,67 @@
use clap::Parser;
use std::path::PathBuf;

#[derive(Parser, Debug)]
#[command(name = "xray-admin")]
#[command(about = "A web admin panel for managing xray-core VPN proxy servers")]
#[command(version)]
pub struct Args {
    /// Configuration file path
    #[arg(short, long, value_name = "FILE")]
    pub config: Option<PathBuf>,

    /// Database connection URL
    #[arg(long, env = "DATABASE_URL")]
    pub database_url: Option<String>,

    /// Web server host address
    #[arg(long, default_value = "127.0.0.1")]
    pub host: Option<String>,

    /// Web server port
    #[arg(short, long)]
    pub port: Option<u16>,

    /// Log level (trace, debug, info, warn, error)
    #[arg(long, default_value = "info")]
    pub log_level: Option<String>,

    /// Base URL for the application (used in subscription links and Telegram messages)
    #[arg(long, env = "BASE_URL")]
    pub base_url: Option<String>,

    /// Validate configuration and exit
    #[arg(long)]
    pub validate_config: bool,

    /// Print default configuration and exit
    #[arg(long)]
    pub print_default_config: bool,
}

pub fn parse_args() -> Args {
    Args::parse()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_args_parsing() {
        let args = Args::try_parse_from(&[
            "xray-admin",
            "--config",
            "test.toml",
            "--port",
            "9090",
            "--log-level",
            "debug",
        ])
        .unwrap();

        assert_eq!(args.config, Some(PathBuf::from("test.toml")));
        assert_eq!(args.port, Some(9090));
        assert_eq!(args.log_level, Some("debug".to_string()));
    }
}
122
src/config/env.rs
Normal file
122
src/config/env.rs
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
use std::env;
|
||||||
|
|
||||||
|
/// Environment variable utilities
|
||||||
|
pub struct EnvVars;
|
||||||
|
|
||||||
|
impl EnvVars {
|
||||||
|
/// Get environment variable with fallback
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn get_or_default(key: &str, default: &str) -> String {
|
||||||
|
env::var(key).unwrap_or_else(|_| default.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get required environment variable
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn get_required(key: &str) -> Result<String, env::VarError> {
|
||||||
|
env::var(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if running in development mode
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn is_development() -> bool {
|
||||||
|
matches!(
|
||||||
|
env::var("RUST_ENV").as_deref(),
|
||||||
|
Ok("development") | Ok("dev")
|
||||||
|
) || matches!(
|
||||||
|
env::var("ENVIRONMENT").as_deref(),
|
||||||
|
Ok("development") | Ok("dev")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if running in production mode
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn is_production() -> bool {
|
||||||
|
matches!(
|
||||||
|
env::var("RUST_ENV").as_deref(),
|
||||||
|
Ok("production") | Ok("prod")
|
||||||
|
) || matches!(
|
||||||
|
env::var("ENVIRONMENT").as_deref(),
|
||||||
|
Ok("production") | Ok("prod")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get database URL from environment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn database_url() -> Option<String> {
|
||||||
|
env::var("DATABASE_URL")
|
||||||
|
.ok()
|
||||||
|
.or_else(|| env::var("XRAY_ADMIN__DATABASE__URL").ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get telegram bot token from environment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn telegram_token() -> Option<String> {
|
||||||
|
env::var("TELEGRAM_BOT_TOKEN")
|
||||||
|
.ok()
|
||||||
|
.or_else(|| env::var("XRAY_ADMIN__TELEGRAM__BOT_TOKEN").ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get JWT secret from environment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn jwt_secret() -> Option<String> {
|
||||||
|
env::var("JWT_SECRET")
|
||||||
|
.ok()
|
||||||
|
.or_else(|| env::var("XRAY_ADMIN__WEB__JWT_SECRET").ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print environment info for debugging
|
||||||
|
pub fn print_env_info() {
|
||||||
|
tracing::debug!("Environment information:");
|
||||||
|
tracing::debug!(" RUST_ENV: {:?}", env::var("RUST_ENV"));
|
||||||
|
tracing::debug!(" ENVIRONMENT: {:?}", env::var("ENVIRONMENT"));
|
||||||
|
tracing::debug!(
|
||||||
|
" DATABASE_URL: {}",
|
||||||
|
if env::var("DATABASE_URL").is_ok() {
|
||||||
|
"set"
|
||||||
|
} else {
|
||||||
|
"not set"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
tracing::debug!(
|
||||||
|
" TELEGRAM_BOT_TOKEN: {}",
|
||||||
|
if env::var("TELEGRAM_BOT_TOKEN").is_ok() {
|
||||||
|
"set"
|
||||||
|
} else {
|
||||||
|
"not set"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
tracing::debug!(
|
||||||
|
" JWT_SECRET: {}",
|
||||||
|
if env::var("JWT_SECRET").is_ok() {
|
||||||
|
"set"
|
||||||
|
} else {
|
||||||
|
"not set"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::env;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_get_or_default() {
|
||||||
|
let result = EnvVars::get_or_default("NON_EXISTENT_VAR", "default_value");
|
||||||
|
assert_eq!(result, "default_value");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_environment_detection() {
|
||||||
|
env::set_var("RUST_ENV", "development");
|
||||||
|
assert!(EnvVars::is_development());
|
||||||
|
assert!(!EnvVars::is_production());
|
||||||
|
|
||||||
|
env::set_var("RUST_ENV", "production");
|
||||||
|
assert!(!EnvVars::is_development());
|
||||||
|
assert!(EnvVars::is_production());
|
||||||
|
|
||||||
|
env::remove_var("RUST_ENV");
|
||||||
|
}
|
||||||
|
}
|
||||||
184
src/config/file.rs
Normal file
184
src/config/file.rs
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
use anyhow::{Context, Result};
|
||||||
|
use std::fs;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use super::AppConfig;
|
||||||
|
|
||||||
|
/// Configuration file utilities
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub struct ConfigFile;
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl ConfigFile {
|
||||||
|
/// Load configuration from TOML file
|
||||||
|
pub fn load_toml<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let content = fs::read_to_string(&path)
|
||||||
|
.with_context(|| format!("Failed to read config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
let config: AppConfig = toml::from_str(&content).with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to parse TOML config file: {}",
|
||||||
|
path.as_ref().display()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load configuration from YAML file
|
||||||
|
pub fn load_yaml<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let content = fs::read_to_string(&path)
|
||||||
|
.with_context(|| format!("Failed to read config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
let config: AppConfig = serde_yaml::from_str(&content).with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to parse YAML config file: {}",
|
||||||
|
path.as_ref().display()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load configuration from JSON file
|
||||||
|
pub fn load_json<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let content = fs::read_to_string(&path)
|
||||||
|
.with_context(|| format!("Failed to read config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
let config: AppConfig = serde_json::from_str(&content).with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to parse JSON config file: {}",
|
||||||
|
path.as_ref().display()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Auto-detect format and load configuration file
|
||||||
|
pub fn load_auto<P: AsRef<Path>>(path: P) -> Result<AppConfig> {
|
||||||
|
let path = path.as_ref();
|
||||||
|
|
||||||
|
match path.extension().and_then(|ext| ext.to_str()) {
|
||||||
|
Some("toml") => Self::load_toml(path),
|
||||||
|
Some("yaml") | Some("yml") => Self::load_yaml(path),
|
||||||
|
Some("json") => Self::load_json(path),
|
||||||
|
_ => {
|
||||||
|
// Try TOML first, then YAML, then JSON
|
||||||
|
Self::load_toml(path)
|
||||||
|
.or_else(|_| Self::load_yaml(path))
|
||||||
|
.or_else(|_| Self::load_json(path))
|
||||||
|
.with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to load config file '{}' - tried TOML, YAML, and JSON formats",
|
||||||
|
path.display()
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save configuration to TOML file
|
||||||
|
pub fn save_toml<P: AsRef<Path>>(config: &AppConfig, path: P) -> Result<()> {
|
||||||
|
let content =
|
||||||
|
toml::to_string_pretty(config).context("Failed to serialize config to TOML")?;
|
||||||
|
|
||||||
|
fs::write(&path, content)
|
||||||
|
.with_context(|| format!("Failed to write config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save configuration to YAML file
|
||||||
|
pub fn save_yaml<P: AsRef<Path>>(config: &AppConfig, path: P) -> Result<()> {
|
||||||
|
let content =
|
||||||
|
serde_yaml::to_string(config).context("Failed to serialize config to YAML")?;
|
||||||
|
|
||||||
|
fs::write(&path, content)
|
||||||
|
.with_context(|| format!("Failed to write config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save configuration to JSON file
|
||||||
|
pub fn save_json<P: AsRef<Path>>(config: &AppConfig, path: P) -> Result<()> {
|
||||||
|
let content =
|
||||||
|
serde_json::to_string_pretty(config).context("Failed to serialize config to JSON")?;
|
||||||
|
|
||||||
|
fs::write(&path, content)
|
||||||
|
.with_context(|| format!("Failed to write config file: {}", path.as_ref().display()))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if config file exists and is readable
|
||||||
|
pub fn exists_and_readable<P: AsRef<Path>>(path: P) -> bool {
|
||||||
|
let path = path.as_ref();
|
||||||
|
path.exists()
|
||||||
|
&& path.is_file()
|
||||||
|
&& fs::metadata(path)
|
||||||
|
.map(|m| !m.permissions().readonly())
|
||||||
|
.unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find default config file in common locations
|
||||||
|
pub fn find_default() -> Option<std::path::PathBuf> {
|
||||||
|
let candidates = [
|
||||||
|
"config.toml",
|
||||||
|
"config.yaml",
|
||||||
|
"config.yml",
|
||||||
|
"config.json",
|
||||||
|
"xray-admin.toml",
|
||||||
|
"xray-admin.yaml",
|
||||||
|
"xray-admin.yml",
|
||||||
|
"/etc/xray-admin/config.toml",
|
||||||
|
"/etc/xray-admin/config.yaml",
|
||||||
|
"~/.config/xray-admin/config.toml",
|
||||||
|
];
|
||||||
|
|
||||||
|
for candidate in &candidates {
|
||||||
|
let path = std::path::Path::new(candidate);
|
||||||
|
if Self::exists_and_readable(path) {
|
||||||
|
return Some(path.to_path_buf());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use tempfile::NamedTempFile;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_save_and_load_toml() -> Result<()> {
|
||||||
|
let config = AppConfig::default();
|
||||||
|
let temp_file = NamedTempFile::new()?;
|
||||||
|
|
||||||
|
ConfigFile::save_toml(&config, temp_file.path())?;
|
||||||
|
let loaded_config = ConfigFile::load_toml(temp_file.path())?;
|
||||||
|
|
||||||
|
assert_eq!(config.web.port, loaded_config.web.port);
|
||||||
|
assert_eq!(
|
||||||
|
config.database.max_connections,
|
||||||
|
loaded_config.database.max_connections
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_auto_detect_format() -> Result<()> {
|
||||||
|
let config = AppConfig::default();
|
||||||
|
|
||||||
|
// Test with .toml extension
|
||||||
|
let temp_file = NamedTempFile::with_suffix(".toml")?;
|
||||||
|
ConfigFile::save_toml(&config, temp_file.path())?;
|
||||||
|
let loaded_config = ConfigFile::load_auto(temp_file.path())?;
|
||||||
|
assert_eq!(config.web.port, loaded_config.web.port);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
262
src/config/mod.rs
Normal file
262
src/config/mod.rs
Normal file
@@ -0,0 +1,262 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use validator::Validate;
|
||||||
|
|
||||||
|
pub mod args;
|
||||||
|
pub mod env;
|
||||||
|
pub mod file;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct AppConfig {
|
||||||
|
pub database: DatabaseConfig,
|
||||||
|
pub web: WebConfig,
|
||||||
|
pub telegram: TelegramConfig,
|
||||||
|
pub xray: XrayConfig,
|
||||||
|
pub logging: LoggingConfig,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct DatabaseConfig {
|
||||||
|
#[validate(url)]
|
||||||
|
pub url: String,
|
||||||
|
#[validate(range(min = 1, max = 100))]
|
||||||
|
pub max_connections: u32,
|
||||||
|
#[validate(range(min = 1))]
|
||||||
|
pub connection_timeout: u64,
|
||||||
|
pub auto_migrate: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct WebConfig {
|
||||||
|
#[validate(ip)]
|
||||||
|
pub host: String,
|
||||||
|
#[validate(range(min = 1024, max = 65535))]
|
||||||
|
pub port: u16,
|
||||||
|
pub cors_origins: Vec<String>,
|
||||||
|
pub jwt_secret: String,
|
||||||
|
#[validate(range(min = 3600))]
|
||||||
|
pub jwt_expiry: u64,
|
||||||
|
/// Base URL for the application (used in subscription links and Telegram messages)
|
||||||
|
/// Example: "https://vpn.hexor.cy"
|
||||||
|
#[validate(url)]
|
||||||
|
pub base_url: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct TelegramConfig {
|
||||||
|
pub bot_token: String,
|
||||||
|
pub webhook_url: Option<String>,
|
||||||
|
pub admin_chat_ids: Vec<i64>,
|
||||||
|
pub allowed_users: Vec<i64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
|
||||||
|
pub struct XrayConfig {
|
||||||
|
pub default_api_port: u16,
|
||||||
|
pub config_template_path: PathBuf,
|
||||||
|
pub certificates_path: PathBuf,
|
||||||
|
#[validate(range(min = 1))]
|
||||||
|
pub health_check_interval: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct LoggingConfig {
|
||||||
|
pub level: String,
|
||||||
|
pub file_path: Option<PathBuf>,
|
||||||
|
pub json_format: bool,
|
||||||
|
pub max_file_size: Option<u64>,
|
||||||
|
pub max_files: Option<u32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for DatabaseConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
url: "postgresql://xray:password@localhost/xray_admin".to_string(),
|
||||||
|
max_connections: 10,
|
||||||
|
connection_timeout: 30,
|
||||||
|
auto_migrate: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for WebConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
host: "127.0.0.1".to_string(),
|
||||||
|
port: 8080,
|
||||||
|
cors_origins: vec!["http://localhost:3000".to_string()],
|
||||||
|
jwt_secret: "your-secret-key-change-in-production".to_string(),
|
||||||
|
jwt_expiry: 86400, // 24 hours
|
||||||
|
base_url: "http://localhost:8080".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for TelegramConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
bot_token: "".to_string(),
|
||||||
|
webhook_url: None,
|
||||||
|
admin_chat_ids: vec![],
|
||||||
|
allowed_users: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for XrayConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
default_api_port: 62789,
|
||||||
|
config_template_path: PathBuf::from("./templates"),
|
||||||
|
certificates_path: PathBuf::from("./certs"),
|
||||||
|
health_check_interval: 30,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for LoggingConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
level: "info".to_string(),
|
||||||
|
file_path: None,
|
||||||
|
json_format: false,
|
||||||
|
max_file_size: Some(10 * 1024 * 1024), // 10MB
|
||||||
|
max_files: Some(5),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for AppConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
database: DatabaseConfig::default(),
|
||||||
|
web: WebConfig::default(),
|
||||||
|
telegram: TelegramConfig::default(),
|
||||||
|
xray: XrayConfig::default(),
|
||||||
|
logging: LoggingConfig::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AppConfig {
|
||||||
|
/// Load configuration from multiple sources with priority:
|
||||||
|
/// 1. Command line arguments (highest)
|
||||||
|
/// 2. Environment variables
|
||||||
|
/// 3. Configuration file
|
||||||
|
/// 4. Default values (lowest)
|
||||||
|
pub fn load() -> Result<Self> {
|
||||||
|
let args = args::parse_args();
|
||||||
|
|
||||||
|
let mut builder = config::Config::builder()
|
||||||
|
// Start with defaults
|
||||||
|
.add_source(config::Config::try_from(&AppConfig::default())?);
|
||||||
|
|
||||||
|
// Add configuration file if specified or exists
|
||||||
|
if let Some(config_file) = &args.config {
|
||||||
|
builder = builder.add_source(config::File::from(config_file.as_path()));
|
||||||
|
} else if std::path::Path::new("config.toml").exists() {
|
||||||
|
builder = builder.add_source(config::File::with_name("config"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add environment variables with prefix
|
||||||
|
builder = builder.add_source(
|
||||||
|
config::Environment::with_prefix("XRAY_ADMIN")
|
||||||
|
.separator("__")
|
||||||
|
.try_parsing(true),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Override with command line arguments
|
||||||
|
if let Some(host) = &args.host {
|
||||||
|
builder = builder.set_override("web.host", host.as_str())?;
|
||||||
|
}
|
||||||
|
if let Some(port) = args.port {
|
||||||
|
builder = builder.set_override("web.port", port)?;
|
||||||
|
}
|
||||||
|
if let Some(db_url) = &args.database_url {
|
||||||
|
builder = builder.set_override("database.url", db_url.as_str())?;
|
||||||
|
}
|
||||||
|
if let Some(log_level) = &args.log_level {
|
||||||
|
builder = builder.set_override("logging.level", log_level.as_str())?;
|
||||||
|
}
|
||||||
|
if let Some(base_url) = &args.base_url {
|
||||||
|
builder = builder.set_override("web.base_url", base_url.as_str())?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let config: AppConfig = builder.build()?.try_deserialize()?;
|
||||||
|
|
||||||
|
// Validate configuration
|
||||||
|
config.validate()?;
|
||||||
|
|
||||||
|
Ok(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn display_summary(&self) {
|
||||||
|
tracing::info!("Configuration loaded:");
|
||||||
|
tracing::info!(" Database URL: {}", mask_sensitive(&self.database.url));
|
||||||
|
tracing::info!(" Web server: {}:{}", self.web.host, self.web.port);
|
||||||
|
tracing::info!(" Log level: {}", self.logging.level);
|
||||||
|
tracing::info!(
|
||||||
|
" Telegram bot: {}",
|
||||||
|
if self.telegram.bot_token.is_empty() {
|
||||||
|
"disabled"
|
||||||
|
} else {
|
||||||
|
"enabled"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
tracing::info!(
|
||||||
|
" Xray config path: {}",
|
||||||
|
self.xray.config_template_path.display()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mask sensitive information in URLs for logging
|
||||||
|
fn mask_sensitive(url: &str) -> String {
|
||||||
|
// Simple string-based approach to mask passwords
|
||||||
|
if let Some(scheme_end) = url.find("://") {
|
||||||
|
let after_scheme = &url[scheme_end + 3..];
|
||||||
|
if let Some(at_pos) = after_scheme.find('@') {
|
||||||
|
let auth_part = &after_scheme[..at_pos];
|
||||||
|
if let Some(colon_pos) = auth_part.find(':') {
|
||||||
|
// Found user:password@host pattern
|
||||||
|
let user = &auth_part[..colon_pos];
|
||||||
|
let host_part = &after_scheme[at_pos..];
|
||||||
|
return format!("{}://{}:***{}", &url[..scheme_end], user, host_part);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback to URL parsing if simple approach fails
|
||||||
|
if let Ok(parsed) = url::Url::parse(url) {
|
||||||
|
if parsed.password().is_some() {
|
||||||
|
let mut masked = parsed.clone();
|
||||||
|
masked.set_password(Some("***")).unwrap();
|
||||||
|
masked.to_string()
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_default_config_validation() {
|
||||||
|
let config = AppConfig::default();
|
||||||
|
// Default configuration should be valid
|
||||||
|
assert!(config.validate().is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mask_sensitive() {
|
||||||
|
let url = "postgresql://user:password@localhost/db";
|
||||||
|
let masked = mask_sensitive(url);
|
||||||
|
assert!(masked.contains("***"));
|
||||||
|
assert!(!masked.contains("password"));
|
||||||
|
}
|
||||||
|
}
|
||||||
251
src/database/entities/certificate.rs
Normal file
251
src/database/entities/certificate.rs
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "certificates")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
#[sea_orm(column_name = "cert_type")]
|
||||||
|
pub cert_type: String,
|
||||||
|
|
||||||
|
pub domain: String,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub cert_data: Vec<u8>,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub key_data: Vec<u8>,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub chain_data: Option<Vec<u8>>,
|
||||||
|
|
||||||
|
pub expires_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub auto_renew: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(has_many = "super::server::Entity")]
|
||||||
|
Servers,
|
||||||
|
#[sea_orm(has_many = "super::server_inbound::Entity")]
|
||||||
|
ServerInbounds,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::Servers.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbounds.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum CertificateType {
|
||||||
|
SelfSigned,
|
||||||
|
Imported,
|
||||||
|
LetsEncrypt,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CertificateType> for String {
|
||||||
|
fn from(cert_type: CertificateType) -> Self {
|
||||||
|
match cert_type {
|
||||||
|
CertificateType::SelfSigned => "self_signed".to_string(),
|
||||||
|
CertificateType::Imported => "imported".to_string(),
|
||||||
|
CertificateType::LetsEncrypt => "letsencrypt".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for CertificateType {
|
||||||
|
fn from(s: String) -> Self {
|
||||||
|
match s.as_str() {
|
||||||
|
"self_signed" => CertificateType::SelfSigned,
|
||||||
|
"imported" => CertificateType::Imported,
|
||||||
|
"letsencrypt" => CertificateType::LetsEncrypt,
|
||||||
|
_ => CertificateType::SelfSigned,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateCertificateDto {
|
||||||
|
pub name: String,
|
||||||
|
pub cert_type: String,
|
||||||
|
pub domain: String,
|
||||||
|
pub auto_renew: bool,
|
||||||
|
#[serde(default)]
|
||||||
|
pub certificate_pem: String,
|
||||||
|
#[serde(default)]
|
||||||
|
pub private_key: String,
|
||||||
|
// For Let's Encrypt certificates via DNS challenge
|
||||||
|
pub dns_provider_id: Option<Uuid>,
|
||||||
|
pub acme_email: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateCertificateDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub auto_renew: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CertificateResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub cert_type: String,
|
||||||
|
pub domain: String,
|
||||||
|
pub expires_at: DateTimeUtc,
|
||||||
|
pub auto_renew: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
pub has_cert_data: bool,
|
||||||
|
pub has_key_data: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CertificateDetailsResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub cert_type: String,
|
||||||
|
pub domain: String,
|
||||||
|
pub expires_at: DateTimeUtc,
|
||||||
|
pub auto_renew: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
pub certificate_pem: String,
|
||||||
|
pub has_private_key: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for CertificateResponse {
|
||||||
|
fn from(cert: Model) -> Self {
|
||||||
|
Self {
|
||||||
|
id: cert.id,
|
||||||
|
name: cert.name,
|
||||||
|
cert_type: cert.cert_type,
|
||||||
|
domain: cert.domain,
|
||||||
|
expires_at: cert.expires_at,
|
||||||
|
auto_renew: cert.auto_renew,
|
||||||
|
created_at: cert.created_at,
|
||||||
|
updated_at: cert.updated_at,
|
||||||
|
has_cert_data: !cert.cert_data.is_empty(),
|
||||||
|
has_key_data: !cert.key_data.is_empty(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for CertificateDetailsResponse {
|
||||||
|
fn from(cert: Model) -> Self {
|
||||||
|
let certificate_pem = cert.certificate_pem();
|
||||||
|
let has_private_key = !cert.key_data.is_empty();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
id: cert.id,
|
||||||
|
name: cert.name,
|
||||||
|
cert_type: cert.cert_type,
|
||||||
|
domain: cert.domain,
|
||||||
|
expires_at: cert.expires_at,
|
||||||
|
auto_renew: cert.auto_renew,
|
||||||
|
created_at: cert.created_at,
|
||||||
|
updated_at: cert.updated_at,
|
||||||
|
certificate_pem,
|
||||||
|
has_private_key,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn is_expired(&self) -> bool {
|
||||||
|
self.expires_at < chrono::Utc::now()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn expires_soon(&self, days: i64) -> bool {
|
||||||
|
let threshold = chrono::Utc::now() + chrono::Duration::days(days);
|
||||||
|
self.expires_at < threshold
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get certificate data as PEM string
|
||||||
|
pub fn certificate_pem(&self) -> String {
|
||||||
|
String::from_utf8_lossy(&self.cert_data).to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get private key data as PEM string
|
||||||
|
pub fn private_key_pem(&self) -> String {
|
||||||
|
String::from_utf8_lossy(&self.key_data).to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_update(self, dto: UpdateCertificateDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(auto_renew) = dto.auto_renew {
|
||||||
|
active_model.auto_renew = Set(auto_renew);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateCertificateDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateCertificateDto) -> Self {
|
||||||
|
Self {
|
||||||
|
name: Set(dto.name),
|
||||||
|
cert_type: Set(dto.cert_type),
|
||||||
|
domain: Set(dto.domain),
|
||||||
|
cert_data: Set(dto.certificate_pem.into_bytes()),
|
||||||
|
key_data: Set(dto.private_key.into_bytes()),
|
||||||
|
chain_data: Set(None),
|
||||||
|
expires_at: Set(chrono::Utc::now() + chrono::Duration::days(90)), // Default 90 days
|
||||||
|
auto_renew: Set(dto.auto_renew),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
158
src/database/entities/dns_provider.rs
Normal file
158
src/database/entities/dns_provider.rs
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "dns_providers")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
pub provider_type: String, // "cloudflare", "route53", etc.
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub api_token: String, // Encrypted storage in production
|
||||||
|
|
||||||
|
pub is_active: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DTOs for API requests/responses
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct CreateDnsProviderDto {
|
||||||
|
pub name: String,
|
||||||
|
pub provider_type: String,
|
||||||
|
pub api_token: String,
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateDnsProviderDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub api_token: Option<String>,
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct DnsProviderResponseDto {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub provider_type: String,
|
||||||
|
pub is_active: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
pub has_token: bool, // Don't expose actual token
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateDnsProviderDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateDnsProviderDto) -> Self {
|
||||||
|
ActiveModel {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
name: Set(dto.name),
|
||||||
|
provider_type: Set(dto.provider_type),
|
||||||
|
api_token: Set(dto.api_token),
|
||||||
|
is_active: Set(dto.is_active.unwrap_or(true)),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
/// Update this model with data from UpdateDnsProviderDto
|
||||||
|
pub fn apply_update(self, dto: UpdateDnsProviderDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(api_token) = dto.api_token {
|
||||||
|
active_model.api_token = Set(api_token);
|
||||||
|
}
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert to response DTO (without exposing API token)
|
||||||
|
pub fn to_response_dto(&self) -> DnsProviderResponseDto {
|
||||||
|
DnsProviderResponseDto {
|
||||||
|
id: self.id,
|
||||||
|
name: self.name.clone(),
|
||||||
|
provider_type: self.provider_type.clone(),
|
||||||
|
is_active: self.is_active,
|
||||||
|
created_at: self.created_at,
|
||||||
|
updated_at: self.updated_at,
|
||||||
|
has_token: !self.api_token.is_empty(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Supported DNS provider types
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum DnsProviderType {
|
||||||
|
#[serde(rename = "cloudflare")]
|
||||||
|
Cloudflare,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DnsProviderType {
|
||||||
|
pub fn as_str(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
DnsProviderType::Cloudflare => "cloudflare",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_str(s: &str) -> Option<Self> {
|
||||||
|
match s {
|
||||||
|
"cloudflare" => Some(DnsProviderType::Cloudflare),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn all() -> Vec<Self> {
|
||||||
|
vec![DnsProviderType::Cloudflare]
|
||||||
|
}
|
||||||
|
}
|
||||||
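A small sketch of how the DnsProviderType enum above can guard provider creation; the function name is an assumption for illustration:

// Illustrative only: reject unknown provider types before building the ActiveModel.
fn validate_provider_type(dto: &CreateDnsProviderDto) -> Result<DnsProviderType, String> {
    DnsProviderType::from_str(&dto.provider_type)
        .ok_or_else(|| format!("unsupported DNS provider type: {}", dto.provider_type))
}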
284 src/database/entities/inbound_template.rs Normal file
@@ -0,0 +1,284 @@
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "inbound_templates")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
pub description: Option<String>,
|
||||||
|
|
||||||
|
pub protocol: String,
|
||||||
|
|
||||||
|
pub default_port: i32,
|
||||||
|
|
||||||
|
pub base_settings: Value,
|
||||||
|
|
||||||
|
pub stream_settings: Value,
|
||||||
|
|
||||||
|
pub requires_tls: bool,
|
||||||
|
|
||||||
|
pub requires_domain: bool,
|
||||||
|
|
||||||
|
pub variables: Value,
|
||||||
|
|
||||||
|
pub is_active: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(has_many = "super::server_inbound::Entity")]
|
||||||
|
ServerInbounds,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbounds.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum Protocol {
|
||||||
|
Vless,
|
||||||
|
Vmess,
|
||||||
|
Trojan,
|
||||||
|
Shadowsocks,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Protocol> for String {
|
||||||
|
fn from(protocol: Protocol) -> Self {
|
||||||
|
match protocol {
|
||||||
|
Protocol::Vless => "vless".to_string(),
|
||||||
|
Protocol::Vmess => "vmess".to_string(),
|
||||||
|
Protocol::Trojan => "trojan".to_string(),
|
||||||
|
Protocol::Shadowsocks => "shadowsocks".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for Protocol {
|
||||||
|
fn from(s: String) -> Self {
|
||||||
|
match s.as_str() {
|
||||||
|
"vless" => Protocol::Vless,
|
||||||
|
"vmess" => Protocol::Vmess,
|
||||||
|
"trojan" => Protocol::Trojan,
|
||||||
|
"shadowsocks" => Protocol::Shadowsocks,
|
||||||
|
_ => Protocol::Vless,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct TemplateVariable {
|
||||||
|
pub key: String,
|
||||||
|
pub var_type: VariableType,
|
||||||
|
pub required: bool,
|
||||||
|
pub default_value: Option<String>,
|
||||||
|
pub description: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum VariableType {
|
||||||
|
String,
|
||||||
|
Number,
|
||||||
|
Path,
|
||||||
|
Domain,
|
||||||
|
Port,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateInboundTemplateDto {
|
||||||
|
pub name: String,
|
||||||
|
pub protocol: String,
|
||||||
|
pub default_port: i32,
|
||||||
|
pub requires_tls: bool,
|
||||||
|
pub config_template: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateInboundTemplateDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub default_port: Option<i32>,
|
||||||
|
pub base_settings: Option<Value>,
|
||||||
|
pub stream_settings: Option<Value>,
|
||||||
|
pub requires_tls: Option<bool>,
|
||||||
|
pub requires_domain: Option<bool>,
|
||||||
|
pub variables: Option<Vec<TemplateVariable>>,
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct InboundTemplateResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub protocol: String,
|
||||||
|
pub default_port: i32,
|
||||||
|
pub base_settings: Value,
|
||||||
|
pub stream_settings: Value,
|
||||||
|
pub requires_tls: bool,
|
||||||
|
pub requires_domain: bool,
|
||||||
|
pub variables: Vec<TemplateVariable>,
|
||||||
|
pub is_active: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for InboundTemplateResponse {
|
||||||
|
fn from(template: Model) -> Self {
|
||||||
|
let variables = template.get_variables();
|
||||||
|
Self {
|
||||||
|
id: template.id,
|
||||||
|
name: template.name,
|
||||||
|
description: template.description,
|
||||||
|
protocol: template.protocol,
|
||||||
|
default_port: template.default_port,
|
||||||
|
base_settings: template.base_settings,
|
||||||
|
stream_settings: template.stream_settings,
|
||||||
|
requires_tls: template.requires_tls,
|
||||||
|
requires_domain: template.requires_domain,
|
||||||
|
variables,
|
||||||
|
is_active: template.is_active,
|
||||||
|
created_at: template.created_at,
|
||||||
|
updated_at: template.updated_at,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateInboundTemplateDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateInboundTemplateDto) -> Self {
|
||||||
|
// Parse config_template as JSON or use default
|
||||||
|
let config_json: Value =
|
||||||
|
serde_json::from_str(&dto.config_template).unwrap_or_else(|_| serde_json::json!({}));
|
||||||
|
|
||||||
|
Self {
|
||||||
|
name: Set(dto.name),
|
||||||
|
description: Set(None),
|
||||||
|
protocol: Set(dto.protocol),
|
||||||
|
default_port: Set(dto.default_port),
|
||||||
|
base_settings: Set(config_json.clone()),
|
||||||
|
stream_settings: Set(serde_json::json!({})),
|
||||||
|
requires_tls: Set(dto.requires_tls),
|
||||||
|
requires_domain: Set(false),
|
||||||
|
variables: Set(Value::Array(vec![])),
|
||||||
|
is_active: Set(true),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
pub fn get_variables(&self) -> Vec<TemplateVariable> {
|
||||||
|
serde_json::from_value(self.variables.clone()).unwrap_or_default()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn apply_variables(
|
||||||
|
&self,
|
||||||
|
values: &serde_json::Map<String, Value>,
|
||||||
|
) -> Result<(Value, Value), String> {
|
||||||
|
let base_settings = self.base_settings.clone();
|
||||||
|
let stream_settings = self.stream_settings.clone();
|
||||||
|
|
||||||
|
// Replace variables in JSON using simple string replacement
|
||||||
|
let base_str = base_settings.to_string();
|
||||||
|
let stream_str = stream_settings.to_string();
|
||||||
|
|
||||||
|
let mut result_base = base_str;
|
||||||
|
let mut result_stream = stream_str;
|
||||||
|
|
||||||
|
for (key, value) in values {
|
||||||
|
let placeholder = format!("${{{}}}", key);
|
||||||
|
let replacement = match value {
|
||||||
|
Value::String(s) => s.clone(),
|
||||||
|
Value::Number(n) => n.to_string(),
|
||||||
|
_ => value.to_string(),
|
||||||
|
};
|
||||||
|
result_base = result_base.replace(&placeholder, &replacement);
|
||||||
|
result_stream = result_stream.replace(&placeholder, &replacement);
|
||||||
|
}
|
||||||
|
|
||||||
|
let final_base: Value = serde_json::from_str(&result_base)
|
||||||
|
.map_err(|e| format!("Invalid base settings after variable substitution: {}", e))?;
|
||||||
|
let final_stream: Value = serde_json::from_str(&result_stream)
|
||||||
|
.map_err(|e| format!("Invalid stream settings after variable substitution: {}", e))?;
|
||||||
|
|
||||||
|
Ok((final_base, final_stream))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_update(self, dto: UpdateInboundTemplateDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(description) = dto.description {
|
||||||
|
active_model.description = Set(Some(description));
|
||||||
|
}
|
||||||
|
if let Some(default_port) = dto.default_port {
|
||||||
|
active_model.default_port = Set(default_port);
|
||||||
|
}
|
||||||
|
if let Some(base_settings) = dto.base_settings {
|
||||||
|
active_model.base_settings = Set(base_settings);
|
||||||
|
}
|
||||||
|
if let Some(stream_settings) = dto.stream_settings {
|
||||||
|
active_model.stream_settings = Set(stream_settings);
|
||||||
|
}
|
||||||
|
if let Some(requires_tls) = dto.requires_tls {
|
||||||
|
active_model.requires_tls = Set(requires_tls);
|
||||||
|
}
|
||||||
|
if let Some(requires_domain) = dto.requires_domain {
|
||||||
|
active_model.requires_domain = Set(requires_domain);
|
||||||
|
}
|
||||||
|
if let Some(variables) = dto.variables {
|
||||||
|
active_model.variables =
|
||||||
|
Set(serde_json::to_value(variables).unwrap_or(Value::Array(vec![])));
|
||||||
|
}
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
}
|
||||||
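A sketch of the apply_variables flow above with example values; the render_template helper and the placeholder values are hypothetical:

use serde_json::{json, Map, Value};

// Illustrative only: fill the ${domain} and ${port} placeholders of one template.
fn render_template(template: &Model) -> Result<(Value, Value), String> {
    let mut values = Map::new();
    values.insert("domain".to_string(), json!("example.com"));
    values.insert("port".to_string(), json!(443));
    template.apply_variables(&values)
}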
190 src/database/entities/inbound_users.rs Normal file
@@ -0,0 +1,190 @@
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "inbound_users")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
/// Reference to the actual user
|
||||||
|
pub user_id: Uuid,
|
||||||
|
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
|
||||||
|
/// Generated xray user ID (UUID for protocols like vmess/vless)
|
||||||
|
pub xray_user_id: String,
|
||||||
|
|
||||||
|
/// Generated password for protocols like trojan/shadowsocks
|
||||||
|
pub password: Option<String>,
|
||||||
|
|
||||||
|
pub level: i32,
|
||||||
|
|
||||||
|
pub is_active: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::user::Entity",
|
||||||
|
from = "Column::UserId",
|
||||||
|
to = "super::user::Column::Id"
|
||||||
|
)]
|
||||||
|
User,
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::server_inbound::Entity",
|
||||||
|
from = "Column::ServerInboundId",
|
||||||
|
to = "super::server_inbound::Column::Id"
|
||||||
|
)]
|
||||||
|
ServerInbound,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::user::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::User.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbound.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inbound user creation data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateInboundUserDto {
|
||||||
|
pub user_id: Uuid,
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
pub level: Option<i32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CreateInboundUserDto {
|
||||||
|
/// Generate UUID for xray user (for vmess/vless)
|
||||||
|
pub fn generate_xray_user_id(&self) -> String {
|
||||||
|
Uuid::new_v4().to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate random password (for trojan/shadowsocks)
|
||||||
|
pub fn generate_password(&self) -> String {
|
||||||
|
use rand::distributions::Alphanumeric;
|
||||||
|
use rand::prelude::*;
|
||||||
|
|
||||||
|
thread_rng()
|
||||||
|
.sample_iter(&Alphanumeric)
|
||||||
|
.take(24)
|
||||||
|
.map(char::from)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inbound user update data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateInboundUserDto {
|
||||||
|
pub level: Option<i32>,
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateInboundUserDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateInboundUserDto) -> Self {
|
||||||
|
let xray_user_id = dto.generate_xray_user_id();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
user_id: Set(dto.user_id),
|
||||||
|
server_inbound_id: Set(dto.server_inbound_id),
|
||||||
|
xray_user_id: Set(xray_user_id),
|
||||||
|
password: Set(Some(dto.generate_password())), // Generate password for all protocols
|
||||||
|
level: Set(dto.level.unwrap_or(0)),
|
||||||
|
is_active: Set(true),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
/// Update this model with data from UpdateInboundUserDto
|
||||||
|
pub fn apply_update(self, dto: UpdateInboundUserDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(level) = dto.level {
|
||||||
|
active_model.level = Set(level);
|
||||||
|
}
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate email for xray client based on user information
|
||||||
|
pub fn generate_client_email(&self, username: &str) -> String {
|
||||||
|
format!("{}@OutFleet", username)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Response model for inbound user
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct InboundUserResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub user_id: Uuid,
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
pub xray_user_id: String,
|
||||||
|
pub password: Option<String>,
|
||||||
|
pub level: i32,
|
||||||
|
pub is_active: bool,
|
||||||
|
pub created_at: String,
|
||||||
|
pub updated_at: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for InboundUserResponse {
|
||||||
|
fn from(model: Model) -> Self {
|
||||||
|
Self {
|
||||||
|
id: model.id,
|
||||||
|
user_id: model.user_id,
|
||||||
|
server_inbound_id: model.server_inbound_id,
|
||||||
|
xray_user_id: model.xray_user_id,
|
||||||
|
password: model.password,
|
||||||
|
level: model.level,
|
||||||
|
is_active: model.is_active,
|
||||||
|
created_at: model.created_at.to_rfc3339(),
|
||||||
|
updated_at: model.updated_at.to_rfc3339(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
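A minimal sketch of creating an inbound user record from the DTO above; identifiers are placeholders, not part of the diff:

// Illustrative only: the From impl generates the xray UUID and a random password.
fn new_inbound_user(user_id: Uuid, server_inbound_id: Uuid) -> ActiveModel {
    CreateInboundUserDto { user_id, server_inbound_id, level: Some(0) }.into()
}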
17 src/database/entities/mod.rs Normal file
@@ -0,0 +1,17 @@
pub mod certificate;
pub mod dns_provider;
pub mod inbound_template;
pub mod inbound_users;
pub mod server;
pub mod server_inbound;
pub mod telegram_config;
pub mod user;
pub mod user_access;
pub mod user_request;

pub mod prelude {
    pub use super::certificate::Entity as Certificate;
    pub use super::inbound_template::Entity as InboundTemplate;
    pub use super::server::Entity as Server;
    pub use super::server_inbound::Entity as ServerInbound;
}
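A sketch of how the prelude re-exports are typically used elsewhere in the crate; the module path `crate::database::entities` is an assumption based on the file location, and `db` stands for an existing DatabaseConnection:

use crate::database::entities::{prelude::Server, server};
use sea_orm::{DatabaseConnection, DbErr, EntityTrait};

// Illustrative only: list all servers via the re-exported entity.
async fn list_servers(db: &DatabaseConnection) -> Result<Vec<server::Model>, DbErr> {
    Server::find().all(db).await
}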
240 src/database/entities/server.rs Normal file
@@ -0,0 +1,240 @@
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "servers")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
pub hostname: String,
|
||||||
|
|
||||||
|
pub grpc_hostname: String,
|
||||||
|
|
||||||
|
pub grpc_port: i32,
|
||||||
|
|
||||||
|
#[serde(skip_serializing)]
|
||||||
|
pub api_credentials: Option<String>,
|
||||||
|
|
||||||
|
pub status: String,
|
||||||
|
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::certificate::Entity",
|
||||||
|
from = "Column::DefaultCertificateId",
|
||||||
|
to = "super::certificate::Column::Id"
|
||||||
|
)]
|
||||||
|
DefaultCertificate,
|
||||||
|
#[sea_orm(has_many = "super::server_inbound::Entity")]
|
||||||
|
ServerInbounds,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::certificate::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::DefaultCertificate.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbounds.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
status: Set(ServerStatus::Unknown.into()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum ServerStatus {
|
||||||
|
Unknown,
|
||||||
|
Online,
|
||||||
|
Offline,
|
||||||
|
Error,
|
||||||
|
Connecting,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<ServerStatus> for String {
|
||||||
|
fn from(status: ServerStatus) -> Self {
|
||||||
|
match status {
|
||||||
|
ServerStatus::Unknown => "unknown".to_string(),
|
||||||
|
ServerStatus::Online => "online".to_string(),
|
||||||
|
ServerStatus::Offline => "offline".to_string(),
|
||||||
|
ServerStatus::Error => "error".to_string(),
|
||||||
|
ServerStatus::Connecting => "connecting".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for ServerStatus {
|
||||||
|
fn from(s: String) -> Self {
|
||||||
|
match s.as_str() {
|
||||||
|
"online" => ServerStatus::Online,
|
||||||
|
"offline" => ServerStatus::Offline,
|
||||||
|
"error" => ServerStatus::Error,
|
||||||
|
"connecting" => ServerStatus::Connecting,
|
||||||
|
_ => ServerStatus::Unknown,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateServerDto {
|
||||||
|
pub name: String,
|
||||||
|
pub hostname: String,
|
||||||
|
pub grpc_hostname: Option<String>, // Optional, defaults to hostname if not provided
|
||||||
|
pub grpc_port: Option<i32>,
|
||||||
|
pub api_credentials: Option<String>,
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateServerDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub hostname: Option<String>,
|
||||||
|
pub grpc_hostname: Option<String>,
|
||||||
|
pub grpc_port: Option<i32>,
|
||||||
|
pub api_credentials: Option<String>,
|
||||||
|
pub status: Option<String>,
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct ServerResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub name: String,
|
||||||
|
pub hostname: String,
|
||||||
|
pub grpc_hostname: String,
|
||||||
|
pub grpc_port: i32,
|
||||||
|
pub status: String,
|
||||||
|
pub default_certificate_id: Option<Uuid>,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
pub has_credentials: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateServerDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateServerDto) -> Self {
|
||||||
|
Self {
|
||||||
|
name: Set(dto.name.clone()),
|
||||||
|
hostname: Set(dto.hostname.clone()),
|
||||||
|
grpc_hostname: Set(dto.grpc_hostname.unwrap_or(dto.hostname)), // Default to hostname if not provided
|
||||||
|
grpc_port: Set(dto.grpc_port.unwrap_or(2053)),
|
||||||
|
api_credentials: Set(dto.api_credentials),
|
||||||
|
status: Set("unknown".to_string()),
|
||||||
|
default_certificate_id: Set(dto.default_certificate_id),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for ServerResponse {
|
||||||
|
fn from(server: Model) -> Self {
|
||||||
|
Self {
|
||||||
|
id: server.id,
|
||||||
|
name: server.name,
|
||||||
|
hostname: server.hostname,
|
||||||
|
grpc_hostname: server.grpc_hostname,
|
||||||
|
grpc_port: server.grpc_port,
|
||||||
|
status: server.status,
|
||||||
|
default_certificate_id: server.default_certificate_id,
|
||||||
|
created_at: server.created_at,
|
||||||
|
updated_at: server.updated_at,
|
||||||
|
has_credentials: server.api_credentials.is_some(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
pub fn apply_update(self, dto: UpdateServerDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(hostname) = dto.hostname {
|
||||||
|
active_model.hostname = Set(hostname);
|
||||||
|
}
|
||||||
|
if let Some(grpc_hostname) = dto.grpc_hostname {
|
||||||
|
active_model.grpc_hostname = Set(grpc_hostname);
|
||||||
|
}
|
||||||
|
if let Some(grpc_port) = dto.grpc_port {
|
||||||
|
active_model.grpc_port = Set(grpc_port);
|
||||||
|
}
|
||||||
|
if let Some(api_credentials) = dto.api_credentials {
|
||||||
|
active_model.api_credentials = Set(Some(api_credentials));
|
||||||
|
}
|
||||||
|
if let Some(status) = dto.status {
|
||||||
|
active_model.status = Set(status);
|
||||||
|
}
|
||||||
|
if let Some(default_certificate_id) = dto.default_certificate_id {
|
||||||
|
active_model.default_certificate_id = Set(Some(default_certificate_id));
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_grpc_endpoint(&self) -> String {
|
||||||
|
let hostname = if self.grpc_hostname.is_empty() {
|
||||||
|
tracing::debug!(
|
||||||
|
"Using public hostname '{}' for gRPC (grpc_hostname is empty)",
|
||||||
|
self.hostname
|
||||||
|
);
|
||||||
|
&self.hostname
|
||||||
|
} else {
|
||||||
|
tracing::debug!(
|
||||||
|
"Using dedicated gRPC hostname '{}' (different from public hostname '{}')",
|
||||||
|
self.grpc_hostname,
|
||||||
|
self.hostname
|
||||||
|
);
|
||||||
|
&self.grpc_hostname
|
||||||
|
};
|
||||||
|
let endpoint = format!("{}:{}", hostname, self.grpc_port);
|
||||||
|
tracing::info!("gRPC endpoint for server '{}': {}", self.name, endpoint);
|
||||||
|
endpoint
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn get_status(&self) -> ServerStatus {
|
||||||
|
self.status.clone().into()
|
||||||
|
}
|
||||||
|
}
|
||||||
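A sketch showing the fallback behaviour of the From impl above when no dedicated gRPC hostname or port is given; the values are placeholders:

// Illustrative only: grpc_hostname falls back to the public hostname, grpc_port to 2053.
fn example_server() -> ActiveModel {
    CreateServerDto {
        name: "edge-1".to_string(),
        hostname: "vpn.example.com".to_string(),
        grpc_hostname: None,
        grpc_port: None,
        api_credentials: None,
        default_certificate_id: None,
    }
    .into()
}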
205 src/database/entities/server_inbound.rs Normal file
@@ -0,0 +1,205 @@
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "server_inbounds")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
pub server_id: Uuid,
|
||||||
|
|
||||||
|
pub template_id: Uuid,
|
||||||
|
|
||||||
|
pub tag: String,
|
||||||
|
|
||||||
|
pub port_override: Option<i32>,
|
||||||
|
|
||||||
|
pub certificate_id: Option<Uuid>,
|
||||||
|
|
||||||
|
pub variable_values: Value,
|
||||||
|
|
||||||
|
pub is_active: bool,
|
||||||
|
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::server::Entity",
|
||||||
|
from = "Column::ServerId",
|
||||||
|
to = "super::server::Column::Id"
|
||||||
|
)]
|
||||||
|
Server,
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::inbound_template::Entity",
|
||||||
|
from = "Column::TemplateId",
|
||||||
|
to = "super::inbound_template::Column::Id"
|
||||||
|
)]
|
||||||
|
Template,
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::certificate::Entity",
|
||||||
|
from = "Column::CertificateId",
|
||||||
|
to = "super::certificate::Column::Id"
|
||||||
|
)]
|
||||||
|
Certificate,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::Server.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::inbound_template::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::Template.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::certificate::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::Certificate.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateServerInboundDto {
|
||||||
|
pub template_id: Uuid,
|
||||||
|
pub port: i32,
|
||||||
|
pub certificate_id: Option<Uuid>,
|
||||||
|
pub is_active: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateServerInboundDto {
|
||||||
|
pub tag: Option<String>,
|
||||||
|
pub port_override: Option<i32>,
|
||||||
|
pub certificate_id: Option<Uuid>,
|
||||||
|
pub variable_values: Option<serde_json::Map<String, Value>>,
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct ServerInboundResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub server_id: Uuid,
|
||||||
|
pub template_id: Uuid,
|
||||||
|
pub tag: String,
|
||||||
|
pub port: i32,
|
||||||
|
pub certificate_id: Option<Uuid>,
|
||||||
|
pub variable_values: Value,
|
||||||
|
pub is_active: bool,
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
// Populated by joins (simplified for now)
|
||||||
|
pub template_name: Option<String>,
|
||||||
|
pub certificate_name: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for ServerInboundResponse {
|
||||||
|
fn from(inbound: Model) -> Self {
|
||||||
|
Self {
|
||||||
|
id: inbound.id,
|
||||||
|
server_id: inbound.server_id,
|
||||||
|
template_id: inbound.template_id,
|
||||||
|
tag: inbound.tag,
|
||||||
|
port: inbound.port_override.unwrap_or(443), // Default port if not set
|
||||||
|
certificate_id: inbound.certificate_id,
|
||||||
|
variable_values: inbound.variable_values,
|
||||||
|
is_active: inbound.is_active,
|
||||||
|
created_at: inbound.created_at,
|
||||||
|
updated_at: inbound.updated_at,
|
||||||
|
template_name: None, // Will be filled by repository if needed
|
||||||
|
certificate_name: None, // Will be filled by repository if needed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
pub fn apply_update(self, dto: UpdateServerInboundDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(tag) = dto.tag {
|
||||||
|
active_model.tag = Set(tag);
|
||||||
|
}
|
||||||
|
if let Some(port_override) = dto.port_override {
|
||||||
|
active_model.port_override = Set(Some(port_override));
|
||||||
|
}
|
||||||
|
if let Some(certificate_id) = dto.certificate_id {
|
||||||
|
active_model.certificate_id = Set(Some(certificate_id));
|
||||||
|
}
|
||||||
|
if let Some(variable_values) = dto.variable_values {
|
||||||
|
active_model.variable_values = Set(Value::Object(variable_values));
|
||||||
|
}
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn get_variable_values(&self) -> serde_json::Map<String, Value> {
|
||||||
|
if let Value::Object(map) = &self.variable_values {
|
||||||
|
map.clone()
|
||||||
|
} else {
|
||||||
|
serde_json::Map::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn get_effective_port(&self, template_default_port: i32) -> i32 {
|
||||||
|
self.port_override.unwrap_or(template_default_port)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateServerInboundDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateServerInboundDto) -> Self {
|
||||||
|
Self {
|
||||||
|
template_id: Set(dto.template_id),
|
||||||
|
tag: Set(format!("inbound-{}", Uuid::new_v4())), // Generate unique tag
|
||||||
|
port_override: Set(Some(dto.port)),
|
||||||
|
certificate_id: Set(dto.certificate_id),
|
||||||
|
variable_values: Set(Value::Object(serde_json::Map::new())),
|
||||||
|
is_active: Set(dto.is_active),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
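A short sketch of how the per-inbound port override interacts with the template default (see get_effective_port above); the helper name is illustrative:

// Illustrative only: the inbound's override wins, otherwise the template's default port.
fn listening_port(inbound: &Model, template_default_port: i32) -> i32 {
    inbound.get_effective_port(template_default_port)
}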
96 src/database/entities/telegram_config.rs Normal file
@@ -0,0 +1,96 @@
use sea_orm::entity::prelude::*;
use sea_orm::{ActiveModelTrait, Set};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "telegram_config")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: Uuid,

    /// Telegram bot token (encrypted in production)
    pub bot_token: String,

    /// Whether the bot is active
    pub is_active: bool,

    /// When the config was created
    pub created_at: DateTimeUtc,

    /// Last time config was updated
    pub updated_at: DateTimeUtc,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {
    /// Called before insert and update
    fn new() -> Self {
        Self {
            id: Set(Uuid::new_v4()),
            created_at: Set(chrono::Utc::now()),
            updated_at: Set(chrono::Utc::now()),
            ..ActiveModelTrait::default()
        }
    }

    /// Called before update
    fn before_save<'life0, 'async_trait, C>(
        mut self,
        _db: &'life0 C,
        insert: bool,
    ) -> core::pin::Pin<
        Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
    >
    where
        'life0: 'async_trait,
        C: 'async_trait + ConnectionTrait,
        Self: 'async_trait,
    {
        Box::pin(async move {
            if !insert {
                self.updated_at = Set(chrono::Utc::now());
            } else if self.id.is_not_set() {
                self.id = Set(Uuid::new_v4());
            }

            if self.created_at.is_not_set() {
                self.created_at = Set(chrono::Utc::now());
            }

            if self.updated_at.is_not_set() {
                self.updated_at = Set(chrono::Utc::now());
            }

            Ok(self)
        })
    }
}

/// DTO for creating a new Telegram configuration
#[derive(Debug, Serialize, Deserialize)]
pub struct CreateTelegramConfigDto {
    pub bot_token: String,
    pub is_active: bool,
}

/// DTO for updating Telegram configuration
#[derive(Debug, Serialize, Deserialize)]
pub struct UpdateTelegramConfigDto {
    pub bot_token: Option<String>,
    pub is_active: Option<bool>,
}

impl Model {
    /// Convert to ActiveModel for updates
    pub fn into_active_model(self) -> ActiveModel {
        ActiveModel {
            id: Set(self.id),
            bot_token: Set(self.bot_token),
            is_active: Set(self.is_active),
            created_at: Set(self.created_at),
            updated_at: Set(self.updated_at),
        }
    }
}
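A sketch of an update flow using into_active_model above; `db` stands for an existing DatabaseConnection and the helper name is illustrative:

use sea_orm::{ActiveModelTrait, DatabaseConnection, DbErr, Set};

// Illustrative only: rotate the bot token and persist the change.
async fn set_bot_token(db: &DatabaseConnection, cfg: Model, token: String) -> Result<Model, DbErr> {
    let mut active = cfg.into_active_model();
    active.bot_token = Set(token);
    active.update(db).await
}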
204 src/database/entities/user.rs Normal file
@@ -0,0 +1,204 @@
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "users")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
/// User display name
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
/// Optional comment/description about the user
|
||||||
|
#[sea_orm(column_type = "Text")]
|
||||||
|
pub comment: Option<String>,
|
||||||
|
|
||||||
|
/// Optional Telegram user ID for bot integration
|
||||||
|
pub telegram_id: Option<i64>,
|
||||||
|
|
||||||
|
/// Whether the user is a Telegram admin
|
||||||
|
pub is_telegram_admin: bool,
|
||||||
|
|
||||||
|
/// When the user was registered/created
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
/// Last time user record was updated
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
/// Called before insert and update
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
is_telegram_admin: Set(false),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Called before update
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// User creation data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateUserDto {
|
||||||
|
pub name: String,
|
||||||
|
pub comment: Option<String>,
|
||||||
|
pub telegram_id: Option<i64>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub is_telegram_admin: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// User update data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateUserDto {
|
||||||
|
pub name: Option<String>,
|
||||||
|
pub comment: Option<String>,
|
||||||
|
pub telegram_id: Option<i64>,
|
||||||
|
pub is_telegram_admin: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateUserDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateUserDto) -> Self {
|
||||||
|
Self {
|
||||||
|
name: Set(dto.name),
|
||||||
|
comment: Set(dto.comment),
|
||||||
|
telegram_id: Set(dto.telegram_id),
|
||||||
|
is_telegram_admin: Set(dto.is_telegram_admin),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
/// Update this model with data from UpdateUserDto
|
||||||
|
pub fn apply_update(self, dto: UpdateUserDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(name) = dto.name {
|
||||||
|
active_model.name = Set(name);
|
||||||
|
}
|
||||||
|
if let Some(comment) = dto.comment {
active_model.comment = Set(Some(comment));
}
// dto.comment is Option<String>, so omitting it leaves the stored comment unchanged;
// clearing a comment would need a dedicated flag or an Option<Option<String>> field.
|
||||||
|
if dto.telegram_id.is_some() {
|
||||||
|
active_model.telegram_id = Set(dto.telegram_id);
|
||||||
|
}
|
||||||
|
if let Some(is_admin) = dto.is_telegram_admin {
|
||||||
|
active_model.is_telegram_admin = Set(is_admin);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if user has Telegram integration
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn has_telegram(&self) -> bool {
|
||||||
|
self.telegram_id.is_some()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get display name with optional comment
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn display_name(&self) -> String {
|
||||||
|
match &self.comment {
|
||||||
|
Some(comment) if !comment.is_empty() => format!("{} ({})", self.name, comment),
|
||||||
|
_ => self.name.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_create_user_dto_conversion() {
|
||||||
|
let dto = CreateUserDto {
|
||||||
|
name: "Test User".to_string(),
|
||||||
|
comment: Some("Test comment".to_string()),
|
||||||
|
telegram_id: Some(123456789),
|
||||||
|
is_telegram_admin: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
let active_model: ActiveModel = dto.into();
|
||||||
|
|
||||||
|
assert_eq!(active_model.name.unwrap(), "Test User");
|
||||||
|
assert_eq!(
|
||||||
|
active_model.comment.unwrap(),
|
||||||
|
Some("Test comment".to_string())
|
||||||
|
);
|
||||||
|
assert_eq!(active_model.telegram_id.unwrap(), Some(123456789));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_user_display_name() {
|
||||||
|
let user = Model {
|
||||||
|
id: Uuid::new_v4(),
|
||||||
|
name: "John Doe".to_string(),
|
||||||
|
comment: Some("Admin user".to_string()),
|
||||||
|
telegram_id: None,
|
||||||
|
is_telegram_admin: false,
|
||||||
|
created_at: chrono::Utc::now(),
|
||||||
|
updated_at: chrono::Utc::now(),
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(user.display_name(), "John Doe (Admin user)");
|
||||||
|
|
||||||
|
let user_no_comment = Model {
|
||||||
|
comment: None,
|
||||||
|
..user
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(user_no_comment.display_name(), "John Doe");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_has_telegram() {
|
||||||
|
let user_with_telegram = Model {
|
||||||
|
id: Uuid::new_v4(),
|
||||||
|
name: "User".to_string(),
|
||||||
|
comment: None,
|
||||||
|
telegram_id: Some(123456789),
|
||||||
|
is_telegram_admin: false,
|
||||||
|
created_at: chrono::Utc::now(),
|
||||||
|
updated_at: chrono::Utc::now(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let user_without_telegram = Model {
|
||||||
|
telegram_id: None,
|
||||||
|
..user_with_telegram.clone()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert!(user_with_telegram.has_telegram());
|
||||||
|
assert!(!user_without_telegram.has_telegram());
|
||||||
|
}
|
||||||
|
}
|
||||||
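A sketch of a partial update through apply_update above; only the provided fields are changed and the helper name is illustrative:

// Illustrative only: rename a user without touching the other fields.
fn rename_user(user: Model, new_name: String) -> ActiveModel {
    user.apply_update(UpdateUserDto {
        name: Some(new_name),
        comment: None,
        telegram_id: None,
        is_telegram_admin: None,
    })
}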
189 src/database/entities/user_access.rs Normal file
@@ -0,0 +1,189 @@
use sea_orm::entity::prelude::*;
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "user_access")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
|
||||||
|
/// User ID this access is for
|
||||||
|
pub user_id: Uuid,
|
||||||
|
|
||||||
|
/// Server ID this access applies to
|
||||||
|
pub server_id: Uuid,
|
||||||
|
|
||||||
|
/// Server inbound ID this access applies to
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
|
||||||
|
/// User's unique identifier in xray (UUID for VLESS/VMess, password for Trojan)
|
||||||
|
pub xray_user_id: String,
|
||||||
|
|
||||||
|
/// User's email in xray
|
||||||
|
pub xray_email: String,
|
||||||
|
|
||||||
|
/// User level in xray (0-255)
|
||||||
|
pub level: i32,
|
||||||
|
|
||||||
|
/// Whether this access is currently active
|
||||||
|
pub is_active: bool,
|
||||||
|
|
||||||
|
/// When this access was created
|
||||||
|
pub created_at: DateTimeUtc,
|
||||||
|
|
||||||
|
/// Last time this access was updated
|
||||||
|
pub updated_at: DateTimeUtc,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::user::Entity",
|
||||||
|
from = "Column::UserId",
|
||||||
|
to = "super::user::Column::Id"
|
||||||
|
)]
|
||||||
|
User,
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::server::Entity",
|
||||||
|
from = "Column::ServerId",
|
||||||
|
to = "super::server::Column::Id"
|
||||||
|
)]
|
||||||
|
Server,
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::server_inbound::Entity",
|
||||||
|
from = "Column::ServerInboundId",
|
||||||
|
to = "super::server_inbound::Column::Id"
|
||||||
|
)]
|
||||||
|
ServerInbound,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::user::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::User.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::Server.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Related<super::server_inbound::Entity> for Entity {
|
||||||
|
fn to() -> RelationDef {
|
||||||
|
Relation::ServerInbound.def()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
..ActiveModelTrait::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn before_save<'life0, 'async_trait, C>(
|
||||||
|
mut self,
|
||||||
|
_db: &'life0 C,
|
||||||
|
insert: bool,
|
||||||
|
) -> core::pin::Pin<
|
||||||
|
Box<dyn core::future::Future<Output = Result<Self, DbErr>> + Send + 'async_trait>,
|
||||||
|
>
|
||||||
|
where
|
||||||
|
'life0: 'async_trait,
|
||||||
|
C: 'async_trait + ConnectionTrait,
|
||||||
|
Self: 'async_trait,
|
||||||
|
{
|
||||||
|
Box::pin(async move {
|
||||||
|
if !insert {
|
||||||
|
self.updated_at = Set(chrono::Utc::now());
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// User access creation data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateUserAccessDto {
|
||||||
|
pub user_id: Uuid,
|
||||||
|
pub server_id: Uuid,
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
pub xray_user_id: String,
|
||||||
|
pub xray_email: String,
|
||||||
|
pub level: Option<i32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// User access update data transfer object
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateUserAccessDto {
|
||||||
|
pub is_active: Option<bool>,
|
||||||
|
pub level: Option<i32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CreateUserAccessDto> for ActiveModel {
|
||||||
|
fn from(dto: CreateUserAccessDto) -> Self {
|
||||||
|
Self {
|
||||||
|
user_id: Set(dto.user_id),
|
||||||
|
server_id: Set(dto.server_id),
|
||||||
|
server_inbound_id: Set(dto.server_inbound_id),
|
||||||
|
xray_user_id: Set(dto.xray_user_id),
|
||||||
|
xray_email: Set(dto.xray_email),
|
||||||
|
level: Set(dto.level.unwrap_or(0)),
|
||||||
|
is_active: Set(true),
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model {
|
||||||
|
/// Update this model with data from UpdateUserAccessDto
|
||||||
|
pub fn apply_update(self, dto: UpdateUserAccessDto) -> ActiveModel {
|
||||||
|
let mut active_model: ActiveModel = self.into();
|
||||||
|
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
if let Some(level) = dto.level {
|
||||||
|
active_model.level = Set(level);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Response model for user access
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UserAccessResponse {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub user_id: Uuid,
|
||||||
|
pub server_id: Uuid,
|
||||||
|
pub server_inbound_id: Uuid,
|
||||||
|
pub xray_user_id: String,
|
||||||
|
pub xray_email: String,
|
||||||
|
pub level: i32,
|
||||||
|
pub is_active: bool,
|
||||||
|
pub created_at: String,
|
||||||
|
pub updated_at: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Model> for UserAccessResponse {
|
||||||
|
fn from(model: Model) -> Self {
|
||||||
|
Self {
|
||||||
|
id: model.id,
|
||||||
|
user_id: model.user_id,
|
||||||
|
server_id: model.server_id,
|
||||||
|
server_inbound_id: model.server_inbound_id,
|
||||||
|
xray_user_id: model.xray_user_id,
|
||||||
|
xray_email: model.xray_email,
|
||||||
|
level: model.level,
|
||||||
|
is_active: model.is_active,
|
||||||
|
created_at: model.created_at.to_rfc3339(),
|
||||||
|
updated_at: model.updated_at.to_rfc3339(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
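A sketch of granting access by building the DTO above; all identifiers and values are placeholders:

// Illustrative only: grant a user access to one inbound with the default level 0.
fn grant_access(
    user_id: Uuid,
    server_id: Uuid,
    server_inbound_id: Uuid,
    xray_user_id: String,
    xray_email: String,
) -> ActiveModel {
    CreateUserAccessDto {
        user_id,
        server_id,
        server_inbound_id,
        xray_user_id,
        xray_email,
        level: None, // defaults to 0 in the From impl
    }
    .into()
}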
174 src/database/entities/user_request.rs Normal file
@@ -0,0 +1,174 @@
use sea_orm::entity::prelude::*;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
|
||||||
|
#[sea_orm(table_name = "user_requests")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key)]
|
||||||
|
pub id: Uuid,
|
||||||
|
pub user_id: Option<Uuid>,
|
||||||
|
pub telegram_id: i64,
|
||||||
|
pub telegram_username: Option<String>,
|
||||||
|
pub telegram_first_name: Option<String>,
|
||||||
|
pub telegram_last_name: Option<String>,
|
||||||
|
pub status: String, // pending, approved, declined
|
||||||
|
pub request_message: Option<String>,
|
||||||
|
pub response_message: Option<String>,
|
||||||
|
pub processed_by_user_id: Option<Uuid>,
|
||||||
|
pub processed_at: Option<DateTimeWithTimeZone>,
|
||||||
|
pub language: String, // User's language preference (en, ru)
|
||||||
|
pub created_at: DateTimeWithTimeZone,
|
||||||
|
pub updated_at: DateTimeWithTimeZone,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::user::Entity",
|
||||||
|
from = "Column::UserId",
|
||||||
|
to = "super::user::Column::Id",
|
||||||
|
on_update = "Cascade",
|
||||||
|
on_delete = "SetNull"
|
||||||
|
)]
|
||||||
|
User,
|
||||||
|
#[sea_orm(
|
||||||
|
belongs_to = "super::user::Entity",
|
||||||
|
from = "Column::ProcessedByUserId",
|
||||||
|
to = "super::user::Column::Id",
|
||||||
|
on_update = "Cascade",
|
||||||
|
on_delete = "SetNull"
|
||||||
|
    )]
    ProcessedByUser,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

// Request status enum
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum RequestStatus {
    Pending,
    Approved,
    Declined,
}

impl RequestStatus {
    pub fn as_str(&self) -> &'static str {
        match self {
            RequestStatus::Pending => "pending",
            RequestStatus::Approved => "approved",
            RequestStatus::Declined => "declined",
        }
    }

    pub fn from_str(s: &str) -> Option<Self> {
        match s {
            "pending" => Some(RequestStatus::Pending),
            "approved" => Some(RequestStatus::Approved),
            "declined" => Some(RequestStatus::Declined),
            _ => None,
        }
    }
}

impl Model {
    pub fn get_status(&self) -> RequestStatus {
        RequestStatus::from_str(&self.status).unwrap_or(RequestStatus::Pending)
    }

    pub fn get_full_name(&self) -> String {
        let mut parts = vec![];
        if let Some(first) = &self.telegram_first_name {
            parts.push(first.clone());
        }
        if let Some(last) = &self.telegram_last_name {
            parts.push(last.clone());
        }
        if parts.is_empty() {
            self.telegram_username
                .clone()
                .unwrap_or_else(|| format!("User {}", self.telegram_id))
        } else {
            parts.join(" ")
        }
    }

    pub fn get_telegram_link(&self) -> String {
        if let Some(username) = &self.telegram_username {
            format!("@{}", username)
        } else {
            format!("tg://user?id={}", self.telegram_id)
        }
    }

    pub fn get_language(&self) -> String {
        self.language.clone()
    }
}

// DTOs for creating and updating user requests
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateUserRequestDto {
    pub telegram_id: i64,
    pub telegram_username: Option<String>,
    pub telegram_first_name: Option<String>,
    pub telegram_last_name: Option<String>,
    pub request_message: Option<String>,
    pub language: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateUserRequestDto {
    pub status: Option<String>,
    pub response_message: Option<String>,
    pub processed_by_user_id: Option<Uuid>,
}

impl From<CreateUserRequestDto> for ActiveModel {
    fn from(dto: CreateUserRequestDto) -> Self {
        use sea_orm::ActiveValue::*;

        ActiveModel {
            id: Set(Uuid::new_v4()),
            user_id: Set(None),
            telegram_id: Set(dto.telegram_id),
            telegram_username: Set(dto.telegram_username),
            telegram_first_name: Set(dto.telegram_first_name),
            telegram_last_name: Set(dto.telegram_last_name),
            status: Set("pending".to_string()),
            request_message: Set(dto.request_message),
            response_message: Set(None),
            processed_by_user_id: Set(None),
            processed_at: Set(None),
            language: Set(dto.language),
            created_at: Set(chrono::Utc::now().into()),
            updated_at: Set(chrono::Utc::now().into()),
        }
    }
}

impl Model {
    pub fn apply_update(self, dto: UpdateUserRequestDto, processed_by: Uuid) -> ActiveModel {
        use sea_orm::ActiveValue::*;

        let mut active: ActiveModel = self.into();

        if let Some(status) = dto.status {
            active.status = Set(status);
            active.processed_by_user_id = Set(Some(processed_by));
            active.processed_at = Set(Some(chrono::Utc::now().into()));
        }

        if let Some(response) = dto.response_message {
            active.response_message = Set(Some(response));
        }

        active.updated_at = Set(chrono::Utc::now().into());
        active
    }
}
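A minimal usage sketch of the DTO flow above, not part of the diff: it assumes the entity module is imported as `user_request`, that a SeaORM `DatabaseConnection` is available, and the handler name, IDs and messages are purely illustrative.

use sea_orm::{ActiveModelTrait, DatabaseConnection};
use uuid::Uuid;

// Hypothetical flow: persist an incoming Telegram request, then approve it.
async fn handle_request_example(db: &DatabaseConnection) -> anyhow::Result<()> {
    let dto = user_request::CreateUserRequestDto {
        telegram_id: 123456789,
        telegram_username: Some("alice".into()),
        telegram_first_name: Some("Alice".into()),
        telegram_last_name: None,
        request_message: Some("Please grant access".into()),
        language: "en".into(),
    };
    // From<CreateUserRequestDto> builds an ActiveModel with status "pending".
    let created = user_request::ActiveModel::from(dto).insert(db).await?;

    // Later, an admin decision: apply_update stamps processed_by / processed_at.
    let update = user_request::UpdateUserRequestDto {
        status: Some(user_request::RequestStatus::Approved.as_str().to_string()),
        response_message: Some("Welcome!".into()),
        processed_by_user_id: None,
    };
    let admin_id = Uuid::new_v4(); // placeholder for the processing admin's user id
    created.apply_update(update, admin_id).update(db).await?;
    Ok(())
}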
113 src/database/migrations/m20241201_000001_create_users_table.rs Normal file
@@ -0,0 +1,113 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Create users table
        manager
            .create_table(
                Table::create()
                    .table(Users::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Users::Id).uuid().not_null().primary_key())
                    .col(ColumnDef::new(Users::Name).string_len(255).not_null())
                    .col(ColumnDef::new(Users::Comment).text().null())
                    .col(ColumnDef::new(Users::TelegramId).big_integer().null())
                    .col(
                        ColumnDef::new(Users::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Users::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;

        // Create index on name for faster searches
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_users_name")
                    .table(Users::Table)
                    .col(Users::Name)
                    .to_owned(),
            )
            .await?;

        // Create unique index on telegram_id (if not null)
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_users_telegram_id")
                    .table(Users::Table)
                    .col(Users::TelegramId)
                    .unique()
                    .to_owned(),
            )
            .await?;

        // Create index on created_at for sorting
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_users_created_at")
                    .table(Users::Table)
                    .col(Users::CreatedAt)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop indexes first
        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_users_created_at")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_users_telegram_id")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(Index::drop().if_exists().name("idx_users_name").to_owned())
            .await?;

        // Drop table
        manager
            .drop_table(Table::drop().table(Users::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum Users {
    Table,
    Id,
    Name,
    Comment,
    TelegramId,
    CreatedAt,
    UpdatedAt,
}
@@ -0,0 +1,108 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Certificates::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Certificates::Id)
                            .uuid()
                            .not_null()
                            .primary_key(),
                    )
                    .col(
                        ColumnDef::new(Certificates::Name)
                            .string_len(255)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Certificates::CertType)
                            .string_len(50)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Certificates::Domain)
                            .string_len(255)
                            .not_null(),
                    )
                    .col(ColumnDef::new(Certificates::CertData).blob().not_null())
                    .col(ColumnDef::new(Certificates::KeyData).blob().not_null())
                    .col(ColumnDef::new(Certificates::ChainData).blob().null())
                    .col(
                        ColumnDef::new(Certificates::ExpiresAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Certificates::AutoRenew)
                            .boolean()
                            .default(false)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Certificates::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Certificates::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;

        // Index on domain for faster lookups
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_certificates_domain")
                    .table(Certificates::Table)
                    .col(Certificates::Domain)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_certificates_domain")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_table(Table::drop().table(Certificates::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum Certificates {
    Table,
    Id,
    Name,
    CertType,
    Domain,
    CertData,
    KeyData,
    ChainData,
    ExpiresAt,
    AutoRenew,
    CreatedAt,
    UpdatedAt,
}
@@ -0,0 +1,151 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(InboundTemplates::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(InboundTemplates::Id)
                            .uuid()
                            .not_null()
                            .primary_key(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::Name)
                            .string_len(255)
                            .not_null(),
                    )
                    .col(ColumnDef::new(InboundTemplates::Description).text().null())
                    .col(
                        ColumnDef::new(InboundTemplates::Protocol)
                            .string_len(50)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::DefaultPort)
                            .integer()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::BaseSettings)
                            .json()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::StreamSettings)
                            .json()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::RequiresTls)
                            .boolean()
                            .default(false)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::RequiresDomain)
                            .boolean()
                            .default(false)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::Variables)
                            .json()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::IsActive)
                            .boolean()
                            .default(true)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundTemplates::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;

        // Index on name for searches
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_inbound_templates_name")
                    .table(InboundTemplates::Table)
                    .col(InboundTemplates::Name)
                    .to_owned(),
            )
            .await?;

        // Index on protocol
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_inbound_templates_protocol")
                    .table(InboundTemplates::Table)
                    .col(InboundTemplates::Protocol)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_inbound_templates_protocol")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_inbound_templates_name")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_table(Table::drop().table(InboundTemplates::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum InboundTemplates {
    Table,
    Id,
    Name,
    Description,
    Protocol,
    DefaultPort,
    BaseSettings,
    StreamSettings,
    RequiresTls,
    RequiresDomain,
    Variables,
    IsActive,
    CreatedAt,
    UpdatedAt,
}
115 src/database/migrations/m20241201_000004_create_servers_table.rs Normal file
@@ -0,0 +1,115 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Servers::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Servers::Id).uuid().not_null().primary_key())
                    .col(ColumnDef::new(Servers::Name).string_len(255).not_null())
                    .col(ColumnDef::new(Servers::Hostname).string_len(255).not_null())
                    .col(
                        ColumnDef::new(Servers::GrpcPort)
                            .integer()
                            .default(2053)
                            .not_null(),
                    )
                    .col(ColumnDef::new(Servers::ApiCredentials).text().null())
                    .col(
                        ColumnDef::new(Servers::Status)
                            .string_len(50)
                            .default("unknown")
                            .not_null(),
                    )
                    .col(ColumnDef::new(Servers::DefaultCertificateId).uuid().null())
                    .col(
                        ColumnDef::new(Servers::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Servers::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;

        // Foreign key to certificates
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("fk_servers_default_certificate")
                    .from(Servers::Table, Servers::DefaultCertificateId)
                    .to(Certificates::Table, Certificates::Id)
                    .on_delete(ForeignKeyAction::SetNull)
                    .to_owned(),
            )
            .await?;

        // Index on hostname
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_servers_hostname")
                    .table(Servers::Table)
                    .col(Servers::Hostname)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_foreign_key(
                ForeignKey::drop()
                    .name("fk_servers_default_certificate")
                    .table(Servers::Table)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_servers_hostname")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_table(Table::drop().table(Servers::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum Servers {
    Table,
    Id,
    Name,
    Hostname,
    GrpcPort,
    ApiCredentials,
    Status,
    DefaultCertificateId,
    CreatedAt,
    UpdatedAt,
}

#[derive(DeriveIden)]
enum Certificates {
    Table,
    Id,
}
@@ -0,0 +1,183 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(ServerInbounds::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(ServerInbounds::Id)
                            .uuid()
                            .not_null()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(ServerInbounds::ServerId).uuid().not_null())
                    .col(ColumnDef::new(ServerInbounds::TemplateId).uuid().not_null())
                    .col(
                        ColumnDef::new(ServerInbounds::Tag)
                            .string_len(255)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(ServerInbounds::PortOverride)
                            .integer()
                            .null(),
                    )
                    .col(ColumnDef::new(ServerInbounds::CertificateId).uuid().null())
                    .col(
                        ColumnDef::new(ServerInbounds::VariableValues)
                            .json()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(ServerInbounds::IsActive)
                            .boolean()
                            .default(true)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(ServerInbounds::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(ServerInbounds::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;

        // Foreign keys
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("fk_server_inbounds_server")
                    .from(ServerInbounds::Table, ServerInbounds::ServerId)
                    .to(Servers::Table, Servers::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;

        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("fk_server_inbounds_template")
                    .from(ServerInbounds::Table, ServerInbounds::TemplateId)
                    .to(InboundTemplates::Table, InboundTemplates::Id)
                    .on_delete(ForeignKeyAction::Restrict)
                    .to_owned(),
            )
            .await?;

        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("fk_server_inbounds_certificate")
                    .from(ServerInbounds::Table, ServerInbounds::CertificateId)
                    .to(Certificates::Table, Certificates::Id)
                    .on_delete(ForeignKeyAction::SetNull)
                    .to_owned(),
            )
            .await?;

        // Unique constraint on server_id + tag
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_server_inbounds_server_tag")
                    .table(ServerInbounds::Table)
                    .col(ServerInbounds::ServerId)
                    .col(ServerInbounds::Tag)
                    .unique()
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_foreign_key(
                ForeignKey::drop()
                    .name("fk_server_inbounds_certificate")
                    .table(ServerInbounds::Table)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_foreign_key(
                ForeignKey::drop()
                    .name("fk_server_inbounds_template")
                    .table(ServerInbounds::Table)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_foreign_key(
                ForeignKey::drop()
                    .name("fk_server_inbounds_server")
                    .table(ServerInbounds::Table)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_server_inbounds_server_tag")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_table(Table::drop().table(ServerInbounds::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum ServerInbounds {
    Table,
    Id,
    ServerId,
    TemplateId,
    Tag,
    PortOverride,
    CertificateId,
    VariableValues,
    IsActive,
    CreatedAt,
    UpdatedAt,
}

#[derive(DeriveIden)]
enum Servers {
    Table,
    Id,
}

#[derive(DeriveIden)]
enum InboundTemplates {
    Table,
    Id,
}

#[derive(DeriveIden)]
enum Certificates {
    Table,
    Id,
}
@@ -0,0 +1,172 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(UserAccess::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(UserAccess::Id)
                            .uuid()
                            .not_null()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(UserAccess::UserId).uuid().not_null())
                    .col(ColumnDef::new(UserAccess::ServerId).uuid().not_null())
                    .col(
                        ColumnDef::new(UserAccess::ServerInboundId)
                            .uuid()
                            .not_null(),
                    )
                    .col(ColumnDef::new(UserAccess::XrayUserId).string().not_null())
                    .col(ColumnDef::new(UserAccess::XrayEmail).string().not_null())
                    .col(ColumnDef::new(UserAccess::Level).integer().not_null())
                    .col(ColumnDef::new(UserAccess::IsActive).boolean().not_null())
                    .col(
                        ColumnDef::new(UserAccess::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(UserAccess::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_user_access_user_id")
                            .from(UserAccess::Table, UserAccess::UserId)
                            .to(Users::Table, Users::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_user_access_server_id")
                            .from(UserAccess::Table, UserAccess::ServerId)
                            .to(Servers::Table, Servers::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_user_access_server_inbound_id")
                            .from(UserAccess::Table, UserAccess::ServerInboundId)
                            .to(ServerInbounds::Table, ServerInbounds::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        // Create indexes separately
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_user_access_server_inbound")
                    .table(UserAccess::Table)
                    .col(UserAccess::ServerId)
                    .col(UserAccess::ServerInboundId)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_user_access_user_server")
                    .table(UserAccess::Table)
                    .col(UserAccess::UserId)
                    .col(UserAccess::ServerId)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_user_access_xray_email")
                    .table(UserAccess::Table)
                    .col(UserAccess::XrayEmail)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop indexes first
        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_user_access_xray_email")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_user_access_user_server")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_user_access_server_inbound")
                    .to_owned(),
            )
            .await?;

        // Drop table
        manager
            .drop_table(Table::drop().table(UserAccess::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum UserAccess {
    Table,
    Id,
    UserId,
    ServerId,
    ServerInboundId,
    XrayUserId,
    XrayEmail,
    Level,
    IsActive,
    CreatedAt,
    UpdatedAt,
}

#[derive(DeriveIden)]
enum Users {
    Table,
    Id,
}

#[derive(DeriveIden)]
enum Servers {
    Table,
    Id,
}

#[derive(DeriveIden)]
enum ServerInbounds {
    Table,
    Id,
}
@@ -0,0 +1,113 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(InboundUsers::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(InboundUsers::Id)
                            .uuid()
                            .not_null()
                            .primary_key(),
                    )
                    .col(
                        ColumnDef::new(InboundUsers::ServerInboundId)
                            .uuid()
                            .not_null(),
                    )
                    .col(ColumnDef::new(InboundUsers::Username).string().not_null())
                    .col(ColumnDef::new(InboundUsers::Email).string().not_null())
                    .col(ColumnDef::new(InboundUsers::XrayUserId).string().not_null())
                    .col(
                        ColumnDef::new(InboundUsers::Level)
                            .integer()
                            .not_null()
                            .default(0),
                    )
                    .col(
                        ColumnDef::new(InboundUsers::IsActive)
                            .boolean()
                            .not_null()
                            .default(true),
                    )
                    .col(
                        ColumnDef::new(InboundUsers::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(InboundUsers::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_inbound_users_server_inbound")
                            .from(InboundUsers::Table, InboundUsers::ServerInboundId)
                            .to(ServerInbounds::Table, ServerInbounds::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        // Create unique constraint: one user per inbound
        manager
            .create_index(
                Index::create()
                    .name("idx_inbound_users_unique_user_per_inbound")
                    .table(InboundUsers::Table)
                    .col(InboundUsers::ServerInboundId)
                    .col(InboundUsers::Username)
                    .unique()
                    .to_owned(),
            )
            .await?;

        // Create index on email for faster lookups
        manager
            .create_index(
                Index::create()
                    .name("idx_inbound_users_email")
                    .table(InboundUsers::Table)
                    .col(InboundUsers::Email)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(InboundUsers::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum InboundUsers {
    Table,
    Id,
    ServerInboundId,
    Username,
    Email,
    XrayUserId,
    Level,
    IsActive,
    CreatedAt,
    UpdatedAt,
}

#[derive(DeriveIden)]
enum ServerInbounds {
    Table,
    Id,
}
@@ -0,0 +1,238 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop existing indexes that reference columns we're removing
        manager
            .drop_index(
                Index::drop()
                    .name("idx_inbound_users_unique_user_per_inbound")
                    .table(InboundUsers::Table)
                    .to_owned(),
            )
            .await
            .ok(); // Ignore error if index doesn't exist

        manager
            .drop_index(
                Index::drop()
                    .name("idx_inbound_users_email")
                    .table(InboundUsers::Table)
                    .to_owned(),
            )
            .await
            .ok(); // Ignore error if index doesn't exist

        // Add user_id column
        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .add_column(
                        ColumnDef::new(InboundUsers::UserId)
                            .uuid()
                            .not_null()
                            .default(Expr::val("00000000-0000-0000-0000-000000000000")),
                    )
                    .to_owned(),
            )
            .await?;

        // Add password column
        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .add_column(ColumnDef::new(InboundUsers::Password).string().null())
                    .to_owned(),
            )
            .await?;

        // Drop old columns (username and email)
        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .drop_column(InboundUsers::Username)
                    .to_owned(),
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .drop_column(InboundUsers::Email)
                    .to_owned(),
            )
            .await?;

        // Add foreign key to users table
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("fk_inbound_users_user")
                    .from(InboundUsers::Table, InboundUsers::UserId)
                    .to(Users::Table, Users::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;

        // Create new unique constraint: one user per inbound
        manager
            .create_index(
                Index::create()
                    .name("idx_inbound_users_unique_user_per_inbound")
                    .table(InboundUsers::Table)
                    .col(InboundUsers::UserId)
                    .col(InboundUsers::ServerInboundId)
                    .unique()
                    .to_owned(),
            )
            .await?;

        // Create index on user_id for faster lookups
        manager
            .create_index(
                Index::create()
                    .name("idx_inbound_users_user_id")
                    .table(InboundUsers::Table)
                    .col(InboundUsers::UserId)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop new indexes
        manager
            .drop_index(
                Index::drop()
                    .name("idx_inbound_users_unique_user_per_inbound")
                    .table(InboundUsers::Table)
                    .to_owned(),
            )
            .await?;

        manager
            .drop_index(
                Index::drop()
                    .name("idx_inbound_users_user_id")
                    .table(InboundUsers::Table)
                    .to_owned(),
            )
            .await?;

        // Drop foreign key
        manager
            .drop_foreign_key(
                ForeignKey::drop()
                    .name("fk_inbound_users_user")
                    .table(InboundUsers::Table)
                    .to_owned(),
            )
            .await?;

        // Add back old columns
        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .add_column(
                        ColumnDef::new(InboundUsers::Username)
                            .string()
                            .not_null()
                            .default(""),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .add_column(
                        ColumnDef::new(InboundUsers::Email)
                            .string()
                            .not_null()
                            .default(""),
                    )
                    .to_owned(),
            )
            .await?;

        // Drop new columns
        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .drop_column(InboundUsers::UserId)
                    .to_owned(),
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(InboundUsers::Table)
                    .drop_column(InboundUsers::Password)
                    .to_owned(),
            )
            .await?;

        // Recreate old indexes
        manager
            .create_index(
                Index::create()
                    .name("idx_inbound_users_unique_user_per_inbound")
                    .table(InboundUsers::Table)
                    .col(InboundUsers::ServerInboundId)
                    .col(InboundUsers::Username)
                    .unique()
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_inbound_users_email")
                    .table(InboundUsers::Table)
                    .col(InboundUsers::Email)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }
}

#[derive(DeriveIden)]
enum InboundUsers {
    Table,
    Id,
    UserId,
    ServerInboundId,
    Username,
    Email,
    XrayUserId,
    Password,
    Level,
    IsActive,
    CreatedAt,
    UpdatedAt,
}

#[derive(DeriveIden)]
enum Users {
    Table,
    Id,
}
@@ -0,0 +1,50 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Servers::Table)
                    .add_column(
                        ColumnDef::new(Servers::GrpcHostname)
                            .string()
                            .not_null()
                            .default(""),
                    )
                    .to_owned(),
            )
            .await?;

        // Update existing servers: set grpc_hostname to hostname value
        let db = manager.get_connection();

        // Use raw SQL to copy hostname to grpc_hostname for existing records
        // Handle both empty strings and default empty values
        db.execute_unprepared("UPDATE servers SET grpc_hostname = hostname WHERE grpc_hostname = '' OR grpc_hostname IS NULL")
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Servers::Table)
                    .drop_column(Servers::GrpcHostname)
                    .to_owned(),
            )
            .await
    }
}

#[derive(Iden)]
enum Servers {
    Table,
    GrpcHostname,
}
@@ -0,0 +1,92 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(DnsProviders::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(DnsProviders::Id)
                            .uuid()
                            .not_null()
                            .primary_key(),
                    )
                    .col(
                        ColumnDef::new(DnsProviders::Name)
                            .string_len(255)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(DnsProviders::ProviderType)
                            .string_len(50)
                            .not_null(),
                    )
                    .col(ColumnDef::new(DnsProviders::ApiToken).text().not_null())
                    .col(
                        ColumnDef::new(DnsProviders::IsActive)
                            .boolean()
                            .default(true)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(DnsProviders::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(DnsProviders::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;

        // Index on name for faster lookups
        manager
            .create_index(
                Index::create()
                    .if_not_exists()
                    .name("idx_dns_providers_name")
                    .table(DnsProviders::Table)
                    .col(DnsProviders::Name)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_index(
                Index::drop()
                    .if_exists()
                    .name("idx_dns_providers_name")
                    .to_owned(),
            )
            .await?;

        manager
            .drop_table(Table::drop().table(DnsProviders::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum DnsProviders {
    Table,
    Id,
    Name,
    ProviderType,
    ApiToken,
    IsActive,
    CreatedAt,
    UpdatedAt,
}
@@ -0,0 +1,57 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(TelegramConfig::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(TelegramConfig::Id)
                            .uuid()
                            .not_null()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(TelegramConfig::BotToken).string().not_null())
                    .col(
                        ColumnDef::new(TelegramConfig::IsActive)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .col(
                        ColumnDef::new(TelegramConfig::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(TelegramConfig::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(TelegramConfig::Table).to_owned())
            .await
    }
}

#[derive(Iden)]
pub enum TelegramConfig {
    Table,
    Id,
    BotToken,
    IsActive,
    CreatedAt,
    UpdatedAt,
}
@@ -0,0 +1,40 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Users::Table)
                    .add_column(
                        ColumnDef::new(Users::IsTelegramAdmin)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Users::Table)
                    .drop_column(Users::IsTelegramAdmin)
                    .to_owned(),
            )
            .await
    }
}

#[derive(Iden)]
enum Users {
    Table,
    IsTelegramAdmin,
}
@@ -0,0 +1,183 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Create user_requests table
        manager
            .create_table(
                Table::create()
                    .table(UserRequests::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(UserRequests::Id)
                            .uuid()
                            .not_null()
                            .primary_key()
                            .default(Expr::cust("gen_random_uuid()")),
                    )
                    .col(
                        ColumnDef::new(UserRequests::UserId).uuid().null(), // Can be null if user doesn't exist yet
                    )
                    .col(
                        ColumnDef::new(UserRequests::TelegramId)
                            .big_integer()
                            .not_null()
                            .unique_key(),
                    )
                    .col(
                        ColumnDef::new(UserRequests::TelegramUsername)
                            .string()
                            .null(),
                    )
                    .col(
                        ColumnDef::new(UserRequests::TelegramFirstName)
                            .string()
                            .null(),
                    )
                    .col(
                        ColumnDef::new(UserRequests::TelegramLastName)
                            .string()
                            .null(),
                    )
                    .col(
                        ColumnDef::new(UserRequests::Status)
                            .string()
                            .not_null()
                            .default("pending"), // pending, approved, declined
                    )
                    .col(ColumnDef::new(UserRequests::RequestMessage).text().null())
                    .col(ColumnDef::new(UserRequests::ResponseMessage).text().null())
                    .col(
                        ColumnDef::new(UserRequests::ProcessedByUserId)
                            .uuid()
                            .null(),
                    )
                    .col(
                        ColumnDef::new(UserRequests::ProcessedAt)
                            .timestamp_with_time_zone()
                            .null(),
                    )
                    .col(
                        ColumnDef::new(UserRequests::CreatedAt)
                            .timestamp_with_time_zone()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    .col(
                        ColumnDef::new(UserRequests::UpdatedAt)
                            .timestamp_with_time_zone()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_user_requests_user")
                            .from(UserRequests::Table, UserRequests::UserId)
                            .to(Users::Table, Users::Id)
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_user_requests_processed_by")
                            .from(UserRequests::Table, UserRequests::ProcessedByUserId)
                            .to(Users::Table, Users::Id)
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        // Create index on telegram_id for faster lookups
        manager
            .create_index(
                Index::create()
                    .name("idx_user_requests_telegram_id")
                    .table(UserRequests::Table)
                    .col(UserRequests::TelegramId)
                    .to_owned(),
            )
            .await?;

        // Create index on status for filtering
        manager
            .create_index(
                Index::create()
                    .name("idx_user_requests_status")
                    .table(UserRequests::Table)
                    .col(UserRequests::Status)
                    .to_owned(),
            )
            .await?;

        // Create trigger to update updated_at timestamp
        manager
            .get_connection()
            .execute_unprepared(
                r#"
                CREATE OR REPLACE FUNCTION update_user_requests_updated_at()
                RETURNS TRIGGER AS $$
                BEGIN
                    NEW.updated_at = CURRENT_TIMESTAMP;
                    RETURN NEW;
                END;
                $$ LANGUAGE plpgsql;

                CREATE TRIGGER user_requests_updated_at
                    BEFORE UPDATE ON user_requests
                    FOR EACH ROW
                    EXECUTE FUNCTION update_user_requests_updated_at();
                "#,
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop trigger and function
        manager
            .get_connection()
            .execute_unprepared(
                r#"
                DROP TRIGGER IF EXISTS user_requests_updated_at ON user_requests;
                DROP FUNCTION IF EXISTS update_user_requests_updated_at();
                "#,
            )
            .await?;

        // Drop table
        manager
            .drop_table(Table::drop().table(UserRequests::Table).to_owned())
            .await
    }
}

#[derive(Iden)]
enum UserRequests {
    Table,
    Id,
    UserId,
    TelegramId,
    TelegramUsername,
    TelegramFirstName,
    TelegramLastName,
    Status,
    RequestMessage,
    ResponseMessage,
    ProcessedByUserId,
    ProcessedAt,
    CreatedAt,
    UpdatedAt,
}

#[derive(Iden)]
enum Users {
    Table,
    Id,
}
@@ -0,0 +1,38 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop the unique constraint on telegram_id
        // This allows users to have multiple requests (e.g., if one was declined)
        manager
            .get_connection()
            .execute_unprepared(
                r#"
                ALTER TABLE user_requests
                DROP CONSTRAINT IF EXISTS user_requests_telegram_id_key;
                "#,
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Re-add the unique constraint
        manager
            .get_connection()
            .execute_unprepared(
                r#"
                ALTER TABLE user_requests
                ADD CONSTRAINT user_requests_telegram_id_key UNIQUE (telegram_id);
                "#,
            )
            .await?;

        Ok(())
    }
}
@@ -0,0 +1,41 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Add language column to user_requests table
        manager
            .alter_table(
                Table::alter()
                    .table(UserRequests::Table)
                    .add_column(
                        ColumnDef::new(UserRequests::Language)
                            .string()
                            .default("en"), // Default to English
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Remove language column from user_requests table
        manager
            .alter_table(
                Table::alter()
                    .table(UserRequests::Table)
                    .drop_column(UserRequests::Language)
                    .to_owned(),
            )
            .await
    }
}

#[derive(Iden)]
enum UserRequests {
    Table,
    Language,
}
42 src/database/migrations/mod.rs Normal file
@@ -0,0 +1,42 @@
use sea_orm_migration::prelude::*;

mod m20241201_000001_create_users_table;
mod m20241201_000002_create_certificates_table;
mod m20241201_000003_create_inbound_templates_table;
mod m20241201_000004_create_servers_table;
mod m20241201_000005_create_server_inbounds_table;
mod m20241201_000006_create_user_access_table;
mod m20241201_000007_create_inbound_users_table;
mod m20250919_000001_update_inbound_users_schema;
mod m20250922_000001_add_grpc_hostname_to_servers;
mod m20250923_000001_create_dns_providers_table;
mod m20250929_000001_create_telegram_config_table;
mod m20250929_000002_add_telegram_admin_to_users;
mod m20251018_000001_create_user_requests_table;
mod m20251018_000002_remove_unique_telegram_id;
mod m20251018_000003_add_language_to_user_requests;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20241201_000001_create_users_table::Migration),
            Box::new(m20241201_000002_create_certificates_table::Migration),
            Box::new(m20241201_000003_create_inbound_templates_table::Migration),
            Box::new(m20241201_000004_create_servers_table::Migration),
            Box::new(m20241201_000005_create_server_inbounds_table::Migration),
            Box::new(m20241201_000006_create_user_access_table::Migration),
            Box::new(m20241201_000007_create_inbound_users_table::Migration),
            Box::new(m20250919_000001_update_inbound_users_schema::Migration),
            Box::new(m20250922_000001_add_grpc_hostname_to_servers::Migration),
            Box::new(m20250923_000001_create_dns_providers_table::Migration),
            Box::new(m20250929_000001_create_telegram_config_table::Migration),
            Box::new(m20250929_000002_add_telegram_admin_to_users::Migration),
            Box::new(m20251018_000001_create_user_requests_table::Migration),
            Box::new(m20251018_000002_remove_unique_telegram_id::Migration),
            Box::new(m20251018_000003_add_language_to_user_requests::Migration),
        ]
    }
}
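A minimal sketch (not part of the diff) of driving this Migrator by hand, for example from a one-off maintenance task; the module path and connection URL are assumptions, and in normal operation migrations run through DatabaseManager below when auto_migrate is enabled.

use sea_orm::Database;
use sea_orm_migration::MigratorTrait;
// assumed path: use crate::database::migrations::Migrator;

async fn migrate_once() -> Result<(), sea_orm::DbErr> {
    // Placeholder connection string; substitute the real DATABASE_URL.
    let conn = Database::connect("postgresql://user:password@localhost:5432/xray_admin").await?;
    // Apply all pending migrations; Some(1) instead of None would apply a single step.
    Migrator::up(&conn, None).await?;
    // To roll back the most recent migration instead:
    // Migrator::down(&conn, Some(1)).await?;
    Ok(())
}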
167
src/database/mod.rs
Normal file
167
src/database/mod.rs
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{
|
||||||
|
ConnectOptions, ConnectionTrait, Database, DatabaseBackend, DatabaseConnection, Statement,
|
||||||
|
};
|
||||||
|
use sea_orm_migration::MigratorTrait;
|
||||||
|
use std::time::Duration;
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
use crate::config::DatabaseConfig;
|
||||||
|
|
||||||
|
pub mod entities;
|
||||||
|
pub mod migrations;
|
||||||
|
pub mod repository;
|
||||||
|
|
||||||
|
use migrations::Migrator;
|
||||||
|
|
||||||
|
/// Database connection and management
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct DatabaseManager {
|
||||||
|
connection: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DatabaseManager {
|
||||||
|
/// Create a new database connection
|
||||||
|
pub async fn new(config: &DatabaseConfig) -> Result<Self> {
|
||||||
|
info!("Connecting to database...");
|
||||||
|
|
||||||
|
// URL-encode the connection string to handle special characters in passwords
        let encoded_url = Self::encode_database_url(&config.url)?;

        let mut opt = ConnectOptions::new(&encoded_url);

        opt.max_connections(config.max_connections)
            .min_connections(1)
            .connect_timeout(Duration::from_secs(config.connection_timeout))
            .acquire_timeout(Duration::from_secs(config.connection_timeout))
            .idle_timeout(Duration::from_secs(600))
            .max_lifetime(Duration::from_secs(3600))
            .sqlx_logging(tracing::level_enabled!(tracing::Level::DEBUG))
            .sqlx_logging_level(log::LevelFilter::Debug);

        let connection = Database::connect(opt).await?;

        info!("Database connection established successfully");

        let manager = Self { connection };

        // Run migrations if auto_migrate is enabled
        if config.auto_migrate {
            manager.migrate().await?;
        }

        Ok(manager)
    }

    /// Get database connection
    pub fn connection(&self) -> DatabaseConnection {
        self.connection.clone()
    }

    /// Run database migrations
    pub async fn migrate(&self) -> Result<()> {
        info!("Running database migrations...");

        match Migrator::up(&self.connection, None).await {
            Ok(_) => {
                info!("Database migrations completed successfully");
                Ok(())
            }
            Err(e) => {
                warn!("Migration error: {}", e);
                Err(e.into())
            }
        }
    }

    /// Check database connection health
    pub async fn health_check(&self) -> Result<bool> {
        let stmt = Statement::from_string(DatabaseBackend::Postgres, "SELECT 1".to_owned());
        match self.connection.execute(stmt).await {
            Ok(_) => Ok(true),
            Err(e) => {
                warn!("Database health check failed: {}", e);
                Ok(false)
            }
        }
    }

    /// Get database schema information
    pub async fn get_schema_version(&self) -> Result<Option<String>> {
        // This would typically query a migrations table
        // For now, we'll just return a placeholder
        Ok(Some("1.0.0".to_string()))
    }

    /// Encode database URL to handle special characters in passwords
    fn encode_database_url(url: &str) -> Result<String> {
        // Parse URL manually to handle special characters in password
        if let Some(at_pos) = url.rfind('@') {
            if let Some(_colon_pos) = url[..at_pos].rfind(':') {
                if let Some(scheme_end) = url.find("://") {
                    let scheme = &url[..scheme_end + 3];
                    let user_pass = &url[scheme_end + 3..at_pos];
                    let host_db = &url[at_pos..];

                    if let Some(user_colon) = user_pass.find(':') {
                        let user = &user_pass[..user_colon];
                        let password = &user_pass[user_colon + 1..];

                        // URL-encode the password part only
                        let encoded_password = urlencoding::encode(password);
                        let encoded_url =
                            format!("{}{}:{}{}", scheme, user, encoded_password, host_db);

                        return Ok(encoded_url);
                    }
                }
            }
        }

        // If parsing fails, return original URL
        Ok(url.to_string())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::DatabaseConfig;

    #[test]
    fn test_encode_database_url() {
        let url_with_special_chars = "postgresql://user:pass#word@localhost:5432/db";
        let encoded = DatabaseManager::encode_database_url(url_with_special_chars).unwrap();
        assert_eq!(encoded, "postgresql://user:pass%23word@localhost:5432/db");

        let normal_url = "postgresql://user:password@localhost:5432/db";
        let encoded_normal = DatabaseManager::encode_database_url(normal_url).unwrap();
        assert_eq!(
            encoded_normal,
            "postgresql://user:password@localhost:5432/db"
        );
    }

    #[tokio::test]
    async fn test_database_connection() {
        // This test requires a running PostgreSQL database
        // Skip in CI or when database is not available
        if std::env::var("DATABASE_URL").is_err() {
            return;
        }

        let config = DatabaseConfig {
            url: std::env::var("DATABASE_URL").unwrap(),
            max_connections: 5,
            connection_timeout: 30,
            auto_migrate: false,
        };

        let db = DatabaseManager::new(&config).await;
        assert!(db.is_ok());

        if let Ok(db) = db {
            let health = db.health_check().await;
            assert!(health.is_ok());
        }
    }
}
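For orientation, a minimal sketch of how this DatabaseManager is meant to be driven at startup. The DatabaseConfig field names are taken from the test above; the init_db helper itself is illustrative and not part of the diff.

// Hypothetical wiring sketch: build the manager from a DatabaseConfig and verify
// the connection before serving traffic.
use anyhow::Result;

async fn init_db() -> Result<DatabaseManager> {
    let config = DatabaseConfig {
        url: std::env::var("DATABASE_URL")?,
        max_connections: 20,
        connection_timeout: 30,
        auto_migrate: true, // runs Migrator::up() inside DatabaseManager::new
    };

    let db = DatabaseManager::new(&config).await?;

    // health_check() returns Ok(false) rather than an error when "SELECT 1" fails,
    // so the caller has to inspect the boolean explicitly.
    if !db.health_check().await? {
        anyhow::bail!("database reachable but health check failed");
    }

    Ok(db)
}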
src/database/repository/certificate.rs (new file, 104 lines added)
@@ -0,0 +1,104 @@
use crate::database::entities::{certificate, prelude::*};
use anyhow::Result;
use sea_orm::*;
use uuid::Uuid;

#[derive(Clone)]
pub struct CertificateRepository {
    db: DatabaseConnection,
}

impl CertificateRepository {
    pub fn new(db: DatabaseConnection) -> Self {
        Self { db }
    }

    pub async fn create(
        &self,
        cert_data: certificate::CreateCertificateDto,
    ) -> Result<certificate::Model> {
        let cert = certificate::ActiveModel::from(cert_data);

        let result = Certificate::insert(cert).exec(&self.db).await?;

        Certificate::find_by_id(result.last_insert_id)
            .one(&self.db)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Failed to retrieve created certificate"))
    }

    pub async fn find_all(&self) -> Result<Vec<certificate::Model>> {
        Ok(Certificate::find().all(&self.db).await?)
    }

    pub async fn find_by_id(&self, id: Uuid) -> Result<Option<certificate::Model>> {
        Ok(Certificate::find_by_id(id).one(&self.db).await?)
    }

    #[allow(dead_code)]
    pub async fn find_by_domain(&self, domain: &str) -> Result<Vec<certificate::Model>> {
        Ok(Certificate::find()
            .filter(certificate::Column::Domain.eq(domain))
            .all(&self.db)
            .await?)
    }

    #[allow(dead_code)]
    pub async fn find_by_type(&self, cert_type: &str) -> Result<Vec<certificate::Model>> {
        Ok(Certificate::find()
            .filter(certificate::Column::CertType.eq(cert_type))
            .all(&self.db)
            .await?)
    }

    pub async fn update(
        &self,
        id: Uuid,
        cert_data: certificate::UpdateCertificateDto,
    ) -> Result<certificate::Model> {
        let cert = Certificate::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Certificate not found"))?;

        let updated_cert = cert.apply_update(cert_data);

        Ok(updated_cert.update(&self.db).await?)
    }

    pub async fn delete(&self, id: Uuid) -> Result<bool> {
        let result = Certificate::delete_by_id(id).exec(&self.db).await?;
        Ok(result.rows_affected > 0)
    }

    pub async fn find_expiring_soon(&self, days: i64) -> Result<Vec<certificate::Model>> {
        let threshold = chrono::Utc::now() + chrono::Duration::days(days);

        Ok(Certificate::find()
            .filter(certificate::Column::ExpiresAt.lt(threshold))
            .all(&self.db)
            .await?)
    }

    /// Update certificate data (cert and key) and expiration date
    pub async fn update_certificate_data(
        &self,
        id: Uuid,
        cert_pem: &str,
        key_pem: &str,
        expires_at: chrono::DateTime<chrono::Utc>,
    ) -> Result<certificate::Model> {
        let mut cert: certificate::ActiveModel = Certificate::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Certificate not found"))?
            .into();

        cert.cert_data = Set(cert_pem.as_bytes().to_vec());
        cert.key_data = Set(key_pem.as_bytes().to_vec());
        cert.expires_at = Set(expires_at);
        cert.updated_at = Set(chrono::Utc::now());

        Ok(cert.update(&self.db).await?)
    }
}
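A sketch of how find_expiring_soon and update_certificate_data could be combined in a renewal pass. The issue_certificate helper is a placeholder for whatever ACME / DNS-01 flow the application uses; it is not defined in this diff.

// Hypothetical renewal pass: find certificates expiring within 30 days and
// persist freshly issued PEM data back through the repository.
async fn renew_expiring(repo: &CertificateRepository) -> anyhow::Result<()> {
    for cert in repo.find_expiring_soon(30).await? {
        // issue_certificate(...) is assumed, not part of this diff.
        let (cert_pem, key_pem, expires_at) = issue_certificate(&cert.domain).await?;
        repo.update_certificate_data(cert.id, &cert_pem, &key_pem, expires_at)
            .await?;
    }
    Ok(())
}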
src/database/repository/dns_provider.rs (new file, 138 lines added)
@@ -0,0 +1,138 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{
|
||||||
|
ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, PaginatorTrait, QueryFilter,
|
||||||
|
Set,
|
||||||
|
};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::dns_provider::{
|
||||||
|
ActiveModel, Column, CreateDnsProviderDto, DnsProviderType, Entity, Model, UpdateDnsProviderDto,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub struct DnsProviderRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DnsProviderRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self) -> Result<Vec<Model>> {
|
||||||
|
let providers = Entity::find().all(&self.db).await?;
|
||||||
|
Ok(providers)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_active(&self) -> Result<Vec<Model>> {
|
||||||
|
let providers = Entity::find()
|
||||||
|
.filter(Column::IsActive.eq(true))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(providers)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let provider = Entity::find_by_id(id).one(&self.db).await?;
|
||||||
|
Ok(provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_name(&self, name: &str) -> Result<Option<Model>> {
|
||||||
|
let provider = Entity::find()
|
||||||
|
.filter(Column::Name.eq(name))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_type(&self, provider_type: &str) -> Result<Vec<Model>> {
|
||||||
|
let providers = Entity::find()
|
||||||
|
.filter(Column::ProviderType.eq(provider_type))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(providers)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_active_by_type(&self, provider_type: &str) -> Result<Vec<Model>> {
|
||||||
|
let providers = Entity::find()
|
||||||
|
.filter(Column::ProviderType.eq(provider_type))
|
||||||
|
.filter(Column::IsActive.eq(true))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(providers)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(&self, dto: CreateDnsProviderDto) -> Result<Model> {
|
||||||
|
let active_model: ActiveModel = dto.into();
|
||||||
|
let provider = active_model.insert(&self.db).await?;
|
||||||
|
Ok(provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(&self, id: Uuid, dto: UpdateDnsProviderDto) -> Result<Option<Model>> {
|
||||||
|
let provider = match self.find_by_id(id).await? {
|
||||||
|
Some(provider) => provider,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let updated_model = provider.apply_update(dto);
|
||||||
|
let updated_provider = updated_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated_provider))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = Entity::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn enable(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let provider = match self.find_by_id(id).await? {
|
||||||
|
Some(provider) => provider,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut active_model: ActiveModel = provider.into();
|
||||||
|
active_model.is_active = Set(true);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
let updated_provider = active_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated_provider))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn disable(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let provider = match self.find_by_id(id).await? {
|
||||||
|
Some(provider) => provider,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut active_model: ActiveModel = provider.into();
|
||||||
|
active_model.is_active = Set(false);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
let updated_provider = active_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated_provider))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if a provider name already exists
|
||||||
|
pub async fn name_exists(&self, name: &str, exclude_id: Option<Uuid>) -> Result<bool> {
|
||||||
|
let mut query = Entity::find().filter(Column::Name.eq(name));
|
||||||
|
|
||||||
|
if let Some(id) = exclude_id {
|
||||||
|
query = query.filter(Column::Id.ne(id));
|
||||||
|
}
|
||||||
|
|
||||||
|
let count = query.count(&self.db).await?;
|
||||||
|
Ok(count > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the first active provider of a specific type
|
||||||
|
pub async fn get_active_provider_by_type(
|
||||||
|
&self,
|
||||||
|
provider_type: DnsProviderType,
|
||||||
|
) -> Result<Option<Model>> {
|
||||||
|
let provider = Entity::find()
|
||||||
|
.filter(Column::ProviderType.eq(provider_type.as_str()))
|
||||||
|
.filter(Column::IsActive.eq(true))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(provider)
|
||||||
|
}
|
||||||
|
}
|
||||||
src/database/repository/inbound_template.rs (new file, 72 lines added)
@@ -0,0 +1,72 @@
use crate::database::entities::{inbound_template, prelude::*};
use anyhow::Result;
use sea_orm::*;
use uuid::Uuid;

#[derive(Clone)]
pub struct InboundTemplateRepository {
    db: DatabaseConnection,
}

#[allow(dead_code)]
impl InboundTemplateRepository {
    pub fn new(db: DatabaseConnection) -> Self {
        Self { db }
    }

    pub async fn create(
        &self,
        template_data: inbound_template::CreateInboundTemplateDto,
    ) -> Result<inbound_template::Model> {
        let template = inbound_template::ActiveModel::from(template_data);

        let result = InboundTemplate::insert(template).exec(&self.db).await?;

        InboundTemplate::find_by_id(result.last_insert_id)
            .one(&self.db)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Failed to retrieve created template"))
    }

    pub async fn find_all(&self) -> Result<Vec<inbound_template::Model>> {
        Ok(InboundTemplate::find().all(&self.db).await?)
    }

    pub async fn find_by_id(&self, id: Uuid) -> Result<Option<inbound_template::Model>> {
        Ok(InboundTemplate::find_by_id(id).one(&self.db).await?)
    }

    pub async fn find_by_name(&self, name: &str) -> Result<Option<inbound_template::Model>> {
        Ok(InboundTemplate::find()
            .filter(inbound_template::Column::Name.eq(name))
            .one(&self.db)
            .await?)
    }

    pub async fn find_by_protocol(&self, protocol: &str) -> Result<Vec<inbound_template::Model>> {
        Ok(InboundTemplate::find()
            .filter(inbound_template::Column::Protocol.eq(protocol))
            .all(&self.db)
            .await?)
    }

    pub async fn update(
        &self,
        id: Uuid,
        template_data: inbound_template::UpdateInboundTemplateDto,
    ) -> Result<inbound_template::Model> {
        let template = InboundTemplate::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Template not found"))?;

        let updated_template = template.apply_update(template_data);

        Ok(updated_template.update(&self.db).await?)
    }

    pub async fn delete(&self, id: Uuid) -> Result<bool> {
        let result = InboundTemplate::delete_by_id(id).exec(&self.db).await?;
        Ok(result.rows_affected > 0)
    }
}
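A small sketch of an idempotent lookup-or-create pattern built on find_by_name and create. It assumes CreateInboundTemplateDto exposes a name field, which is not shown in this diff.

// Sketch only: re-running provisioning should not duplicate templates.
async fn ensure_template(
    repo: &InboundTemplateRepository,
    dto: inbound_template::CreateInboundTemplateDto,
) -> anyhow::Result<inbound_template::Model> {
    // `dto.name` is an assumed field on the create DTO.
    if let Some(existing) = repo.find_by_name(&dto.name).await? {
        return Ok(existing);
    }
    repo.create(dto).await
}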
src/database/repository/inbound_users.rs (new file, 249 lines added)
@@ -0,0 +1,249 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, QueryFilter, Set};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::inbound_users::{
|
||||||
|
ActiveModel, Column, CreateInboundUserDto, Entity, Model, UpdateInboundUserDto,
|
||||||
|
};
|
||||||
|
use crate::services::uri_generator::ClientConfigData;
|
||||||
|
|
||||||
|
pub struct InboundUsersRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl InboundUsersRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self) -> Result<Vec<Model>> {
|
||||||
|
let users = Entity::find().all(&self.db).await?;
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let user = Entity::find_by_id(id).one(&self.db).await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find all users for a specific inbound
|
||||||
|
pub async fn find_by_inbound_id(&self, inbound_id: Uuid) -> Result<Vec<Model>> {
|
||||||
|
let users = Entity::find()
|
||||||
|
.filter(Column::ServerInboundId.eq(inbound_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find active users for a specific inbound
|
||||||
|
pub async fn find_active_by_inbound_id(&self, inbound_id: Uuid) -> Result<Vec<Model>> {
|
||||||
|
let users = Entity::find()
|
||||||
|
.filter(Column::ServerInboundId.eq(inbound_id))
|
||||||
|
.filter(Column::IsActive.eq(true))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find user by user_id and inbound (for uniqueness check - one user per inbound)
|
||||||
|
pub async fn find_by_user_and_inbound(
|
||||||
|
&self,
|
||||||
|
user_id: Uuid,
|
||||||
|
inbound_id: Uuid,
|
||||||
|
) -> Result<Option<Model>> {
|
||||||
|
let user = Entity::find()
|
||||||
|
.filter(Column::UserId.eq(user_id))
|
||||||
|
.filter(Column::ServerInboundId.eq(inbound_id))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find all inbound access for a specific user
|
||||||
|
pub async fn find_by_user_id(&self, user_id: Uuid) -> Result<Vec<Model>> {
|
||||||
|
let users = Entity::find()
|
||||||
|
.filter(Column::UserId.eq(user_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(&self, dto: CreateInboundUserDto) -> Result<Model> {
|
||||||
|
let active_model: ActiveModel = dto.into();
|
||||||
|
let user = active_model.insert(&self.db).await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(&self, id: Uuid, dto: UpdateInboundUserDto) -> Result<Option<Model>> {
|
||||||
|
let user = match self.find_by_id(id).await? {
|
||||||
|
Some(user) => user,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let updated_model = user.apply_update(dto);
|
||||||
|
let updated_user = updated_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated_user))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = Entity::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Enable user (set is_active = true)
|
||||||
|
pub async fn enable(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let user = match self.find_by_id(id).await? {
|
||||||
|
Some(user) => user,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut active_model: ActiveModel = user.into();
|
||||||
|
active_model.is_active = Set(true);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
let updated_user = active_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated_user))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Disable user (set is_active = false)
|
||||||
|
pub async fn disable(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let user = match self.find_by_id(id).await? {
|
||||||
|
Some(user) => user,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut active_model: ActiveModel = user.into();
|
||||||
|
active_model.is_active = Set(false);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
let updated_user = active_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated_user))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove all users for a specific inbound (when inbound is deleted)
|
||||||
|
pub async fn remove_all_for_inbound(&self, inbound_id: Uuid) -> Result<u64> {
|
||||||
|
let result = Entity::delete_many()
|
||||||
|
.filter(Column::ServerInboundId.eq(inbound_id))
|
||||||
|
.exec(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(result.rows_affected)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if user already has access to this inbound
|
||||||
|
pub async fn user_has_access_to_inbound(
|
||||||
|
&self,
|
||||||
|
user_id: Uuid,
|
||||||
|
inbound_id: Uuid,
|
||||||
|
) -> Result<bool> {
|
||||||
|
let exists = self.find_by_user_and_inbound(user_id, inbound_id).await?;
|
||||||
|
Ok(exists.is_some())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get complete client configuration data for URI generation
|
||||||
|
pub async fn get_client_config_data(
|
||||||
|
&self,
|
||||||
|
user_id: Uuid,
|
||||||
|
server_inbound_id: Uuid,
|
||||||
|
) -> Result<Option<ClientConfigData>> {
|
||||||
|
use crate::database::entities::{
|
||||||
|
certificate, inbound_template, server, server_inbound, user,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get the inbound_user record first
|
||||||
|
let inbound_user = Entity::find()
|
||||||
|
.filter(Column::UserId.eq(user_id))
|
||||||
|
.filter(Column::ServerInboundId.eq(server_inbound_id))
|
||||||
|
.filter(Column::IsActive.eq(true))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(inbound_user) = inbound_user {
|
||||||
|
// Get user info
|
||||||
|
let user_entity = user::Entity::find_by_id(inbound_user.user_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("User not found"))?;
|
||||||
|
|
||||||
|
// Get server inbound info
|
||||||
|
let server_inbound_entity =
|
||||||
|
server_inbound::Entity::find_by_id(inbound_user.server_inbound_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server inbound not found"))?;
|
||||||
|
|
||||||
|
// Get server info
|
||||||
|
let server_entity = server::Entity::find_by_id(server_inbound_entity.server_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server not found"))?;
|
||||||
|
|
||||||
|
// Get template info
|
||||||
|
let template_entity =
|
||||||
|
inbound_template::Entity::find_by_id(server_inbound_entity.template_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Template not found"))?;
|
||||||
|
|
||||||
|
// Get certificate info (optional)
|
||||||
|
let certificate_domain = if let Some(cert_id) = server_inbound_entity.certificate_id {
|
||||||
|
certificate::Entity::find_by_id(cert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.map(|cert| cert.domain)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let config = ClientConfigData {
|
||||||
|
user_name: user_entity.name,
|
||||||
|
xray_user_id: inbound_user.xray_user_id,
|
||||||
|
password: inbound_user.password,
|
||||||
|
level: inbound_user.level,
|
||||||
|
hostname: server_entity.hostname,
|
||||||
|
port: server_inbound_entity
|
||||||
|
.port_override
|
||||||
|
.unwrap_or(template_entity.default_port),
|
||||||
|
protocol: template_entity.protocol,
|
||||||
|
stream_settings: template_entity.stream_settings,
|
||||||
|
base_settings: template_entity.base_settings,
|
||||||
|
certificate_domain,
|
||||||
|
requires_tls: template_entity.requires_tls,
|
||||||
|
variable_values: server_inbound_entity.variable_values,
|
||||||
|
server_name: server_entity.name,
|
||||||
|
inbound_tag: server_inbound_entity.tag,
|
||||||
|
template_name: template_entity.name,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Some(config))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all client configuration data for a user
|
||||||
|
pub async fn get_all_client_configs_for_user(
|
||||||
|
&self,
|
||||||
|
user_id: Uuid,
|
||||||
|
) -> Result<Vec<ClientConfigData>> {
|
||||||
|
// Get all active inbound users for this user
|
||||||
|
let inbound_users = Entity::find()
|
||||||
|
.filter(Column::UserId.eq(user_id))
|
||||||
|
.filter(Column::IsActive.eq(true))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut configs = Vec::new();
|
||||||
|
|
||||||
|
for inbound_user in inbound_users {
|
||||||
|
// Get the client config data for each inbound
|
||||||
|
if let Ok(Some(config)) = self
|
||||||
|
.get_client_config_data(user_id, inbound_user.server_inbound_id)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
configs.push(config);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(configs)
|
||||||
|
}
|
||||||
|
}
|
||||||
src/database/repository/mod.rs (new file, 19 lines added)
@@ -0,0 +1,19 @@
pub mod certificate;
pub mod dns_provider;
pub mod inbound_template;
pub mod inbound_users;
pub mod server;
pub mod server_inbound;
pub mod telegram_config;
pub mod user;
pub mod user_request;

pub use certificate::CertificateRepository;
pub use dns_provider::DnsProviderRepository;
pub use inbound_template::InboundTemplateRepository;
pub use inbound_users::InboundUsersRepository;
pub use server::ServerRepository;
pub use server_inbound::ServerInboundRepository;
pub use telegram_config::TelegramConfigRepository;
pub use user::UserRepository;
pub use user_request::UserRequestRepository;
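A sketch of how the re-exported repositories are expected to be wired from a single DatabaseManager, based on each repository's new(db: DatabaseConnection) constructor. The Repositories struct itself is illustrative and does not appear in the diff.

use crate::database::repository::{CertificateRepository, ServerRepository, UserRepository};

pub struct Repositories {
    pub users: UserRepository,
    pub servers: ServerRepository,
    pub certificates: CertificateRepository,
}

impl Repositories {
    pub fn new(db: &DatabaseManager) -> Self {
        // DatabaseConnection wraps a pooled connection and is cheap to clone,
        // so each repository gets its own handle from DatabaseManager::connection().
        Self {
            users: UserRepository::new(db.connection()),
            servers: ServerRepository::new(db.connection()),
            certificates: CertificateRepository::new(db.connection()),
        }
    }
}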
src/database/repository/server.rs (new file, 94 lines added)
@@ -0,0 +1,94 @@
use crate::database::entities::{prelude::*, server};
use anyhow::Result;
use sea_orm::*;
use uuid::Uuid;

#[derive(Clone)]
pub struct ServerRepository {
    db: DatabaseConnection,
}

#[allow(dead_code)]
impl ServerRepository {
    pub fn new(db: DatabaseConnection) -> Self {
        Self { db }
    }

    pub async fn create(&self, server_data: server::CreateServerDto) -> Result<server::Model> {
        let server = server::ActiveModel::from(server_data);

        let result = Server::insert(server).exec(&self.db).await?;

        Server::find_by_id(result.last_insert_id)
            .one(&self.db)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Failed to retrieve created server"))
    }

    pub async fn find_all(&self) -> Result<Vec<server::Model>> {
        Ok(Server::find().all(&self.db).await?)
    }

    pub async fn find_by_id(&self, id: Uuid) -> Result<Option<server::Model>> {
        Ok(Server::find_by_id(id).one(&self.db).await?)
    }

    pub async fn find_by_name(&self, name: &str) -> Result<Option<server::Model>> {
        Ok(Server::find()
            .filter(server::Column::Name.eq(name))
            .one(&self.db)
            .await?)
    }

    pub async fn find_by_hostname(&self, hostname: &str) -> Result<Option<server::Model>> {
        Ok(Server::find()
            .filter(server::Column::Hostname.eq(hostname))
            .one(&self.db)
            .await?)
    }

    pub async fn find_by_status(&self, status: &str) -> Result<Vec<server::Model>> {
        Ok(Server::find()
            .filter(server::Column::Status.eq(status))
            .all(&self.db)
            .await?)
    }

    pub async fn update(
        &self,
        id: Uuid,
        server_data: server::UpdateServerDto,
    ) -> Result<server::Model> {
        let server = Server::find_by_id(id)
            .one(&self.db)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Server not found"))?;

        let updated_server = server.apply_update(server_data);

        Ok(updated_server.update(&self.db).await?)
    }

    pub async fn delete(&self, id: Uuid) -> Result<bool> {
        let result = Server::delete_by_id(id).exec(&self.db).await?;
        Ok(result.rows_affected > 0)
    }

    pub async fn get_grpc_endpoint(&self, id: Uuid) -> Result<String> {
        let server = self
            .find_by_id(id)
            .await?
            .ok_or_else(|| anyhow::anyhow!("Server not found"))?;

        Ok(server.get_grpc_endpoint())
    }

    pub async fn get_all(&self) -> Result<Vec<server::Model>> {
        Ok(Server::find().all(&self.db).await?)
    }

    pub async fn count(&self) -> Result<u64> {
        let count = Server::find().count(&self.db).await?;
        Ok(count)
    }
}
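A sketch of how a periodic health-check task might consume this repository. The "active" status string and the polling loop are assumptions for illustration; only find_by_status and get_grpc_endpoint come from the code above.

async fn poll_servers(repo: &ServerRepository) -> anyhow::Result<()> {
    for server in repo.find_by_status("active").await? {
        // get_grpc_endpoint() resolves the host/port pair used for the Xray gRPC API.
        let endpoint = repo.get_grpc_endpoint(server.id).await?;
        tracing::debug!("checking Xray API at {}", endpoint);
        // ... call the Xray stats/handler service here ...
    }
    Ok(())
}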
src/database/repository/server_inbound.rs (new file, 206 lines added)
@@ -0,0 +1,206 @@
|
|||||||
|
use crate::database::entities::{prelude::*, server_inbound};
|
||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::*;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct ServerInboundRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl ServerInboundRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(
|
||||||
|
&self,
|
||||||
|
server_id: Uuid,
|
||||||
|
inbound_data: server_inbound::CreateServerInboundDto,
|
||||||
|
) -> Result<server_inbound::Model> {
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound_data.into();
|
||||||
|
inbound.id = Set(Uuid::new_v4());
|
||||||
|
inbound.server_id = Set(server_id);
|
||||||
|
inbound.created_at = Set(chrono::Utc::now());
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
let result = ServerInbound::insert(inbound).exec(&self.db).await?;
|
||||||
|
|
||||||
|
ServerInbound::find_by_id(result.last_insert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Failed to retrieve created server inbound"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create_with_protocol(
|
||||||
|
&self,
|
||||||
|
server_id: Uuid,
|
||||||
|
inbound_data: server_inbound::CreateServerInboundDto,
|
||||||
|
protocol: &str,
|
||||||
|
) -> Result<server_inbound::Model> {
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound_data.into();
|
||||||
|
inbound.id = Set(Uuid::new_v4());
|
||||||
|
inbound.server_id = Set(server_id);
|
||||||
|
inbound.created_at = Set(chrono::Utc::now());
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
// Override tag with protocol prefix
|
||||||
|
let id = inbound.id.as_ref();
|
||||||
|
inbound.tag = Set(format!("{}-inbound-{}", protocol, id));
|
||||||
|
|
||||||
|
let result = ServerInbound::insert(inbound).exec(&self.db).await?;
|
||||||
|
|
||||||
|
ServerInbound::find_by_id(result.last_insert_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Failed to retrieve created server inbound"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find().all(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find_by_id(id).one(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_server_id(&self, server_id: Uuid) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::ServerId.eq(server_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_server_id_with_template(
|
||||||
|
&self,
|
||||||
|
server_id: Uuid,
|
||||||
|
) -> Result<Vec<server_inbound::ServerInboundResponse>> {
|
||||||
|
|
||||||
|
let inbounds = ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::ServerId.eq(server_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut responses = Vec::new();
|
||||||
|
for inbound in inbounds {
|
||||||
|
let mut response = server_inbound::ServerInboundResponse::from(inbound.clone());
|
||||||
|
|
||||||
|
// Load template information
|
||||||
|
if let Ok(Some(template)) = InboundTemplate::find_by_id(inbound.template_id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
response.template_name = Some(template.name);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load certificate information
|
||||||
|
if let Some(cert_id) = inbound.certificate_id {
|
||||||
|
if let Ok(Some(certificate)) = Certificate::find_by_id(cert_id).one(&self.db).await
|
||||||
|
{
|
||||||
|
response.certificate_name = Some(certificate.domain);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
responses.push(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(responses)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_template_id(
|
||||||
|
&self,
|
||||||
|
template_id: Uuid,
|
||||||
|
) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::TemplateId.eq(template_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_tag(&self, tag: &str) -> Result<Option<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::Tag.eq(tag))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_certificate_id(
|
||||||
|
&self,
|
||||||
|
certificate_id: Uuid,
|
||||||
|
) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::CertificateId.eq(certificate_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_active_by_server(
|
||||||
|
&self,
|
||||||
|
server_id: Uuid,
|
||||||
|
) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
Ok(ServerInbound::find()
|
||||||
|
.filter(server_inbound::Column::ServerId.eq(server_id))
|
||||||
|
.filter(server_inbound::Column::IsActive.eq(true))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(
|
||||||
|
&self,
|
||||||
|
id: Uuid,
|
||||||
|
inbound_data: server_inbound::UpdateServerInboundDto,
|
||||||
|
) -> Result<server_inbound::Model> {
|
||||||
|
let inbound = ServerInbound::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server inbound not found"))?;
|
||||||
|
|
||||||
|
let updated_inbound = inbound.apply_update(inbound_data);
|
||||||
|
|
||||||
|
Ok(updated_inbound.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = ServerInbound::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn activate(&self, id: Uuid) -> Result<server_inbound::Model> {
|
||||||
|
let inbound = ServerInbound::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server inbound not found"))?;
|
||||||
|
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound.into();
|
||||||
|
inbound.is_active = Set(true);
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
Ok(inbound.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn deactivate(&self, id: Uuid) -> Result<server_inbound::Model> {
|
||||||
|
let inbound = ServerInbound::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Server inbound not found"))?;
|
||||||
|
|
||||||
|
let mut inbound: server_inbound::ActiveModel = inbound.into();
|
||||||
|
inbound.is_active = Set(false);
|
||||||
|
inbound.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
Ok(inbound.update(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_user_id(&self, _user_id: Uuid) -> Result<Vec<server_inbound::Model>> {
|
||||||
|
// This would need a join with user_access table
|
||||||
|
// For now, returning empty vec as placeholder
|
||||||
|
// TODO: Implement proper join query
|
||||||
|
Ok(vec![])
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn count(&self) -> Result<u64> {
|
||||||
|
let count = ServerInbound::find().count(&self.db).await?;
|
||||||
|
Ok(count)
|
||||||
|
}
|
||||||
|
}
|
||||||
src/database/repository/telegram_config.rs (new file, 169 lines added)
@@ -0,0 +1,169 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{
|
||||||
|
ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, QueryFilter, QueryOrder, Set,
|
||||||
|
};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::telegram_config::{
|
||||||
|
self, CreateTelegramConfigDto, Model, UpdateTelegramConfigDto,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub struct TelegramConfigRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TelegramConfigRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the current active configuration (should be only one)
|
||||||
|
pub async fn get_active(&self) -> Result<Option<Model>> {
|
||||||
|
Ok(telegram_config::Entity::find()
|
||||||
|
.filter(telegram_config::Column::IsActive.eq(true))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get configuration by ID
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
Ok(telegram_config::Entity::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the latest configuration (active or not)
|
||||||
|
pub async fn get_latest(&self) -> Result<Option<Model>> {
|
||||||
|
Ok(telegram_config::Entity::find()
|
||||||
|
.order_by_desc(telegram_config::Column::CreatedAt)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create new configuration (deactivates previous if exists)
|
||||||
|
pub async fn create(&self, dto: CreateTelegramConfigDto) -> Result<Model> {
|
||||||
|
// If is_active is true, deactivate all other configs
|
||||||
|
if dto.is_active {
|
||||||
|
self.deactivate_all().await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let model = telegram_config::ActiveModel {
|
||||||
|
id: Set(Uuid::new_v4()),
|
||||||
|
bot_token: Set(dto.bot_token),
|
||||||
|
is_active: Set(dto.is_active),
|
||||||
|
created_at: Set(chrono::Utc::now()),
|
||||||
|
updated_at: Set(chrono::Utc::now()),
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(model.insert(&self.db).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update configuration
|
||||||
|
pub async fn update(&self, id: Uuid, dto: UpdateTelegramConfigDto) -> Result<Option<Model>> {
|
||||||
|
let model = telegram_config::Entity::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let Some(model) = model else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
// If activating this config, deactivate others
|
||||||
|
if dto.is_active == Some(true) {
|
||||||
|
self.deactivate_all_except(id).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut active_model = model.into_active_model();
|
||||||
|
|
||||||
|
if let Some(bot_token) = dto.bot_token {
|
||||||
|
active_model.bot_token = Set(bot_token);
|
||||||
|
}
|
||||||
|
if let Some(is_active) = dto.is_active {
|
||||||
|
active_model.is_active = Set(is_active);
|
||||||
|
}
|
||||||
|
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
Ok(Some(active_model.update(&self.db).await?))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Activate a configuration (deactivates all others)
|
||||||
|
pub async fn activate(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
self.deactivate_all_except(id).await?;
|
||||||
|
|
||||||
|
let model = telegram_config::Entity::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let Some(model) = model else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut active_model = model.into_active_model();
|
||||||
|
active_model.is_active = Set(true);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
Ok(Some(active_model.update(&self.db).await?))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Deactivate a configuration
|
||||||
|
pub async fn deactivate(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let model = telegram_config::Entity::find_by_id(id)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let Some(model) = model else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut active_model = model.into_active_model();
|
||||||
|
active_model.is_active = Set(false);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
Ok(Some(active_model.update(&self.db).await?))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete configuration
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = telegram_config::Entity::delete_by_id(id)
|
||||||
|
.exec(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Deactivate all configurations
|
||||||
|
async fn deactivate_all(&self) -> Result<()> {
|
||||||
|
let configs = telegram_config::Entity::find()
|
||||||
|
.filter(telegram_config::Column::IsActive.eq(true))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
for config in configs {
|
||||||
|
let mut active_model = config.into_active_model();
|
||||||
|
active_model.is_active = Set(false);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
active_model.update(&self.db).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Deactivate all configurations except one
|
||||||
|
async fn deactivate_all_except(&self, except_id: Uuid) -> Result<()> {
|
||||||
|
let configs = telegram_config::Entity::find()
|
||||||
|
.filter(telegram_config::Column::IsActive.eq(true))
|
||||||
|
.filter(telegram_config::Column::Id.ne(except_id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
for config in configs {
|
||||||
|
let mut active_model = config.into_active_model();
|
||||||
|
active_model.is_active = Set(false);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
active_model.update(&self.db).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
src/database/repository/user.rs (new file, 275 lines added)
@@ -0,0 +1,275 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{
|
||||||
|
ColumnTrait, DatabaseConnection, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder,
|
||||||
|
QuerySelect,
|
||||||
|
};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::user::{
|
||||||
|
ActiveModel, Column, CreateUserDto, Entity as User, Model, UpdateUserDto,
|
||||||
|
};
|
||||||
|
use sea_orm::{ActiveModelTrait, Set};
|
||||||
|
|
||||||
|
pub struct UserRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UserRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all users with pagination
|
||||||
|
pub async fn get_all(&self, page: u64, per_page: u64) -> Result<Vec<Model>> {
|
||||||
|
let users = User::find()
|
||||||
|
.order_by_desc(Column::CreatedAt)
|
||||||
|
.paginate(&self.db, per_page)
|
||||||
|
.fetch_page(page.saturating_sub(1))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get user by ID
|
||||||
|
pub async fn get_by_id(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let user = User::find_by_id(id).one(&self.db).await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find user by ID (alias for get_by_id)
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
self.get_by_id(id).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get user by telegram ID
|
||||||
|
pub async fn get_by_telegram_id(&self, telegram_id: i64) -> Result<Option<Model>> {
|
||||||
|
let user = User::find()
|
||||||
|
.filter(Column::TelegramId.eq(telegram_id))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Search users by name (with pagination for backward compatibility)
|
||||||
|
pub async fn search_by_name(
|
||||||
|
&self,
|
||||||
|
query: &str,
|
||||||
|
page: u64,
|
||||||
|
per_page: u64,
|
||||||
|
) -> Result<Vec<Model>> {
|
||||||
|
let users = User::find()
|
||||||
|
.filter(Column::Name.contains(query))
|
||||||
|
.order_by_desc(Column::CreatedAt)
|
||||||
|
.paginate(&self.db, per_page)
|
||||||
|
.fetch_page(page.saturating_sub(1))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Universal search - searches by name, telegram_id, or user_id
|
||||||
|
pub async fn search(&self, query: &str) -> Result<Vec<Model>> {
|
||||||
|
use sea_orm::Condition;
|
||||||
|
|
||||||
|
let mut condition = Condition::any();
|
||||||
|
|
||||||
|
// Search by name (case-insensitive partial match)
|
||||||
|
condition = condition.add(Column::Name.contains(query));
|
||||||
|
|
||||||
|
// Try to parse as telegram_id (i64)
|
||||||
|
if let Ok(telegram_id) = query.parse::<i64>() {
|
||||||
|
condition = condition.add(Column::TelegramId.eq(telegram_id));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to parse as UUID (user_id)
|
||||||
|
if let Ok(user_id) = Uuid::parse_str(query) {
|
||||||
|
condition = condition.add(Column::Id.eq(user_id));
|
||||||
|
}
|
||||||
|
|
||||||
|
let users = User::find()
|
||||||
|
.filter(condition)
|
||||||
|
.order_by_desc(Column::CreatedAt)
|
||||||
|
.limit(100) // Reasonable limit to prevent huge results
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new user
|
||||||
|
pub async fn create(&self, dto: CreateUserDto) -> Result<Model> {
|
||||||
|
let active_model: ActiveModel = dto.into();
|
||||||
|
let user = User::insert(active_model)
|
||||||
|
.exec_with_returning(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update user by ID
|
||||||
|
pub async fn update(&self, id: Uuid, dto: UpdateUserDto) -> Result<Option<Model>> {
|
||||||
|
if let Some(user) = self.get_by_id(id).await? {
|
||||||
|
let active_model = user.apply_update(dto);
|
||||||
|
User::update(active_model).exec(&self.db).await?;
|
||||||
|
// Fetch the updated user
|
||||||
|
self.get_by_id(id).await
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete user by ID
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = User::delete_by_id(id).exec(&self.db).await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get total count of users
|
||||||
|
pub async fn count(&self) -> Result<u64> {
|
||||||
|
let count = User::find().count(&self.db).await?;
|
||||||
|
Ok(count)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if telegram ID is already used
|
||||||
|
pub async fn telegram_id_exists(&self, telegram_id: i64) -> Result<bool> {
|
||||||
|
let count = User::find()
|
||||||
|
.filter(Column::TelegramId.eq(telegram_id))
|
||||||
|
.count(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(count > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set user as Telegram admin
|
||||||
|
pub async fn set_telegram_admin(&self, user_id: Uuid, is_admin: bool) -> Result<Option<Model>> {
|
||||||
|
if let Some(user) = self.get_by_id(user_id).await? {
|
||||||
|
let mut active_model: ActiveModel = user.into();
|
||||||
|
active_model.is_telegram_admin = Set(is_admin);
|
||||||
|
active_model.updated_at = Set(chrono::Utc::now());
|
||||||
|
|
||||||
|
let updated = active_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if user is Telegram admin
|
||||||
|
pub async fn is_telegram_admin(&self, user_id: Uuid) -> Result<bool> {
|
||||||
|
if let Some(user) = self.get_by_id(user_id).await? {
|
||||||
|
Ok(user.is_telegram_admin)
|
||||||
|
} else {
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if telegram_id is admin
|
||||||
|
pub async fn is_telegram_id_admin(&self, telegram_id: i64) -> Result<bool> {
|
||||||
|
if let Some(user) = self.get_by_telegram_id(telegram_id).await? {
|
||||||
|
Ok(user.is_telegram_admin)
|
||||||
|
} else {
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all Telegram admins
|
||||||
|
pub async fn get_telegram_admins(&self) -> Result<Vec<Model>> {
|
||||||
|
let admins = User::find()
|
||||||
|
.filter(Column::IsTelegramAdmin.eq(true))
|
||||||
|
.filter(Column::TelegramId.is_not_null())
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(admins)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the first admin user (for system operations)
|
||||||
|
pub async fn get_first_admin(&self) -> Result<Option<Model>> {
|
||||||
|
let admin = User::find()
|
||||||
|
.filter(Column::IsTelegramAdmin.eq(true))
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(admin)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Count total users
|
||||||
|
pub async fn count_all(&self) -> Result<i64> {
|
||||||
|
let count = User::find().count(&self.db).await?;
|
||||||
|
|
||||||
|
Ok(count as i64)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find users with pagination
|
||||||
|
pub async fn find_paginated(&self, offset: u64, limit: u64) -> Result<Vec<Model>> {
|
||||||
|
let users = User::find()
|
||||||
|
.order_by_desc(Column::CreatedAt)
|
||||||
|
.offset(offset)
|
||||||
|
.limit(limit)
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(users)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::config::DatabaseConfig;
|
||||||
|
use crate::database::DatabaseManager;
|
||||||
|
|
||||||
|
async fn setup_test_db() -> Result<UserRepository> {
|
||||||
|
let config = DatabaseConfig {
|
||||||
|
url: std::env::var("DATABASE_URL").unwrap_or_else(|_| "sqlite::memory:".to_string()),
|
||||||
|
max_connections: 5,
|
||||||
|
connection_timeout: 30,
|
||||||
|
auto_migrate: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
let db_manager = DatabaseManager::new(&config).await?;
|
||||||
|
Ok(UserRepository::new(db_manager.connection().clone()))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_user_crud() {
|
||||||
|
let repo = match setup_test_db().await {
|
||||||
|
Ok(repo) => repo,
|
||||||
|
Err(_) => return, // Skip test if no database available
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create user
|
||||||
|
let create_dto = CreateUserDto {
|
||||||
|
name: "Test User".to_string(),
|
||||||
|
comment: Some("Test comment".to_string()),
|
||||||
|
telegram_id: Some(123456789),
|
||||||
|
is_telegram_admin: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
let created_user = repo.create(create_dto).await.unwrap();
|
||||||
|
assert_eq!(created_user.name, "Test User");
|
||||||
|
assert_eq!(created_user.telegram_id, Some(123456789));
|
||||||
|
|
||||||
|
// Get by ID
|
||||||
|
let fetched_user = repo.get_by_id(created_user.id).await.unwrap();
|
||||||
|
assert!(fetched_user.is_some());
|
||||||
|
assert_eq!(fetched_user.unwrap().name, "Test User");
|
||||||
|
|
||||||
|
// Update user
|
||||||
|
let update_dto = UpdateUserDto {
|
||||||
|
name: Some("Updated User".to_string()),
|
||||||
|
comment: None,
|
||||||
|
telegram_id: None,
|
||||||
|
is_telegram_admin: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let updated_user = repo.update(created_user.id, update_dto).await.unwrap();
|
||||||
|
assert!(updated_user.is_some());
|
||||||
|
assert_eq!(updated_user.unwrap().name, "Updated User");
|
||||||
|
|
||||||
|
// Delete user
|
||||||
|
let deleted = repo.delete(created_user.id).await.unwrap();
|
||||||
|
assert!(deleted);
|
||||||
|
|
||||||
|
// Verify deletion
|
||||||
|
let deleted_user = repo.get_by_id(created_user.id).await.unwrap();
|
||||||
|
assert!(deleted_user.is_none());
|
||||||
|
}
|
||||||
|
}
|
||||||
src/database/repository/user_request.rs (new file, 164 lines added)
@@ -0,0 +1,164 @@
|
|||||||
|
use crate::database::entities::user_request::{
|
||||||
|
self, ActiveModel, CreateUserRequestDto, Model, RequestStatus, UpdateUserRequestDto,
|
||||||
|
};
|
||||||
|
use anyhow::Result;
|
||||||
|
use sea_orm::{
|
||||||
|
ColumnTrait, DatabaseConnection, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder,
|
||||||
|
QuerySelect,
|
||||||
|
};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
pub struct UserRequestRepository {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UserRequestRepository {
|
||||||
|
pub fn new(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_all(&self, page: u64, per_page: u64) -> Result<(Vec<Model>, u64)> {
|
||||||
|
let paginator = user_request::Entity::find()
|
||||||
|
.order_by_desc(user_request::Column::CreatedAt)
|
||||||
|
.paginate(&self.db, per_page);
|
||||||
|
|
||||||
|
let total = paginator.num_items().await?;
|
||||||
|
let items = paginator.fetch_page(page - 1).await?;
|
||||||
|
|
||||||
|
Ok((items, total))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_pending(&self, page: u64, per_page: u64) -> Result<(Vec<Model>, u64)> {
|
||||||
|
let paginator = user_request::Entity::find()
|
||||||
|
.filter(user_request::Column::Status.eq("pending"))
|
||||||
|
.order_by_desc(user_request::Column::CreatedAt)
|
||||||
|
.paginate(&self.db, per_page);
|
||||||
|
|
||||||
|
let total = paginator.num_items().await?;
|
||||||
|
let items = paginator.fetch_page(page - 1).await?;
|
||||||
|
|
||||||
|
Ok((items, total))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_id(&self, id: Uuid) -> Result<Option<Model>> {
|
||||||
|
let request = user_request::Entity::find_by_id(id).one(&self.db).await?;
|
||||||
|
Ok(request)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_by_telegram_id(&self, telegram_id: i64) -> Result<Vec<Model>> {
|
||||||
|
let requests = user_request::Entity::find()
|
||||||
|
.filter(user_request::Column::TelegramId.eq(telegram_id))
|
||||||
|
.order_by_desc(user_request::Column::CreatedAt)
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(requests)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find recent user requests (ordered by creation date)
|
||||||
|
pub async fn find_recent(&self, limit: u64) -> Result<Vec<Model>> {
|
||||||
|
let requests = user_request::Entity::find()
|
||||||
|
.order_by_desc(user_request::Column::CreatedAt)
|
||||||
|
.limit(limit)
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(requests)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_pending_by_telegram_id(&self, telegram_id: i64) -> Result<Option<Model>> {
|
||||||
|
let request = user_request::Entity::find()
|
||||||
|
.filter(user_request::Column::TelegramId.eq(telegram_id))
|
||||||
|
.filter(user_request::Column::Status.eq("pending"))
|
||||||
|
.order_by_desc(user_request::Column::CreatedAt)
|
||||||
|
.one(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(request)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Count total requests
|
||||||
|
pub async fn count_all(&self) -> Result<i64> {
|
||||||
|
let count = user_request::Entity::find().count(&self.db).await?;
|
||||||
|
|
||||||
|
Ok(count as i64)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find requests with pagination
|
||||||
|
pub async fn find_paginated(&self, offset: u64, limit: u64) -> Result<Vec<Model>> {
|
||||||
|
let requests = user_request::Entity::find()
|
||||||
|
.order_by_desc(user_request::Column::CreatedAt)
|
||||||
|
.offset(offset)
|
||||||
|
.limit(limit)
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(requests)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create(&self, dto: CreateUserRequestDto) -> Result<Model> {
|
||||||
|
use sea_orm::ActiveModelTrait;
|
||||||
|
let active_model: ActiveModel = dto.into();
|
||||||
|
let request = active_model.insert(&self.db).await?;
|
||||||
|
Ok(request)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(
|
||||||
|
&self,
|
||||||
|
id: Uuid,
|
||||||
|
dto: UpdateUserRequestDto,
|
||||||
|
processed_by: Uuid,
|
||||||
|
) -> Result<Option<Model>> {
|
||||||
|
let model = user_request::Entity::find_by_id(id).one(&self.db).await?;
|
||||||
|
|
||||||
|
match model {
|
||||||
|
Some(model) => {
|
||||||
|
use sea_orm::ActiveModelTrait;
|
||||||
|
let active_model = model.apply_update(dto, processed_by);
|
||||||
|
let updated = active_model.update(&self.db).await?;
|
||||||
|
Ok(Some(updated))
|
||||||
|
}
|
||||||
|
None => Ok(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn approve(
|
||||||
|
&self,
|
||||||
|
id: Uuid,
|
||||||
|
response_message: Option<String>,
|
||||||
|
processed_by: Uuid,
|
||||||
|
) -> Result<Option<Model>> {
|
||||||
|
let dto = UpdateUserRequestDto {
|
||||||
|
status: Some(RequestStatus::Approved.as_str().to_string()),
|
||||||
|
response_message,
|
||||||
|
processed_by_user_id: None,
|
||||||
|
};
|
||||||
|
self.update(id, dto, processed_by).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn decline(
|
||||||
|
&self,
|
||||||
|
id: Uuid,
|
||||||
|
response_message: Option<String>,
|
||||||
|
processed_by: Uuid,
|
||||||
|
) -> Result<Option<Model>> {
|
||||||
|
let dto = UpdateUserRequestDto {
|
||||||
|
status: Some(RequestStatus::Declined.as_str().to_string()),
|
||||||
|
response_message,
|
||||||
|
processed_by_user_id: None,
|
||||||
|
};
|
||||||
|
self.update(id, dto, processed_by).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete(&self, id: Uuid) -> Result<bool> {
|
||||||
|
let result = user_request::Entity::delete_by_id(id)
|
||||||
|
.exec(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(result.rows_affected > 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn count_by_status(&self, status: RequestStatus) -> Result<u64> {
|
||||||
|
let count = user_request::Entity::find()
|
||||||
|
.filter(user_request::Column::Status.eq(status.as_str()))
|
||||||
|
.count(&self.db)
|
||||||
|
.await?;
|
||||||
|
Ok(count)
|
||||||
|
}
|
||||||
|
}
|
||||||
176
src/main.rs
Normal file
176
src/main.rs
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||||
|
|
||||||
|
mod config;
|
||||||
|
mod database;
|
||||||
|
mod services;
|
||||||
|
mod web;
|
||||||
|
|
||||||
|
use config::{args::parse_args, AppConfig};
|
||||||
|
use database::DatabaseManager;
|
||||||
|
use services::{TaskScheduler, TelegramService, XrayService};
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<()> {
|
||||||
|
// Initialize default crypto provider for rustls
|
||||||
|
rustls::crypto::aws_lc_rs::default_provider()
|
||||||
|
.install_default()
|
||||||
|
.expect("Failed to install rustls crypto provider");
|
||||||
|
|
||||||
|
// Parse command line arguments first
|
||||||
|
let args = parse_args();
|
||||||
|
|
||||||
|
// Initialize logging early with basic configuration
|
||||||
|
init_logging(&args.log_level.as_deref().unwrap_or("info"))?;
|
||||||
|
|
||||||
|
// Handle special flags
|
||||||
|
if args.print_default_config {
|
||||||
|
print_default_config()?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load configuration
|
||||||
|
let config = match AppConfig::load() {
|
||||||
|
Ok(config) => config,
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to load configuration: {}", e);
|
||||||
|
if args.validate_config {
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
AppConfig::default()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Validate configuration if requested
|
||||||
|
if args.validate_config {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display configuration summary
|
||||||
|
config.display_summary();
|
||||||
|
|
||||||
|
// Print environment info in debug mode
|
||||||
|
if tracing::level_enabled!(tracing::Level::DEBUG) {
|
||||||
|
config::env::EnvVars::print_env_info();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize database connection
|
||||||
|
let db = match DatabaseManager::new(&config.database).await {
|
||||||
|
Ok(db) => db,
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to initialize database: {}", e);
|
||||||
|
return Err(e);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Perform database health check
|
||||||
|
match db.health_check().await {
|
||||||
|
Ok(false) => tracing::warn!("Database health check failed"),
|
||||||
|
Err(e) => tracing::error!("Database health check error: {}", e),
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize event bus first
|
||||||
|
let event_receiver = crate::services::events::init_event_bus();
|
||||||
|
|
||||||
|
// Initialize xray service
|
||||||
|
let xray_service = XrayService::new();
|
||||||
|
|
||||||
|
// Initialize and start task scheduler with dependencies
|
||||||
|
let mut task_scheduler = TaskScheduler::new().await?;
|
||||||
|
task_scheduler.start(db.clone(), xray_service).await?;
|
||||||
|
|
||||||
|
// Start event-driven sync handler with the receiver
|
||||||
|
TaskScheduler::start_event_handler(db.clone(), event_receiver).await;
|
||||||
|
|
||||||
|
// Initialize Telegram service if needed
|
||||||
|
let telegram_service = Arc::new(TelegramService::new(db.clone(), config.clone()));
|
||||||
|
if let Err(e) = telegram_service.initialize().await {
|
||||||
|
tracing::warn!("Failed to initialize Telegram service: {}", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start web server with task scheduler
|
||||||
|
|
||||||
|
tokio::select! {
|
||||||
|
result = web::start_server(db, config.clone(), Some(telegram_service.clone())) => {
|
||||||
|
match result {
|
||||||
|
Err(e) => tracing::error!("Web server error: {}", e),
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ = tokio::signal::ctrl_c() => {
|
||||||
|
if let Err(e) = task_scheduler.shutdown().await {
|
||||||
|
tracing::error!("Error shutting down task scheduler: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_logging(level: &str) -> Result<()> {
|
||||||
|
let filter = tracing_subscriber::EnvFilter::try_from_default_env()
|
||||||
|
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new(level));
|
||||||
|
|
||||||
|
tracing_subscriber::registry()
|
||||||
|
.with(filter)
|
||||||
|
.with(
|
||||||
|
tracing_subscriber::fmt::layer()
|
||||||
|
.with_target(true) // Show module names
|
||||||
|
.with_thread_ids(false)
|
||||||
|
.with_thread_names(false)
|
||||||
|
.with_file(false)
|
||||||
|
.with_line_number(false)
|
||||||
|
.compact(),
|
||||||
|
)
|
||||||
|
.try_init()?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn print_default_config() -> Result<()> {
|
||||||
|
let default_config = AppConfig::default();
|
||||||
|
let toml_content = toml::to_string_pretty(&default_config)?;
|
||||||
|
|
||||||
|
println!("# Default configuration for Xray Admin Panel");
|
||||||
|
println!("# Save this to config.toml and modify as needed\n");
|
||||||
|
println!("{}", toml_content);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
fn mask_url(url: &str) -> String {
|
||||||
|
if let Ok(parsed) = url::Url::parse(url) {
|
||||||
|
if parsed.password().is_some() {
|
||||||
|
let mut masked = parsed.clone();
|
||||||
|
masked.set_password(Some("***")).unwrap();
|
||||||
|
masked.to_string()
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
url.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mask_url() {
|
||||||
|
let url = "postgresql://user:password@localhost/db";
|
||||||
|
let masked = mask_url(url);
|
||||||
|
assert!(masked.contains("***"));
|
||||||
|
assert!(!masked.contains("password"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mask_url_no_password() {
|
||||||
|
let url = "postgresql://user@localhost/db";
|
||||||
|
let masked = mask_url(url);
|
||||||
|
assert_eq!(masked, url);
|
||||||
|
}
|
||||||
|
}
|
||||||
328
src/services/acme/client.rs
Normal file
328
src/services/acme/client.rs
Normal file
@@ -0,0 +1,328 @@
|
|||||||
|
use instant_acme::{
|
||||||
|
Account, AuthorizationStatus, ChallengeType, Identifier, NewAccount, NewOrder, OrderStatus,
|
||||||
|
};
|
||||||
|
use rcgen::{CertificateParams, DistinguishedName, KeyPair};
|
||||||
|
use std::time::{Duration, Instant};
|
||||||
|
use tokio::time::sleep;
|
||||||
|
use tracing::{debug, info, warn};
|
||||||
|
|
||||||
|
use crate::services::acme::{AcmeError, CloudflareClient};
|
||||||
|
|
||||||
|
pub struct AcmeClient {
|
||||||
|
cloudflare: CloudflareClient,
|
||||||
|
account: Account,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AcmeClient {
|
||||||
|
pub async fn new(
|
||||||
|
cloudflare_token: String,
|
||||||
|
email: &str,
|
||||||
|
directory_url: String,
|
||||||
|
) -> Result<Self, AcmeError> {
|
||||||
|
info!("Creating ACME client for directory: {}", directory_url);
|
||||||
|
|
||||||
|
let cloudflare = CloudflareClient::new(cloudflare_token)?;
|
||||||
|
|
||||||
|
// Create Let's Encrypt account
|
||||||
|
info!("Creating Let's Encrypt account for: {}", email);
|
||||||
|
let (account, _credentials) = Account::builder()
|
||||||
|
.map_err(|e| AcmeError::AccountCreation(e.to_string()))?
|
||||||
|
.create(
|
||||||
|
&NewAccount {
|
||||||
|
contact: &[&format!("mailto:{}", email)],
|
||||||
|
terms_of_service_agreed: true,
|
||||||
|
only_return_existing: false,
|
||||||
|
},
|
||||||
|
directory_url.clone(),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map_err(|e| AcmeError::AccountCreation(e.to_string()))?;
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
cloudflare,
|
||||||
|
account,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_certificate(
|
||||||
|
&mut self,
|
||||||
|
domain: &str,
|
||||||
|
base_domain: &str,
|
||||||
|
) -> Result<(String, String), AcmeError> {
|
||||||
|
info!("Starting certificate request for domain: {}", domain);
|
||||||
|
|
||||||
|
// Validate domain
|
||||||
|
if domain.is_empty() || base_domain.is_empty() {
|
||||||
|
return Err(AcmeError::InvalidDomain(
|
||||||
|
"Domain cannot be empty".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a new order
|
||||||
|
let identifiers = vec![Identifier::Dns(domain.to_string())];
|
||||||
|
let mut order = self
|
||||||
|
.account
|
||||||
|
.new_order(&NewOrder::new(&identifiers))
|
||||||
|
.await
|
||||||
|
.map_err(|e| AcmeError::OrderCreation(e.to_string()))?;
|
||||||
|
|
||||||
|
debug!("Created order");
|
||||||
|
|
||||||
|
// Process authorizations
|
||||||
|
let mut authorizations = order.authorizations();
|
||||||
|
|
||||||
|
while let Some(authz_result) = authorizations.next().await {
|
||||||
|
let mut authz = authz_result.map_err(|e| AcmeError::Challenge(e.to_string()))?;
|
||||||
|
|
||||||
|
let identifier = format!("{:?}", authz.identifier());
|
||||||
|
|
||||||
|
if authz.status == AuthorizationStatus::Valid {
|
||||||
|
info!("Authorization already valid for: {:?}", identifier);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get challenge value and record ID first
|
||||||
|
let (_challenge_value, record_id) = {
|
||||||
|
// Find DNS challenge
|
||||||
|
let mut challenge = authz
|
||||||
|
.challenge(ChallengeType::Dns01)
|
||||||
|
.ok_or_else(|| AcmeError::Challenge("No DNS challenge found".to_string()))?;
|
||||||
|
|
||||||
|
info!("Processing DNS challenge for: {:?}", identifier);
|
||||||
|
|
||||||
|
// Get challenge value - use key authorization from challenge
|
||||||
|
let challenge_value = challenge.key_authorization().dns_value();
|
||||||
|
debug!("Challenge value: {}", challenge_value);
|
||||||
|
|
||||||
|
// Create DNS record
|
||||||
|
let challenge_domain = format!("_acme-challenge.{}", domain);
|
||||||
|
let record_id = self
|
||||||
|
.cloudflare
|
||||||
|
.create_txt_record(base_domain, &challenge_domain, &challenge_value)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
info!("Created DNS TXT record, waiting for propagation...");
|
||||||
|
|
||||||
|
// Wait for DNS propagation
|
||||||
|
self.wait_for_dns_propagation(&challenge_domain, &challenge_value)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Submit challenge
|
||||||
|
info!("Submitting challenge...");
|
||||||
|
challenge
|
||||||
|
.set_ready()
|
||||||
|
.await
|
||||||
|
.map_err(|e| AcmeError::Challenge(e.to_string()))?;
|
||||||
|
|
||||||
|
(challenge_value, record_id)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Wait for challenge completion
|
||||||
|
info!("Waiting for challenge validation (5 seconds)...");
|
||||||
|
sleep(Duration::from_secs(5)).await;
|
||||||
|
|
||||||
|
// Cleanup DNS record
|
||||||
|
self.cleanup_dns_record(base_domain, &record_id).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for order to be ready
|
||||||
|
info!("Waiting for order to be ready...");
|
||||||
|
let start = Instant::now();
|
||||||
|
let timeout = Duration::from_secs(300);
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if start.elapsed() > timeout {
|
||||||
|
return Err(AcmeError::Challenge("Order processing timeout".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
order
|
||||||
|
.refresh()
|
||||||
|
.await
|
||||||
|
.map_err(|e| AcmeError::OrderCreation(e.to_string()))?;
|
||||||
|
|
||||||
|
match order.state().status {
|
||||||
|
OrderStatus::Ready => {
|
||||||
|
info!("Order is ready for finalization");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
OrderStatus::Invalid => {
|
||||||
|
return Err(AcmeError::Challenge("Order became invalid".to_string()));
|
||||||
|
}
|
||||||
|
OrderStatus::Pending => {
|
||||||
|
debug!("Order still pending, waiting...");
|
||||||
|
sleep(Duration::from_secs(5)).await;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
debug!("Order status: {:?}", order.state().status);
|
||||||
|
sleep(Duration::from_secs(5)).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate CSR
|
||||||
|
info!("Generating certificate signing request...");
|
||||||
|
let mut params = CertificateParams::new(vec![domain.to_string()]);
|
||||||
|
|
||||||
|
params.distinguished_name = DistinguishedName::new();
|
||||||
|
|
||||||
|
let key_pair = KeyPair::generate(&rcgen::PKCS_ECDSA_P256_SHA256)
|
||||||
|
.map_err(|e| AcmeError::CertificateGeneration(e.to_string()))?;
|
||||||
|
|
||||||
|
// Set the key pair for CSR generation
|
||||||
|
params.key_pair = Some(key_pair);
|
||||||
|
|
||||||
|
// Generate CSR using rcgen certificate
|
||||||
|
let cert = rcgen::Certificate::from_params(params)
|
||||||
|
.map_err(|e| AcmeError::CertificateGeneration(e.to_string()))?;
|
||||||
|
let csr_der = cert
|
||||||
|
.serialize_request_der()
|
||||||
|
.map_err(|e| AcmeError::CertificateGeneration(e.to_string()))?;
|
||||||
|
|
||||||
|
// Finalize order with CSR
|
||||||
|
info!("Finalizing order with CSR...");
|
||||||
|
order
|
||||||
|
.finalize_csr(&csr_der)
|
||||||
|
.await
|
||||||
|
.map_err(|e| AcmeError::CertificateGeneration(e.to_string()))?;
|
||||||
|
|
||||||
|
// Wait for certificate to be ready
|
||||||
|
info!("Waiting for certificate to be generated...");
|
||||||
|
let start = Instant::now();
|
||||||
|
let timeout = Duration::from_secs(300); // 5 minutes
|
||||||
|
|
||||||
|
let cert_chain_pem = loop {
|
||||||
|
if start.elapsed() > timeout {
|
||||||
|
return Err(AcmeError::CertificateGeneration(
|
||||||
|
"Certificate generation timeout".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
order
|
||||||
|
.refresh()
|
||||||
|
.await
|
||||||
|
.map_err(|e| AcmeError::CertificateGeneration(e.to_string()))?;
|
||||||
|
|
||||||
|
match order.state().status {
|
||||||
|
OrderStatus::Valid => {
|
||||||
|
info!("Certificate is ready!");
|
||||||
|
break order
|
||||||
|
.certificate()
|
||||||
|
.await
|
||||||
|
.map_err(|e| AcmeError::CertificateGeneration(e.to_string()))?
|
||||||
|
.ok_or_else(|| {
|
||||||
|
AcmeError::CertificateGeneration(
|
||||||
|
"Certificate not available".to_string(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
OrderStatus::Invalid => {
|
||||||
|
return Err(AcmeError::CertificateGeneration(
|
||||||
|
"Order became invalid during certificate generation".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
OrderStatus::Processing => {
|
||||||
|
debug!("Certificate still being processed, waiting...");
|
||||||
|
sleep(Duration::from_secs(3)).await;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
debug!(
|
||||||
|
"Waiting for certificate, order status: {:?}",
|
||||||
|
order.state().status
|
||||||
|
);
|
||||||
|
sleep(Duration::from_secs(3)).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let private_key_pem = cert.serialize_private_key_pem();
|
||||||
|
|
||||||
|
info!("Certificate successfully obtained!");
|
||||||
|
Ok((cert_chain_pem, private_key_pem))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn wait_for_dns_propagation(
|
||||||
|
&self,
|
||||||
|
record_name: &str,
|
||||||
|
expected_value: &str,
|
||||||
|
) -> Result<(), AcmeError> {
|
||||||
|
info!("Checking DNS propagation for: {}", record_name);
|
||||||
|
|
||||||
|
let start = Instant::now();
|
||||||
|
let timeout = Duration::from_secs(120); // 2 minutes
|
||||||
|
|
||||||
|
while start.elapsed() < timeout {
|
||||||
|
match self.check_dns_txt_record(record_name, expected_value).await {
|
||||||
|
Ok(true) => {
|
||||||
|
info!("DNS propagation confirmed");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
Ok(false) => {
|
||||||
|
debug!("DNS not yet propagated, waiting...");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
debug!("DNS check failed: {:?}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sleep(Duration::from_secs(10)).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
warn!("DNS propagation timeout, but continuing anyway");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn check_dns_txt_record(
|
||||||
|
&self,
|
||||||
|
record_name: &str,
|
||||||
|
expected_value: &str,
|
||||||
|
) -> Result<bool, AcmeError> {
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
|
let output = Command::new("dig")
|
||||||
|
.args(&["+short", "TXT", record_name])
|
||||||
|
.output()
|
||||||
|
.map_err(|e| AcmeError::Io(e))?;
|
||||||
|
|
||||||
|
if !output.status.success() {
|
||||||
|
return Err(AcmeError::Challenge("dig command failed".to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let stdout = String::from_utf8(output.stdout)
|
||||||
|
.map_err(|_| AcmeError::Challenge("Invalid UTF-8 in dig output".to_string()))?;
|
||||||
|
|
||||||
|
// Parse TXT record (remove quotes)
|
||||||
|
for line in stdout.lines() {
|
||||||
|
let cleaned = line.trim().trim_matches('"');
|
||||||
|
if cleaned == expected_value {
|
||||||
|
return Ok(true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn cleanup_dns_record(&self, base_domain: &str, record_id: &str) {
|
||||||
|
if let Err(e) = self
|
||||||
|
.cloudflare
|
||||||
|
.delete_txt_record(base_domain, record_id)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
warn!("Failed to cleanup DNS record {}: {:?}", record_id, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the base domain from a full domain (e.g., "api.example.com" -> "example.com")
|
||||||
|
pub fn get_base_domain(domain: &str) -> Result<String, AcmeError> {
|
||||||
|
let parts: Vec<&str> = domain.split('.').collect();
|
||||||
|
if parts.len() < 2 {
|
||||||
|
return Err(AcmeError::InvalidDomain(
|
||||||
|
"Domain must have at least 2 parts".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Take the last two parts for base domain
|
||||||
|
let base_domain = format!("{}.{}", parts[parts.len() - 2], parts[parts.len() - 1]);
|
||||||
|
Ok(base_domain)
|
||||||
|
}
|
||||||
|
}
|
||||||
242
src/services/acme/cloudflare.rs
Normal file
242
src/services/acme/cloudflare.rs
Normal file
@@ -0,0 +1,242 @@
|
|||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::time::Duration;
|
||||||
|
use tracing::{debug, info};
|
||||||
|
|
||||||
|
use crate::services::acme::error::AcmeError;
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
struct CloudflareZone {
|
||||||
|
id: String,
|
||||||
|
name: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
struct CloudflareZonesResponse {
|
||||||
|
result: Vec<CloudflareZone>,
|
||||||
|
success: bool,
|
||||||
|
errors: Option<Vec<CloudflareApiError>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
struct CloudflareDnsRecord {
|
||||||
|
id: String,
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
record_type: String,
|
||||||
|
name: String,
|
||||||
|
content: String,
|
||||||
|
ttl: u32,
|
||||||
|
proxied: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
struct CloudflareDnsRecordsResponse {
|
||||||
|
result: Vec<CloudflareDnsRecord>,
|
||||||
|
success: bool,
|
||||||
|
errors: Option<Vec<CloudflareApiError>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
struct CreateDnsRecordRequest {
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
record_type: String,
|
||||||
|
name: String,
|
||||||
|
content: String,
|
||||||
|
ttl: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
struct CreateDnsRecordResponse {
|
||||||
|
result: CloudflareDnsRecord,
|
||||||
|
success: bool,
|
||||||
|
errors: Option<Vec<CloudflareApiError>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
struct CloudflareApiError {
|
||||||
|
code: u32,
|
||||||
|
message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct CloudflareClient {
|
||||||
|
client: reqwest::Client,
|
||||||
|
api_token: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CloudflareClient {
|
||||||
|
pub fn new(api_token: String) -> Result<Self, AcmeError> {
|
||||||
|
let client = reqwest::Client::builder()
|
||||||
|
.timeout(Duration::from_secs(30))
|
||||||
|
.build()
|
||||||
|
.map_err(|e| AcmeError::HttpRequest(e))?;
|
||||||
|
|
||||||
|
Ok(Self { client, api_token })
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_zone_id(&self, domain: &str) -> Result<String, AcmeError> {
|
||||||
|
info!("Getting Cloudflare zone ID for domain: {}", domain);
|
||||||
|
|
||||||
|
let url = format!("https://api.cloudflare.com/client/v4/zones?name={}", domain);
|
||||||
|
|
||||||
|
let response = self
|
||||||
|
.client
|
||||||
|
.get(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_token))
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !response.status().is_success() {
|
||||||
|
let status = response.status();
|
||||||
|
let body = response.text().await.unwrap_or_default();
|
||||||
|
return Err(AcmeError::CloudflareApi(format!(
|
||||||
|
"HTTP {}: {}",
|
||||||
|
status, body
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let zones: CloudflareZonesResponse = response.json().await?;
|
||||||
|
|
||||||
|
if !zones.success {
|
||||||
|
let errors = zones.errors.unwrap_or_default();
|
||||||
|
let error_messages: Vec<String> = errors.iter().map(|e| e.message.clone()).collect();
|
||||||
|
return Err(AcmeError::CloudflareApi(format!(
|
||||||
|
"API errors: {}",
|
||||||
|
error_messages.join(", ")
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
zones
|
||||||
|
.result
|
||||||
|
.into_iter()
|
||||||
|
.find(|z| z.name == domain)
|
||||||
|
.map(|z| z.id)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
AcmeError::CloudflareApi(format!("Zone not found for domain: {}", domain))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn create_txt_record(
|
||||||
|
&self,
|
||||||
|
domain: &str,
|
||||||
|
record_name: &str,
|
||||||
|
content: &str,
|
||||||
|
) -> Result<String, AcmeError> {
|
||||||
|
let zone_id = self.get_zone_id(domain).await?;
|
||||||
|
info!("Creating TXT record {} in zone {}", record_name, domain);
|
||||||
|
|
||||||
|
let request = CreateDnsRecordRequest {
|
||||||
|
record_type: "TXT".to_string(),
|
||||||
|
name: record_name.to_string(),
|
||||||
|
content: content.to_string(),
|
||||||
|
ttl: 120, // 2 minutes TTL for quick propagation
|
||||||
|
};
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://api.cloudflare.com/client/v4/zones/{}/dns_records",
|
||||||
|
zone_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = self
|
||||||
|
.client
|
||||||
|
.post(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_token))
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.json(&request)
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !response.status().is_success() {
|
||||||
|
let status = response.status();
|
||||||
|
let body = response.text().await.unwrap_or_default();
|
||||||
|
return Err(AcmeError::CloudflareApi(format!(
|
||||||
|
"Failed to create DNS record ({}): {}",
|
||||||
|
status, body
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let result: CreateDnsRecordResponse = response.json().await?;
|
||||||
|
|
||||||
|
if !result.success {
|
||||||
|
let errors = result.errors.unwrap_or_default();
|
||||||
|
let error_messages: Vec<String> = errors.iter().map(|e| e.message.clone()).collect();
|
||||||
|
return Err(AcmeError::CloudflareApi(format!(
|
||||||
|
"Failed to create record: {}",
|
||||||
|
error_messages.join(", ")
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("Created DNS record with ID: {}", result.result.id);
|
||||||
|
Ok(result.result.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete_txt_record(&self, domain: &str, record_id: &str) -> Result<(), AcmeError> {
|
||||||
|
let zone_id = self.get_zone_id(domain).await?;
|
||||||
|
info!("Deleting TXT record {} from zone {}", record_id, domain);
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://api.cloudflare.com/client/v4/zones/{}/dns_records/{}",
|
||||||
|
zone_id, record_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = self
|
||||||
|
.client
|
||||||
|
.delete(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_token))
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !response.status().is_success() {
|
||||||
|
let status = response.status();
|
||||||
|
let body = response.text().await.unwrap_or_default();
|
||||||
|
return Err(AcmeError::CloudflareApi(format!(
|
||||||
|
"Failed to delete DNS record ({}): {}",
|
||||||
|
status, body
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Successfully deleted DNS record");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn find_txt_record(
|
||||||
|
&self,
|
||||||
|
domain: &str,
|
||||||
|
record_name: &str,
|
||||||
|
) -> Result<Option<String>, AcmeError> {
|
||||||
|
let zone_id = self.get_zone_id(domain).await?;
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://api.cloudflare.com/client/v4/zones/{}/dns_records?type=TXT&name={}",
|
||||||
|
zone_id, record_name
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = self
|
||||||
|
.client
|
||||||
|
.get(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_token))
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !response.status().is_success() {
|
||||||
|
let status = response.status();
|
||||||
|
let body = response.text().await.unwrap_or_default();
|
||||||
|
return Err(AcmeError::CloudflareApi(format!(
|
||||||
|
"Failed to list DNS records ({}): {}",
|
||||||
|
status, body
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let records: CloudflareDnsRecordsResponse = response.json().await?;
|
||||||
|
|
||||||
|
if !records.success {
|
||||||
|
let errors = records.errors.unwrap_or_default();
|
||||||
|
let error_messages: Vec<String> = errors.iter().map(|e| e.message.clone()).collect();
|
||||||
|
return Err(AcmeError::CloudflareApi(format!(
|
||||||
|
"Failed to list records: {}",
|
||||||
|
error_messages.join(", ")
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(records.result.first().map(|r| r.id.clone()))
|
||||||
|
}
|
||||||
|
}
|
||||||
40
src/services/acme/error.rs
Normal file
40
src/services/acme/error.rs
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum AcmeError {
|
||||||
|
#[error("ACME account creation failed: {0}")]
|
||||||
|
AccountCreation(String),
|
||||||
|
|
||||||
|
#[error("ACME order creation failed: {0}")]
|
||||||
|
OrderCreation(String),
|
||||||
|
|
||||||
|
#[error("ACME challenge failed: {0}")]
|
||||||
|
Challenge(String),
|
||||||
|
|
||||||
|
#[error("DNS propagation timeout")]
|
||||||
|
DnsPropagationTimeout,
|
||||||
|
|
||||||
|
#[error("Certificate generation failed: {0}")]
|
||||||
|
CertificateGeneration(String),
|
||||||
|
|
||||||
|
#[error("Cloudflare API error: {0}")]
|
||||||
|
CloudflareApi(String),
|
||||||
|
|
||||||
|
#[error("DNS provider not found")]
|
||||||
|
DnsProviderNotFound,
|
||||||
|
|
||||||
|
#[error("Invalid domain: {0}")]
|
||||||
|
InvalidDomain(String),
|
||||||
|
|
||||||
|
#[error("HTTP request failed: {0}")]
|
||||||
|
HttpRequest(#[from] reqwest::Error),
|
||||||
|
|
||||||
|
#[error("JSON parsing failed: {0}")]
|
||||||
|
JsonParsing(#[from] serde_json::Error),
|
||||||
|
|
||||||
|
#[error("IO error: {0}")]
|
||||||
|
Io(#[from] std::io::Error),
|
||||||
|
|
||||||
|
#[error("Instant ACME error: {0}")]
|
||||||
|
InstantAcme(String),
|
||||||
|
}
|
||||||
7
src/services/acme/mod.rs
Normal file
7
src/services/acme/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
pub mod client;
|
||||||
|
pub mod cloudflare;
|
||||||
|
pub mod error;
|
||||||
|
|
||||||
|
pub use client::AcmeClient;
|
||||||
|
pub use cloudflare::CloudflareClient;
|
||||||
|
pub use error::AcmeError;
|
||||||
266
src/services/certificates.rs
Normal file
266
src/services/certificates.rs
Normal file
@@ -0,0 +1,266 @@
|
|||||||
|
use rcgen::{
|
||||||
|
Certificate, CertificateParams, DistinguishedName, DnType, KeyPair, SanType,
|
||||||
|
PKCS_ECDSA_P256_SHA256,
|
||||||
|
};
|
||||||
|
use std::net::IpAddr;
|
||||||
|
use time::{Duration, OffsetDateTime};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::database::entities::dns_provider::DnsProviderType;
|
||||||
|
use crate::database::repository::DnsProviderRepository;
|
||||||
|
use crate::services::acme::{AcmeClient, AcmeError};
|
||||||
|
use sea_orm::DatabaseConnection;
|
||||||
|
|
||||||
|
/// Certificate management service
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct CertificateService {
|
||||||
|
db: Option<DatabaseConnection>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
impl CertificateService {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self { db: None }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_db(db: DatabaseConnection) -> Self {
|
||||||
|
Self { db: Some(db) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate self-signed certificate optimized for Xray
|
||||||
|
pub async fn generate_self_signed(&self, domain: &str) -> anyhow::Result<(String, String)> {
|
||||||
|
tracing::info!("Generating self-signed certificate for domain: {}", domain);
|
||||||
|
|
||||||
|
// Create certificate parameters with ECDSA (recommended for Xray)
|
||||||
|
let mut params = CertificateParams::new(vec![domain.to_string()]);
|
||||||
|
|
||||||
|
// Use ECDSA P-256 which is recommended for Xray (equivalent to RSA-3072 in strength)
|
||||||
|
params.alg = &PKCS_ECDSA_P256_SHA256;
|
||||||
|
|
||||||
|
// Generate ECDSA key pair
|
||||||
|
let key_pair = KeyPair::generate(&PKCS_ECDSA_P256_SHA256)?;
|
||||||
|
params.key_pair = Some(key_pair);
|
||||||
|
|
||||||
|
// Set certificate subject with proper fields
|
||||||
|
let mut distinguished_name = DistinguishedName::new();
|
||||||
|
distinguished_name.push(DnType::CommonName, domain);
|
||||||
|
distinguished_name.push(DnType::OrganizationName, "OutFleet");
|
||||||
|
distinguished_name.push(DnType::OrganizationalUnitName, "VPN");
|
||||||
|
distinguished_name.push(DnType::CountryName, "US");
|
||||||
|
distinguished_name.push(DnType::StateOrProvinceName, "State");
|
||||||
|
distinguished_name.push(DnType::LocalityName, "City");
|
||||||
|
params.distinguished_name = distinguished_name;
|
||||||
|
|
||||||
|
// Add comprehensive Subject Alternative Names for better compatibility
|
||||||
|
let mut san_list = vec![
|
||||||
|
SanType::DnsName(domain.to_string()),
|
||||||
|
SanType::DnsName("localhost".to_string()),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Add IP addresses if domain looks like an IP
|
||||||
|
if let Ok(ip) = domain.parse::<IpAddr>() {
|
||||||
|
san_list.push(SanType::IpAddress(ip));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always add localhost IP for local testing
|
||||||
|
san_list.push(SanType::IpAddress(IpAddr::V4(std::net::Ipv4Addr::new(
|
||||||
|
127, 0, 0, 1,
|
||||||
|
))));
|
||||||
|
|
||||||
|
// If domain is not an IP, also add wildcard subdomain
|
||||||
|
if domain.parse::<IpAddr>().is_err() && !domain.starts_with("*.") {
|
||||||
|
san_list.push(SanType::DnsName(format!("*.{}", domain)));
|
||||||
|
}
|
||||||
|
|
||||||
|
params.subject_alt_names = san_list;
|
||||||
|
|
||||||
|
// Set validity period (1 year as recommended)
|
||||||
|
params.not_before = OffsetDateTime::now_utc();
|
||||||
|
params.not_after = OffsetDateTime::now_utc() + Duration::days(365);
|
||||||
|
|
||||||
|
// Set serial number
|
||||||
|
params.serial_number = Some(rcgen::SerialNumber::from_slice(&[1, 2, 3, 4]));
|
||||||
|
|
||||||
|
// Generate certificate
|
||||||
|
let cert = Certificate::from_params(params)?;
|
||||||
|
|
||||||
|
// Get PEM format with proper formatting
|
||||||
|
let cert_pem = cert.serialize_pem()?;
|
||||||
|
let key_pem = cert.serialize_private_key_pem();
|
||||||
|
|
||||||
|
// Validate PEM format
|
||||||
|
if !cert_pem.starts_with("-----BEGIN CERTIFICATE-----")
|
||||||
|
|| !cert_pem.ends_with("-----END CERTIFICATE-----\n")
|
||||||
|
{
|
||||||
|
return Err(anyhow::anyhow!("Invalid certificate PEM format"));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !key_pem.starts_with("-----BEGIN") || !key_pem.contains("PRIVATE KEY-----") {
|
||||||
|
return Err(anyhow::anyhow!("Invalid private key PEM format"));
|
||||||
|
}
|
||||||
|
|
||||||
|
tracing::debug!("Generated ECDSA P-256 certificate for domain: {}", domain);
|
||||||
|
|
||||||
|
Ok((cert_pem, key_pem))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate Let's Encrypt certificate using DNS challenge
|
||||||
|
pub async fn generate_letsencrypt_certificate(
|
||||||
|
&self,
|
||||||
|
domain: &str,
|
||||||
|
dns_provider_id: Uuid,
|
||||||
|
acme_email: &str,
|
||||||
|
staging: bool,
|
||||||
|
) -> Result<(String, String), AcmeError> {
|
||||||
|
tracing::info!(
|
||||||
|
"Generating Let's Encrypt certificate for domain: {} using DNS challenge",
|
||||||
|
domain
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get database connection
|
||||||
|
let db = self
|
||||||
|
.db
|
||||||
|
.as_ref()
|
||||||
|
.ok_or_else(|| AcmeError::DnsProviderNotFound)?;
|
||||||
|
|
||||||
|
// Get DNS provider
|
||||||
|
let dns_repo = DnsProviderRepository::new(db.clone());
|
||||||
|
let dns_provider = dns_repo
|
||||||
|
.find_by_id(dns_provider_id)
|
||||||
|
.await
|
||||||
|
.map_err(|_| AcmeError::DnsProviderNotFound)?
|
||||||
|
.ok_or_else(|| AcmeError::DnsProviderNotFound)?;
|
||||||
|
|
||||||
|
// Verify provider is Cloudflare (only supported provider for now)
|
||||||
|
if dns_provider.provider_type != DnsProviderType::Cloudflare.as_str() {
|
||||||
|
return Err(AcmeError::CloudflareApi(
|
||||||
|
"Only Cloudflare provider is supported".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !dns_provider.is_active {
|
||||||
|
return Err(AcmeError::DnsProviderNotFound);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine ACME directory URL
|
||||||
|
let directory_url = if staging {
|
||||||
|
"https://acme-staging-v02.api.letsencrypt.org/directory"
|
||||||
|
} else {
|
||||||
|
"https://acme-v02.api.letsencrypt.org/directory"
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create ACME client
|
||||||
|
let mut acme_client = AcmeClient::new(
|
||||||
|
dns_provider.api_token.clone(),
|
||||||
|
acme_email,
|
||||||
|
directory_url.to_string(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Get base domain for DNS operations
|
||||||
|
let base_domain = AcmeClient::get_base_domain(domain)?;
|
||||||
|
|
||||||
|
// Generate certificate
|
||||||
|
let (cert_pem, key_pem) = acme_client.get_certificate(domain, &base_domain).await?;
|
||||||
|
|
||||||
|
tracing::info!(
|
||||||
|
"Successfully generated Let's Encrypt certificate for domain: {}",
|
||||||
|
domain
|
||||||
|
);
|
||||||
|
Ok((cert_pem, key_pem))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Renew certificate by ID (used for manual renewal)
|
||||||
|
pub async fn renew_certificate_by_id(&self, cert_id: Uuid) -> anyhow::Result<(String, String)> {
|
||||||
|
let db = self
|
||||||
|
.db
|
||||||
|
.as_ref()
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Database connection not available"))?;
|
||||||
|
|
||||||
|
// Get the certificate from database
|
||||||
|
let cert_repo = crate::database::repository::CertificateRepository::new(db.clone());
|
||||||
|
let certificate = cert_repo
|
||||||
|
.find_by_id(cert_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("Certificate not found"))?;
|
||||||
|
|
||||||
|
tracing::info!(
|
||||||
|
"Renewing certificate '{}' for domain: {}",
|
||||||
|
certificate.name,
|
||||||
|
certificate.domain
|
||||||
|
);
|
||||||
|
|
||||||
|
match certificate.cert_type.as_str() {
|
||||||
|
"letsencrypt" => {
|
||||||
|
// For Let's Encrypt, we need to regenerate using ACME
|
||||||
|
// Find an active Cloudflare DNS provider
|
||||||
|
let dns_repo = crate::database::repository::DnsProviderRepository::new(db.clone());
|
||||||
|
let providers = dns_repo.find_active_by_type("cloudflare").await?;
|
||||||
|
|
||||||
|
if providers.is_empty() {
|
||||||
|
return Err(anyhow::anyhow!(
|
||||||
|
"No active Cloudflare DNS provider found for Let's Encrypt renewal"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let dns_provider = &providers[0];
|
||||||
|
let acme_email = "admin@example.com"; // TODO: Store this with certificate
|
||||||
|
|
||||||
|
// Generate new certificate
|
||||||
|
let (cert_pem, key_pem) = self
|
||||||
|
.generate_letsencrypt_certificate(
|
||||||
|
&certificate.domain,
|
||||||
|
dns_provider.id,
|
||||||
|
acme_email,
|
||||||
|
false, // Production
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Update in database
|
||||||
|
cert_repo
|
||||||
|
.update_certificate_data(
|
||||||
|
cert_id,
|
||||||
|
&cert_pem,
|
||||||
|
&key_pem,
|
||||||
|
chrono::Utc::now() + chrono::Duration::days(90),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((cert_pem, key_pem))
|
||||||
|
}
|
||||||
|
"self_signed" => {
|
||||||
|
// For self-signed, generate a new one
|
||||||
|
let (cert_pem, key_pem) = self.generate_self_signed(&certificate.domain).await?;
|
||||||
|
|
||||||
|
// Update in database
|
||||||
|
cert_repo
|
||||||
|
.update_certificate_data(
|
||||||
|
cert_id,
|
||||||
|
&cert_pem,
|
||||||
|
&key_pem,
|
||||||
|
chrono::Utc::now() + chrono::Duration::days(365),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok((cert_pem, key_pem))
|
||||||
|
}
|
||||||
|
_ => Err(anyhow::anyhow!(
|
||||||
|
"Cannot renew imported certificates automatically"
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Renew certificate (legacy method for backward compatibility)
|
||||||
|
pub async fn renew_certificate(&self, domain: &str) -> anyhow::Result<(String, String)> {
|
||||||
|
tracing::info!("Renewing certificate for domain: {}", domain);
|
||||||
|
|
||||||
|
// For backward compatibility, just generate a new self-signed certificate
|
||||||
|
self.generate_self_signed(domain).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for CertificateService {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
30
src/services/events.rs
Normal file
30
src/services/events.rs
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
use std::sync::OnceLock;
|
||||||
|
use tokio::sync::broadcast;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub enum SyncEvent {
|
||||||
|
InboundChanged(Uuid), // server_id
|
||||||
|
UserAccessChanged(Uuid), // server_id
|
||||||
|
}
|
||||||
|
|
||||||
|
static EVENT_SENDER: OnceLock<broadcast::Sender<SyncEvent>> = OnceLock::new();
|
||||||
|
|
||||||
|
/// Initialize the event bus and return a receiver
|
||||||
|
pub fn init_event_bus() -> broadcast::Receiver<SyncEvent> {
|
||||||
|
let (tx, rx) = broadcast::channel(100);
|
||||||
|
EVENT_SENDER.set(tx).expect("Event bus already initialized");
|
||||||
|
rx
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Send a sync event (non-blocking)
|
||||||
|
pub fn send_sync_event(event: SyncEvent) {
|
||||||
|
if let Some(sender) = EVENT_SENDER.get() {
|
||||||
|
match sender.send(event.clone()) {
|
||||||
|
Ok(_) => tracing::info!("Event sent: {:?}", event),
|
||||||
|
Err(_) => tracing::warn!("No event receivers"),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
tracing::error!("Event bus not initialized");
|
||||||
|
}
|
||||||
|
}
|
||||||
12
src/services/mod.rs
Normal file
12
src/services/mod.rs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
pub mod acme;
|
||||||
|
pub mod certificates;
|
||||||
|
pub mod events;
|
||||||
|
pub mod tasks;
|
||||||
|
pub mod telegram;
|
||||||
|
pub mod uri_generator;
|
||||||
|
pub mod xray;
|
||||||
|
|
||||||
|
pub use tasks::TaskScheduler;
|
||||||
|
pub use telegram::TelegramService;
|
||||||
|
pub use uri_generator::UriGeneratorService;
|
||||||
|
pub use xray::XrayService;
|
||||||
741
src/services/tasks.rs
Normal file
741
src/services/tasks.rs
Normal file
@@ -0,0 +1,741 @@
|
|||||||
|
use crate::database::repository::{
|
||||||
|
CertificateRepository, InboundTemplateRepository, InboundUsersRepository,
|
||||||
|
ServerInboundRepository, ServerRepository, UserRepository,
|
||||||
|
};
|
||||||
|
use crate::database::DatabaseManager;
|
||||||
|
use crate::services::events::SyncEvent;
|
||||||
|
use crate::services::XrayService;
|
||||||
|
use anyhow::Result;
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::{Arc, RwLock};
|
||||||
|
use tokio_cron_scheduler::{Job, JobScheduler};
|
||||||
|
use tracing::{debug, error, info, warn};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
pub struct TaskScheduler {
|
||||||
|
scheduler: JobScheduler,
|
||||||
|
task_status: Arc<RwLock<HashMap<String, TaskStatus>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Status of a background task
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct TaskStatus {
|
||||||
|
pub name: String,
|
||||||
|
pub description: String,
|
||||||
|
pub schedule: String,
|
||||||
|
pub status: TaskState,
|
||||||
|
pub last_run: Option<DateTime<Utc>>,
|
||||||
|
pub next_run: Option<DateTime<Utc>>,
|
||||||
|
pub total_runs: u64,
|
||||||
|
pub success_count: u64,
|
||||||
|
pub error_count: u64,
|
||||||
|
pub last_error: Option<String>,
|
||||||
|
pub last_duration_ms: Option<u64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum TaskState {
|
||||||
|
Idle,
|
||||||
|
Running,
|
||||||
|
Success,
|
||||||
|
Error,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TaskScheduler {
|
||||||
|
pub async fn new() -> Result<Self> {
|
||||||
|
let scheduler = JobScheduler::new().await?;
|
||||||
|
let task_status = Arc::new(RwLock::new(HashMap::new()));
|
||||||
|
Ok(Self {
|
||||||
|
scheduler,
|
||||||
|
task_status,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get current status of all tasks
|
||||||
|
pub fn get_task_status(&self) -> HashMap<String, TaskStatus> {
|
||||||
|
self.task_status.read().unwrap().clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Start event-driven sync handler
|
||||||
|
pub async fn start_event_handler(
|
||||||
|
db: DatabaseManager,
|
||||||
|
mut event_receiver: tokio::sync::broadcast::Receiver<SyncEvent>,
|
||||||
|
) {
|
||||||
|
let xray_service = XrayService::new();
|
||||||
|
|
||||||
|
tokio::spawn(async move {
|
||||||
|
while let Ok(event) = event_receiver.recv().await {
|
||||||
|
match event {
|
||||||
|
SyncEvent::InboundChanged(server_id)
|
||||||
|
| SyncEvent::UserAccessChanged(server_id) => {
|
||||||
|
if let Err(e) =
|
||||||
|
sync_single_server_by_id(&xray_service, &db, server_id).await
|
||||||
|
{
|
||||||
|
// Get server name for better logging
|
||||||
|
let server_repo = ServerRepository::new(db.connection().clone());
|
||||||
|
let server_name = match server_repo.find_by_id(server_id).await {
|
||||||
|
Ok(Some(server)) => server.name,
|
||||||
|
_ => server_id.to_string(),
|
||||||
|
};
|
||||||
|
error!(
|
||||||
|
"Failed to sync server '{}' ({}) from event: {}",
|
||||||
|
server_name, server_id, e
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn start(&mut self, db: DatabaseManager, xray_service: XrayService) -> Result<()> {
|
||||||
|
// Initialize task status
|
||||||
|
{
|
||||||
|
let mut status = self.task_status.write().unwrap();
|
||||||
|
status.insert(
|
||||||
|
"xray_sync".to_string(),
|
||||||
|
TaskStatus {
|
||||||
|
name: "Xray Synchronization".to_string(),
|
||||||
|
description: "Synchronizes database state with xray servers".to_string(),
|
||||||
|
schedule: "0 * * * * * (every minute)".to_string(),
|
||||||
|
status: TaskState::Idle,
|
||||||
|
last_run: None,
|
||||||
|
next_run: Some(Utc::now() + chrono::Duration::minutes(1)),
|
||||||
|
total_runs: 0,
|
||||||
|
success_count: 0,
|
||||||
|
error_count: 0,
|
||||||
|
last_error: None,
|
||||||
|
last_duration_ms: None,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run initial sync in background to avoid blocking startup
|
||||||
|
let db_initial = db.clone();
|
||||||
|
let xray_service_initial = xray_service.clone();
|
||||||
|
let task_status_initial = self.task_status.clone();
|
||||||
|
|
||||||
|
tokio::spawn(async move {
|
||||||
|
info!("Starting initial xray sync in background...");
|
||||||
|
let start_time = Utc::now();
|
||||||
|
|
||||||
|
// Update status to running
|
||||||
|
{
|
||||||
|
let mut status = task_status_initial.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Running;
|
||||||
|
task.last_run = Some(start_time);
|
||||||
|
task.total_runs += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match sync_xray_state(db_initial, xray_service_initial).await {
|
||||||
|
Ok(_) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status_initial.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Success;
|
||||||
|
task.success_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = None;
|
||||||
|
}
|
||||||
|
info!("Initial xray sync completed successfully in {}ms", duration);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status_initial.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Error;
|
||||||
|
task.error_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = Some(e.to_string());
|
||||||
|
}
|
||||||
|
error!("Initial xray sync failed: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add synchronization task that runs every minute
|
||||||
|
let db_clone = db.clone();
|
||||||
|
let xray_service_clone = xray_service.clone();
|
||||||
|
let task_status_clone = self.task_status.clone();
|
||||||
|
|
||||||
|
let sync_job = Job::new_async("0 */5 * * * *", move |_uuid, _l| {
|
||||||
|
let db = db_clone.clone();
|
||||||
|
let xray_service = xray_service_clone.clone();
|
||||||
|
let task_status = task_status_clone.clone();
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
let start_time = Utc::now();
|
||||||
|
|
||||||
|
// Update status to running
|
||||||
|
{
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Running;
|
||||||
|
task.last_run = Some(start_time);
|
||||||
|
task.total_runs += 1;
|
||||||
|
task.next_run = Some(start_time + chrono::Duration::minutes(1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match sync_xray_state(db, xray_service).await {
|
||||||
|
Ok(_) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Success;
|
||||||
|
task.success_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("xray_sync") {
|
||||||
|
task.status = TaskState::Error;
|
||||||
|
task.error_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = Some(e.to_string());
|
||||||
|
}
|
||||||
|
error!("Scheduled xray sync failed: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})?;
|
||||||
|
|
||||||
|
self.scheduler.add(sync_job).await?;
|
||||||
|
|
||||||
|
// Add certificate renewal task that runs once a day at 2 AM
|
||||||
|
let db_clone_cert = db.clone();
|
||||||
|
let task_status_cert = self.task_status.clone();
|
||||||
|
|
||||||
|
// Initialize certificate renewal task status
|
||||||
|
{
|
||||||
|
let mut status = self.task_status.write().unwrap();
|
||||||
|
status.insert(
|
||||||
|
"cert_renewal".to_string(),
|
||||||
|
TaskStatus {
|
||||||
|
name: "Certificate Renewal".to_string(),
|
||||||
|
description: "Renews Let's Encrypt certificates that expire within 15 days"
|
||||||
|
.to_string(),
|
||||||
|
schedule: "0 0 2 * * * (daily at 2 AM)".to_string(),
|
||||||
|
status: TaskState::Idle,
|
||||||
|
last_run: None,
|
||||||
|
next_run: Some(Utc::now() + chrono::Duration::days(1)),
|
||||||
|
total_runs: 0,
|
||||||
|
success_count: 0,
|
||||||
|
error_count: 0,
|
||||||
|
last_error: None,
|
||||||
|
last_duration_ms: None,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let cert_renewal_job = Job::new_async("0 0 2 * * *", move |_uuid, _l| {
|
||||||
|
let db = db_clone_cert.clone();
|
||||||
|
let task_status = task_status_cert.clone();
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
let start_time = Utc::now();
|
||||||
|
|
||||||
|
// Update task status to running
|
||||||
|
{
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("cert_renewal") {
|
||||||
|
task.status = TaskState::Running;
|
||||||
|
task.last_run = Some(Utc::now());
|
||||||
|
task.total_runs += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match check_and_renew_certificates(&db).await {
|
||||||
|
Ok(_) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("cert_renewal") {
|
||||||
|
task.status = TaskState::Success;
|
||||||
|
task.success_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let duration = (Utc::now() - start_time).num_milliseconds() as u64;
|
||||||
|
let mut status = task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut("cert_renewal") {
|
||||||
|
task.status = TaskState::Error;
|
||||||
|
task.error_count += 1;
|
||||||
|
task.last_duration_ms = Some(duration);
|
||||||
|
task.last_error = Some(e.to_string());
|
||||||
|
}
|
||||||
|
error!("Certificate renewal task failed: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})?;
|
||||||
|
|
||||||
|
self.scheduler.add(cert_renewal_job).await?;
|
||||||
|
|
||||||
|
// Also run certificate check on startup
|
||||||
|
info!("Running initial certificate renewal check...");
|
||||||
|
tokio::spawn(async move {
|
||||||
|
if let Err(e) = check_and_renew_certificates(&db).await {
|
||||||
|
error!("Initial certificate renewal check failed: {}", e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
self.scheduler.start().await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_task_status(&self, task_id: &str, state: TaskState, duration_ms: Option<u64>) {
|
||||||
|
let mut status = self.task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut(task_id) {
|
||||||
|
task.status = state;
|
||||||
|
task.last_run = Some(Utc::now());
|
||||||
|
task.total_runs += 1;
|
||||||
|
task.success_count += 1;
|
||||||
|
task.last_duration_ms = duration_ms;
|
||||||
|
task.last_error = None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_task_status_with_error(
|
||||||
|
&self,
|
||||||
|
task_id: &str,
|
||||||
|
error: String,
|
||||||
|
duration_ms: Option<u64>,
|
||||||
|
) {
|
||||||
|
let mut status = self.task_status.write().unwrap();
|
||||||
|
if let Some(task) = status.get_mut(task_id) {
|
||||||
|
task.status = TaskState::Error;
|
||||||
|
task.last_run = Some(Utc::now());
|
||||||
|
task.total_runs += 1;
|
||||||
|
task.error_count += 1;
|
||||||
|
task.last_duration_ms = duration_ms;
|
||||||
|
task.last_error = Some(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn shutdown(&mut self) -> Result<()> {
|
||||||
|
self.scheduler.shutdown().await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Synchronize xray server state with database state
|
||||||
|
async fn sync_xray_state(db: DatabaseManager, xray_service: XrayService) -> Result<()> {
|
||||||
|
let server_repo = ServerRepository::new(db.connection().clone());
|
||||||
|
    let inbound_repo = ServerInboundRepository::new(db.connection().clone());
    let template_repo = InboundTemplateRepository::new(db.connection().clone());

    // Get all servers from database
    let servers = match server_repo.find_all().await {
        Ok(servers) => servers,
        Err(e) => {
            error!("Failed to fetch servers: {}", e);
            return Err(e.into());
        }
    };

    for server in servers {
        let endpoint = server.get_grpc_endpoint();

        // Test connection first
        match xray_service.test_connection(server.id, &endpoint).await {
            Ok(false) => {
                warn!(
                    "Cannot connect to server {} at {}, skipping",
                    server.name, endpoint
                );
                continue;
            }
            Err(e) => {
                error!("Error testing connection to server {}: {}", server.name, e);
                continue;
            }
            _ => {}
        }

        // Get desired inbounds from database
        let desired_inbounds =
            match get_desired_inbounds_from_db(&db, &server, &inbound_repo, &template_repo).await {
                Ok(inbounds) => inbounds,
                Err(e) => {
                    error!(
                        "Failed to get desired inbounds for server {}: {}",
                        server.name, e
                    );
                    continue;
                }
            };

        // Synchronize inbounds
        if let Err(e) =
            sync_server_inbounds(&xray_service, server.id, &endpoint, &desired_inbounds).await
        {
            error!("Failed to sync inbounds for server {}: {}", server.name, e);
        }
    }

    Ok(())
}

/// Get desired inbounds configuration from database
async fn get_desired_inbounds_from_db(
    db: &DatabaseManager,
    server: &crate::database::entities::server::Model,
    inbound_repo: &ServerInboundRepository,
    template_repo: &InboundTemplateRepository,
) -> Result<HashMap<String, DesiredInbound>> {
    // Get all inbounds for this server
    let inbounds = inbound_repo.find_by_server_id(server.id).await?;
    let mut desired_inbounds = HashMap::new();

    for inbound in inbounds {
        // Get template for this inbound
        let template = match template_repo.find_by_id(inbound.template_id).await? {
            Some(template) => template,
            None => {
                warn!(
                    "Template {} not found for inbound {}, skipping",
                    inbound.template_id, inbound.tag
                );
                continue;
            }
        };

        // Get users for this inbound
        let users = get_users_for_inbound(db, inbound.id).await?;

        // Get port from template or override
        let port = inbound.port_override.unwrap_or(template.default_port);

        // Get certificate if specified
        let (cert_pem, key_pem) = if let Some(cert_id) = inbound.certificate_id {
            match load_certificate_from_db(db, inbound.certificate_id).await {
                Ok((cert, key)) => {
                    // Get certificate name for better logging
                    let cert_repo = CertificateRepository::new(db.connection().clone());
                    let cert_name = match cert_repo.find_by_id(cert_id).await {
                        Ok(Some(cert)) => cert.name,
                        _ => cert_id.to_string(),
                    };
                    info!(
                        "Loaded certificate '{}' ({}) for inbound '{}' on server '{}', has_cert={}, has_key={}",
                        cert_name,
                        cert_id,
                        inbound.tag,
                        server.name,
                        cert.is_some(),
                        key.is_some()
                    );
                    (cert, key)
                }
                Err(e) => {
                    // Get certificate name for better logging
                    let cert_repo = CertificateRepository::new(db.connection().clone());
                    let cert_name = match cert_repo.find_by_id(cert_id).await {
                        Ok(Some(cert)) => cert.name,
                        _ => cert_id.to_string(),
                    };
                    warn!(
                        "Failed to load certificate '{}' ({}) for inbound '{}' on server '{}': {}",
                        cert_name, cert_id, inbound.tag, server.name, e
                    );
                    (None, None)
                }
            }
        } else {
            debug!(
                "No certificate configured for inbound '{}' on server '{}'",
                inbound.tag, server.name
            );
            (None, None)
        };

        let desired_inbound = DesiredInbound {
            tag: inbound.tag.clone(),
            port,
            protocol: template.protocol.clone(),
            settings: template.base_settings.clone(),
            stream_settings: template.stream_settings.clone(),
            users,
            cert_pem,
            key_pem,
        };

        desired_inbounds.insert(inbound.tag.clone(), desired_inbound);
    }

    Ok(desired_inbounds)
}

/// Get users for specific inbound from database
async fn get_users_for_inbound(db: &DatabaseManager, inbound_id: Uuid) -> Result<Vec<XrayUser>> {
    let inbound_users_repo = InboundUsersRepository::new(db.connection().clone());

    let inbound_users = inbound_users_repo
        .find_active_by_inbound_id(inbound_id)
        .await?;

    // Get user details to generate emails
    let user_repo = UserRepository::new(db.connection().clone());

    let mut users: Vec<XrayUser> = Vec::new();
    for inbound_user in inbound_users {
        if let Some(user) = user_repo.find_by_id(inbound_user.user_id).await? {
            let email = inbound_user.generate_client_email(&user.name);
            users.push(XrayUser {
                id: inbound_user.xray_user_id,
                email,
                level: inbound_user.level,
            });
        }
    }

    Ok(users)
}

/// Load certificate from database
async fn load_certificate_from_db(
    db: &DatabaseManager,
    cert_id: Option<Uuid>,
) -> Result<(Option<String>, Option<String>)> {
    let cert_id = match cert_id {
        Some(id) => id,
        None => return Ok((None, None)),
    };

    let cert_repo = CertificateRepository::new(db.connection().clone());

    match cert_repo.find_by_id(cert_id).await? {
        Some(cert) => {
            debug!(
                "Loaded certificate '{}' ({}) successfully",
                cert.name, cert.id
            );
            Ok((Some(cert.certificate_pem()), Some(cert.private_key_pem())))
        }
        None => {
            warn!("Certificate {} not found", cert_id);
            Ok((None, None))
        }
    }
}

/// Synchronize inbounds for a single server
async fn sync_server_inbounds(
    xray_service: &XrayService,
    server_id: Uuid,
    endpoint: &str,
    desired_inbounds: &HashMap<String, DesiredInbound>,
) -> Result<()> {
    // Use optimized batch sync with single client
    xray_service
        .sync_server_inbounds_optimized(server_id, endpoint, desired_inbounds)
        .await
}

/// Sync a single server by ID (for event-driven sync)
async fn sync_single_server_by_id(
    xray_service: &XrayService,
    db: &DatabaseManager,
    server_id: Uuid,
) -> Result<()> {
    let server_repo = ServerRepository::new(db.connection().clone());
    let inbound_repo = ServerInboundRepository::new(db.connection().clone());
    let template_repo = InboundTemplateRepository::new(db.connection().clone());

    // Get server
    let server = match server_repo.find_by_id(server_id).await? {
        Some(server) => server,
        None => {
            warn!("Server {} not found for sync", server_id);
            return Ok(());
        }
    };

    // For now, sync all servers (can add active/inactive flag later)

    // Get desired inbounds from database
    let desired_inbounds =
        get_desired_inbounds_from_db(db, &server, &inbound_repo, &template_repo).await?;

    // Build endpoint
    let endpoint = server.get_grpc_endpoint();

    // Sync server
    sync_server_inbounds(xray_service, server_id, &endpoint, &desired_inbounds).await?;

    Ok(())
}

/// Represents desired inbound configuration from database
#[derive(Debug, Clone)]
pub struct DesiredInbound {
    pub tag: String,
    pub port: i32,
    pub protocol: String,
    pub settings: Value,
    pub stream_settings: Value,
    pub users: Vec<XrayUser>,
    pub cert_pem: Option<String>,
    pub key_pem: Option<String>,
}

/// Represents xray user configuration
#[derive(Debug, Clone)]
pub struct XrayUser {
    pub id: String,
    pub email: String,
    pub level: i32,
}

/// Check and renew certificates that expire within 15 days
async fn check_and_renew_certificates(db: &DatabaseManager) -> Result<()> {
    use crate::database::repository::DnsProviderRepository;
    use crate::services::certificates::CertificateService;

    info!("Starting certificate renewal check...");

    let cert_repo = CertificateRepository::new(db.connection().clone());
    let dns_repo = DnsProviderRepository::new(db.connection().clone());
    let cert_service = CertificateService::with_db(db.connection().clone());

    // Get all certificates
    let certificates = cert_repo.find_all().await?;
    let mut renewed_count = 0;
    let mut checked_count = 0;

    for cert in certificates {
        // Only check Let's Encrypt certificates with auto_renew enabled
        if cert.cert_type != "letsencrypt" || !cert.auto_renew {
            continue;
        }

        checked_count += 1;

        // Check if certificate expires within 15 days
        if cert.expires_soon(15) {
            info!(
                "Certificate '{}' (ID: {}) expires at {} - renewing...",
                cert.name, cert.id, cert.expires_at
            );

            // Find the DNS provider used for this certificate
            // For now, we'll use the first active Cloudflare provider
            // In production, you might want to store the provider ID with the certificate
            let providers = dns_repo.find_active_by_type("cloudflare").await?;

            if providers.is_empty() {
                error!(
                    "Cannot renew certificate '{}': No active Cloudflare DNS provider found",
                    cert.name
                );
                continue;
            }

            let dns_provider = &providers[0];

            // Need to get the ACME email - for now using a default
            // In production, this should be stored with the certificate
            let acme_email = "admin@example.com"; // TODO: Store this with certificate

            // Attempt to renew the certificate
            match cert_service
                .generate_letsencrypt_certificate(
                    &cert.domain,
                    dns_provider.id,
                    acme_email,
                    false, // Use production Let's Encrypt
                )
                .await
            {
                Ok((new_cert_pem, new_key_pem)) => {
                    // Update the certificate in database
                    match cert_repo
                        .update_certificate_data(
                            cert.id,
                            &new_cert_pem,
                            &new_key_pem,
                            chrono::Utc::now() + chrono::Duration::days(90), // Let's Encrypt certs are valid for 90 days
                        )
                        .await
                    {
                        Ok(_) => {
                            info!("Successfully renewed certificate '{}'", cert.name);
                            renewed_count += 1;

                            // Trigger sync for all servers using this certificate
                            // This will be done via the event system
                            if let Err(e) = trigger_cert_renewal_sync(db, cert.id).await {
                                error!("Failed to trigger sync after certificate renewal: {}", e);
                            }
                        }
                        Err(e) => {
                            error!(
                                "Failed to save renewed certificate '{}' to database: {}",
                                cert.name, e
                            );
                        }
                    }
                }
                Err(e) => {
                    error!("Failed to renew certificate '{}': {:?}", cert.name, e);
                }
            }
        } else {
            debug!(
                "Certificate '{}' expires at {} - no renewal needed yet",
                cert.name, cert.expires_at
            );
        }
    }

    info!(
        "Certificate renewal check completed: checked {}, renewed {}",
        checked_count, renewed_count
    );

    Ok(())
}

/// Trigger sync for all servers that use a specific certificate
async fn trigger_cert_renewal_sync(db: &DatabaseManager, cert_id: Uuid) -> Result<()> {
    use crate::services::events::send_sync_event;
    use crate::services::events::SyncEvent;

    let inbound_repo = ServerInboundRepository::new(db.connection().clone());

    // Find all server inbounds that use this certificate
    let inbounds = inbound_repo.find_by_certificate_id(cert_id).await?;

    // Collect unique server IDs
    let mut server_ids = std::collections::HashSet::new();
    for inbound in inbounds {
        server_ids.insert(inbound.server_id);
    }

    // Trigger sync for each server
    for server_id in server_ids {
        // Get server name for better logging
        let server_repo = ServerRepository::new(db.connection().clone());
        let server_name = match server_repo.find_by_id(server_id).await {
            Ok(Some(server)) => server.name,
            _ => server_id.to_string(),
        };
        info!(
            "Triggering sync for server '{}' ({}) after certificate renewal",
            server_name, server_id
        );
        send_sync_event(SyncEvent::InboundChanged(server_id));
    }

    Ok(())
}
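For orientation, a minimal sketch of how these background jobs could be driven from the service entry point. This is not part of the diff: the scheduler, the intervals, and the name `sync_all_servers` (standing in for the sync function whose tail is shown above) are assumptions.

// Hypothetical scheduler sketch (not from the repository): drives the periodic
// inbound sync and the certificate renewal check defined above.
async fn run_background_jobs(xray_service: XrayService, db: DatabaseManager) {
    let mut sync_tick = tokio::time::interval(std::time::Duration::from_secs(30));
    let mut cert_tick = tokio::time::interval(std::time::Duration::from_secs(60 * 60));
    loop {
        tokio::select! {
            _ = sync_tick.tick() => {
                // `sync_all_servers` is a placeholder name for the sync entry point.
                if let Err(e) = sync_all_servers(&xray_service, &db).await {
                    tracing::error!("Periodic sync failed: {}", e);
                }
            }
            _ = cert_tick.tick() => {
                if let Err(e) = check_and_renew_certificates(&db).await {
                    tracing::error!("Certificate renewal check failed: {}", e);
                }
            }
        }
    }
}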
src/services/telegram/bot.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
use teloxide::{prelude::*, Bot};
use tokio::sync::oneshot;

use super::handlers::{self, Command};
use crate::config::AppConfig;
use crate::database::DatabaseManager;

/// Run the bot polling loop
pub async fn run_polling(
    bot: Bot,
    db: DatabaseManager,
    app_config: AppConfig,
    shutdown_rx: oneshot::Receiver<()>,
) {
    tracing::info!("Starting Telegram bot polling...");

    let handler = dptree::entry()
        .branch(
            Update::filter_message()
                .branch(
                    dptree::entry()
                        .filter_command::<Command>()
                        .endpoint(handlers::handle_command),
                )
                .branch(dptree::endpoint(handlers::handle_message)),
        )
        .branch(Update::filter_callback_query().endpoint(handlers::handle_callback_query));

    let mut dispatcher = Dispatcher::builder(bot.clone(), handler)
        .dependencies(dptree::deps![db, app_config])
        .enable_ctrlc_handler()
        .build();

    // Run dispatcher with shutdown signal
    tokio::select! {
        _ = dispatcher.dispatch() => {
            tracing::info!("Telegram bot polling stopped");
        }
        _ = shutdown_rx => {
            tracing::info!("Telegram bot received shutdown signal");
        }
    }
}
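A hedged sketch of how a caller might wire up `run_polling`; the surrounding application code is not part of this diff, and `bot`, `db`, and `app_config` are assumed to be constructed elsewhere.

// Hypothetical caller sketch (not from the repository).
async fn start_bot(bot: Bot, db: DatabaseManager, app_config: AppConfig) {
    let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>();
    let handle = tokio::spawn(run_polling(bot, db, app_config, shutdown_rx));

    // ... later, e.g. on SIGTERM, request a clean stop:
    let _ = shutdown_tx.send(());
    let _ = handle.await;
}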
src/services/telegram/error.rs (new file, 46 lines)
@@ -0,0 +1,46 @@
use thiserror::Error;

#[derive(Error, Debug)]
pub enum TelegramError {
    #[error("Bot is not configured")]
    NotConfigured,

    #[error("Bot is not running")]
    NotRunning,

    #[error("Invalid bot token")]
    InvalidToken,

    #[error("User not found")]
    UserNotFound,

    #[error("User is not authorized")]
    Unauthorized,

    #[error("Database error: {0}")]
    Database(String),

    #[error("Telegram API error: {0}")]
    TelegramApi(String),

    #[error("Other error: {0}")]
    Other(String),
}

impl From<teloxide::RequestError> for TelegramError {
    fn from(err: teloxide::RequestError) -> Self {
        Self::TelegramApi(err.to_string())
    }
}

impl From<sea_orm::DbErr> for TelegramError {
    fn from(err: sea_orm::DbErr) -> Self {
        Self::Database(err.to_string())
    }
}

impl From<anyhow::Error> for TelegramError {
    fn from(err: anyhow::Error) -> Self {
        Self::Other(err.to_string())
    }
}
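The `From` impls above are what let handlers use `?` on database, Telegram API, and `anyhow` errors while returning `TelegramError`. A trivial illustration (not from the repository):

// Illustrative only: the From impls lift lower-level errors into TelegramError,
// so `err?` inside a function returning Result<_, TelegramError> does this implicitly.
fn lift_db_error(err: sea_orm::DbErr) -> TelegramError {
    TelegramError::from(err) // becomes TelegramError::Database("...")
}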
src/services/telegram/handlers/admin.rs (new file, 1657 lines)
(File diff suppressed because it is too large.)
src/services/telegram/handlers/mod.rs (new file, 256 lines)
@@ -0,0 +1,256 @@
pub mod admin;
pub mod types;
pub mod user;

// Re-export main handler functions for easier access
pub use admin::*;
pub use types::*;
pub use user::*;

use crate::config::AppConfig;
use crate::database::DatabaseManager;
use teloxide::{prelude::*, types::CallbackQuery};

/// Handle bot commands
pub async fn handle_command(
    bot: Bot,
    msg: Message,
    cmd: Command,
    db: DatabaseManager,
    _app_config: AppConfig,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let chat_id = msg.chat.id;
    let from = &msg.from.ok_or("No user info")?;
    let telegram_id = from.id.0 as i64;
    let user_repo = crate::database::repository::UserRepository::new(db.connection());

    match cmd {
        Command::Start => {
            handle_start(bot, chat_id, telegram_id, from, &user_repo, &db).await?;
        }
        Command::Requests => {
            // Check if user is admin
            if user_repo
                .is_telegram_id_admin(telegram_id)
                .await
                .unwrap_or(false)
            {
                // Create a fake callback query for admin requests
                // This is a workaround since the admin_requests function expects a callback query
                // In practice, we could refactor this to not need a callback query
                tracing::info!("Admin {} requested to view requests", telegram_id);

                let message = "📋 Use the inline keyboard to view recent requests.";
                let keyboard = teloxide::types::InlineKeyboardMarkup::new(vec![vec![
                    teloxide::types::InlineKeyboardButton::callback(
                        "📋 Recent Requests",
                        "admin_requests",
                    ),
                ]]);

                bot.send_message(chat_id, message)
                    .reply_markup(keyboard)
                    .await?;
            } else {
                let lang = get_user_language(from);
                let l10n = super::localization::LocalizationService::new();
                bot.send_message(chat_id, l10n.get(lang, "unauthorized"))
                    .await?;
            }
        }
        Command::Stats => {
            // Check if user is admin
            if user_repo
                .is_telegram_id_admin(telegram_id)
                .await
                .unwrap_or(false)
            {
                handle_stats(bot, chat_id, &db).await?;
            } else {
                let lang = get_user_language(from);
                let l10n = super::localization::LocalizationService::new();
                bot.send_message(chat_id, l10n.get(lang, "unauthorized"))
                    .await?;
            }
        }
        Command::Broadcast { message } => {
            // Check if user is admin
            if user_repo
                .is_telegram_id_admin(telegram_id)
                .await
                .unwrap_or(false)
            {
                handle_broadcast(bot, chat_id, message, &user_repo).await?;
            } else {
                let lang = get_user_language(from);
                let l10n = super::localization::LocalizationService::new();
                bot.send_message(chat_id, l10n.get(lang, "unauthorized"))
                    .await?;
            }
        }
    }

    Ok(())
}

/// Handle regular messages (fallback)
pub async fn handle_message(
    bot: Bot,
    msg: Message,
    db: DatabaseManager,
    _app_config: AppConfig,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let chat_id = msg.chat.id;
    let from = msg.from.as_ref().ok_or("No user info")?;
    let telegram_id = from.id.0 as i64;
    let user_repo = crate::database::repository::UserRepository::new(db.connection());

    // For non-command messages, just show the start menu
    handle_start(bot, chat_id, telegram_id, from, &user_repo, &db).await?;

    Ok(())
}

/// Handle callback queries from inline keyboards
pub async fn handle_callback_query(
    bot: Bot,
    q: CallbackQuery,
    db: DatabaseManager,
    app_config: AppConfig,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Wrap all callback handling in a try-catch to send main menu on any error
    let result = async {
        if let Some(data) = &q.data {
            if let Some(callback_data) = CallbackData::parse(data) {
                match callback_data {
                    CallbackData::RequestAccess => {
                        handle_request_access(bot.clone(), &q, &db).await?;
                    }
                    CallbackData::MyConfigs => {
                        handle_my_configs_edit(bot.clone(), &q, &db).await?;
                    }
                    CallbackData::SubscriptionLink => {
                        handle_subscription_link(bot.clone(), &q, &db, &app_config).await?;
                    }
                    CallbackData::Support => {
                        handle_support(bot.clone(), &q).await?;
                    }
                    CallbackData::AdminRequests => {
                        handle_admin_requests_edit(bot.clone(), &q, &db).await?;
                    }
                    CallbackData::RequestList(page) => {
                        handle_request_list(bot.clone(), &q, &db, page).await?;
                    }
                    CallbackData::ApproveRequest(request_id) => {
                        handle_approve_request(bot.clone(), &q, &request_id, &db).await?;
                    }
                    CallbackData::DeclineRequest(request_id) => {
                        handle_decline_request(bot.clone(), &q, &request_id, &db).await?;
                    }
                    CallbackData::ViewRequest(request_id) => {
                        handle_view_request(bot.clone(), &q, &request_id, &db).await?;
                    }
                    CallbackData::ShowServerConfigs(encoded_server_name) => {
                        handle_show_server_configs(bot.clone(), &q, &encoded_server_name, &db)
                            .await?;
                    }
                    CallbackData::SelectServerAccess(request_id) => {
                        // The request_id is now the full UUID from the mapping
                        let short_id = types::generate_short_request_id(&request_id);
                        handle_select_server_access(bot.clone(), &q, &short_id, &db).await?;
                    }
                    CallbackData::ToggleServer(request_id, server_id) => {
                        // Both IDs are now full UUIDs from the mapping
                        let short_request_id = types::generate_short_request_id(&request_id);
                        let short_server_id = types::generate_short_server_id(&server_id);
                        handle_toggle_server(
                            bot.clone(),
                            &q,
                            &short_request_id,
                            &short_server_id,
                            &db,
                        )
                        .await?;
                    }
                    CallbackData::ApplyServerAccess(request_id) => {
                        // The request_id is now the full UUID from the mapping
                        let short_id = types::generate_short_request_id(&request_id);
                        handle_apply_server_access(bot.clone(), &q, &short_id, &db).await?;
                    }
                    CallbackData::Back => {
                        // Back to main menu - edit the existing message
                        handle_start_edit(bot.clone(), &q, &db).await?;
                    }
                    CallbackData::BackToConfigs => {
                        handle_my_configs_edit(bot.clone(), &q, &db).await?;
                    }
                    CallbackData::BackToRequests => {
                        handle_admin_requests_edit(bot.clone(), &q, &db).await?;
                    }
                    CallbackData::ManageUsers => {
                        handle_manage_users(bot.clone(), &q, &db).await?;
                    }
                    CallbackData::UserList(page) => {
                        handle_user_list(bot.clone(), &q, &db, page).await?;
                    }
                    CallbackData::UserDetails(user_id) => {
                        handle_user_details(bot.clone(), &q, &db, &user_id).await?;
                    }
                    CallbackData::UserManageAccess(user_id) => {
                        handle_user_manage_access(bot.clone(), &q, &db, &user_id).await?;
                    }
                    CallbackData::UserToggleServer(user_id, server_id) => {
                        handle_user_toggle_server(bot.clone(), &q, &db, &user_id, &server_id)
                            .await?;
                    }
                    CallbackData::UserApplyAccess(user_id) => {
                        handle_user_apply_access(bot.clone(), &q, &db, &user_id).await?;
                    }
                    CallbackData::BackToUsers(page) => {
                        handle_user_list(bot.clone(), &q, &db, page).await?;
                    }
                    CallbackData::BackToMenu => {
                        handle_start_edit(bot.clone(), &q, &db).await?;
                    }
                }
            } else {
                tracing::warn!("Unknown callback data: {}", data);
                return Err("Invalid callback data".into());
            }
        }
        Ok::<(), Box<dyn std::error::Error + Send + Sync>>(())
    }
    .await;

    // If any error occurred, send main menu and answer callback query
    if let Err(e) = result {
        tracing::warn!(
            "Error handling callback query '{}': {}",
            q.data.as_deref().unwrap_or("None"),
            e
        );

        // Answer the callback query first to remove loading state
        let _ = bot.answer_callback_query(q.id.clone()).await;

        // Try to send main menu
        if let Some(message) = q.message {
            let chat_id = message.chat().id;
            let from = &q.from;
            let telegram_id = from.id.0 as i64;
            let user_repo = crate::database::repository::UserRepository::new(db.connection());

            // Try to send main menu - if this fails too, just log it
            if let Err(menu_error) =
                handle_start(bot, chat_id, telegram_id, from, &user_repo, &db).await
            {
                tracing::error!(
                    "Failed to send main menu after callback error: {}",
                    menu_error
                );
            }
        }
    }

    Ok(())
}
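The same admin check is repeated for each admin command above; a small helper along these lines (just a sketch, not part of the diff) would remove the duplication while using only the repository call already shown:

// Sketch only: shared admin gate for the command handlers above.
async fn require_admin(
    user_repo: &crate::database::repository::UserRepository,
    telegram_id: i64,
) -> bool {
    user_repo
        .is_telegram_id_admin(telegram_id)
        .await
        .unwrap_or(false)
}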
src/services/telegram/handlers/types.rs (new file, 291 lines)
@@ -0,0 +1,291 @@
use teloxide::types::{InlineKeyboardButton, InlineKeyboardMarkup, User};
use teloxide::utils::command::BotCommands;

use super::super::localization::{Language, LocalizationService};
use std::collections::HashMap;
use std::sync::{Arc, Mutex, OnceLock};

/// Available bot commands - keeping only admin commands
#[derive(BotCommands, Clone)]
#[command(rename_rule = "lowercase", description = "Admin commands:")]
pub enum Command {
    #[command(description = "Start the bot")]
    Start,
    #[command(description = "[Admin] Manage user requests")]
    Requests,
    #[command(description = "[Admin] Show statistics")]
    Stats,
    #[command(description = "[Admin] Broadcast message", parse_with = "split")]
    Broadcast { message: String },
}

/// Callback data for inline keyboard buttons
#[derive(Debug, Clone)]
pub enum CallbackData {
    RequestAccess,
    MyConfigs,
    SubscriptionLink,
    Support,
    AdminRequests,
    RequestList(u32),                 // page number
    ApproveRequest(String),           // request_id
    DeclineRequest(String),           // request_id
    ViewRequest(String),              // request_id
    ShowServerConfigs(String),        // server_name encoded
    Back,
    BackToConfigs,                    // Back to configs list from server view
    BackToRequests,                   // Back to requests list from request view
    SelectServerAccess(String),       // request_id - show server selection after approval
    ToggleServer(String, String),     // request_id, server_id - toggle server selection
    ApplyServerAccess(String),        // request_id - apply selected servers
    ManageUsers,
    UserList(u32),                    // page number
    UserDetails(String),              // user_id
    UserManageAccess(String),         // user_id
    UserToggleServer(String, String), // user_id, server_id
    UserApplyAccess(String),          // user_id
    BackToUsers(u32),                 // page number
    BackToMenu,
}

impl CallbackData {
    pub fn parse(data: &str) -> Option<Self> {
        match data {
            "request_access" => Some(CallbackData::RequestAccess),
            "my_configs" => Some(CallbackData::MyConfigs),
            "subscription_link" => Some(CallbackData::SubscriptionLink),
            "support" => Some(CallbackData::Support),
            "admin_requests" => Some(CallbackData::AdminRequests),
            "manage_users" => Some(CallbackData::ManageUsers),
            "back" => Some(CallbackData::Back),
            "back_to_configs" => Some(CallbackData::BackToConfigs),
            "back_to_requests" => Some(CallbackData::BackToRequests),
            "back_to_menu" => Some(CallbackData::BackToMenu),
            _ => {
                if let Some(id) = data.strip_prefix("approve:") {
                    Some(CallbackData::ApproveRequest(id.to_string()))
                } else if let Some(id) = data.strip_prefix("decline:") {
                    Some(CallbackData::DeclineRequest(id.to_string()))
                } else if let Some(id) = data.strip_prefix("view_request:") {
                    Some(CallbackData::ViewRequest(id.to_string()))
                } else if let Some(server_name) = data.strip_prefix("server_configs:") {
                    Some(CallbackData::ShowServerConfigs(server_name.to_string()))
                } else if let Some(short_id) = data.strip_prefix("s:") {
                    get_full_request_id(short_id).map(CallbackData::SelectServerAccess)
                } else if let Some(rest) = data.strip_prefix("t:") {
                    let parts: Vec<&str> = rest.split(':').collect();
                    if parts.len() == 2 {
                        if let (Some(request_id), Some(server_id)) =
                            (get_full_request_id(parts[0]), get_full_server_id(parts[1]))
                        {
                            Some(CallbackData::ToggleServer(request_id, server_id))
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                } else if let Some(short_id) = data.strip_prefix("a:") {
                    get_full_request_id(short_id).map(CallbackData::ApplyServerAccess)
                } else if let Some(page_str) = data.strip_prefix("request_list:") {
                    page_str.parse::<u32>().ok().map(CallbackData::RequestList)
                } else if let Some(page_str) = data.strip_prefix("user_list:") {
                    page_str.parse::<u32>().ok().map(CallbackData::UserList)
                } else if let Some(short_user_id) = data.strip_prefix("user_details:") {
                    get_full_user_id(short_user_id).map(CallbackData::UserDetails)
                } else if let Some(short_user_id) = data.strip_prefix("user_manage:") {
                    get_full_user_id(short_user_id).map(CallbackData::UserManageAccess)
                } else if let Some(rest) = data.strip_prefix("user_toggle:") {
                    let parts: Vec<&str> = rest.split(':').collect();
                    if parts.len() == 2 {
                        if let (Some(user_id), Some(server_id)) =
                            (get_full_user_id(parts[0]), get_full_server_id(parts[1]))
                        {
                            Some(CallbackData::UserToggleServer(user_id, server_id))
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                } else if let Some(short_user_id) = data.strip_prefix("user_apply:") {
                    get_full_user_id(short_user_id).map(CallbackData::UserApplyAccess)
                } else if let Some(page_str) = data.strip_prefix("back_users:") {
                    page_str.parse::<u32>().ok().map(CallbackData::BackToUsers)
                } else {
                    None
                }
            }
        }
    }
}

// Global storage for selected servers per request
static SELECTED_SERVERS: OnceLock<Arc<Mutex<HashMap<String, Vec<String>>>>> = OnceLock::new();

// Global storage for request ID mappings (short ID -> full UUID)
static REQUEST_ID_MAP: OnceLock<Arc<Mutex<HashMap<String, String>>>> = OnceLock::new();
static REQUEST_COUNTER: OnceLock<Arc<Mutex<u32>>> = OnceLock::new();

// Global storage for server ID mappings (short ID -> full UUID)
static SERVER_ID_MAP: OnceLock<Arc<Mutex<HashMap<String, String>>>> = OnceLock::new();
static SERVER_COUNTER: OnceLock<Arc<Mutex<u32>>> = OnceLock::new();

// Global storage for user ID mappings (short ID -> full UUID)
static USER_ID_MAP: OnceLock<Arc<Mutex<HashMap<String, String>>>> = OnceLock::new();
static USER_COUNTER: OnceLock<Arc<Mutex<u32>>> = OnceLock::new();

pub fn get_selected_servers() -> &'static Arc<Mutex<HashMap<String, Vec<String>>>> {
    SELECTED_SERVERS.get_or_init(|| Arc::new(Mutex::new(HashMap::new())))
}

pub fn get_request_id_map() -> &'static Arc<Mutex<HashMap<String, String>>> {
    REQUEST_ID_MAP.get_or_init(|| Arc::new(Mutex::new(HashMap::new())))
}

pub fn get_request_counter() -> &'static Arc<Mutex<u32>> {
    REQUEST_COUNTER.get_or_init(|| Arc::new(Mutex::new(0)))
}

pub fn get_server_id_map() -> &'static Arc<Mutex<HashMap<String, String>>> {
    SERVER_ID_MAP.get_or_init(|| Arc::new(Mutex::new(HashMap::new())))
}

pub fn get_server_counter() -> &'static Arc<Mutex<u32>> {
    SERVER_COUNTER.get_or_init(|| Arc::new(Mutex::new(0)))
}

pub fn get_user_id_map() -> &'static Arc<Mutex<HashMap<String, String>>> {
    USER_ID_MAP.get_or_init(|| Arc::new(Mutex::new(HashMap::new())))
}

pub fn get_user_counter() -> &'static Arc<Mutex<u32>> {
    USER_COUNTER.get_or_init(|| Arc::new(Mutex::new(0)))
}

/// Generate a short ID for a request UUID and store the mapping
pub fn generate_short_request_id(request_uuid: &str) -> String {
    let mut counter = get_request_counter().lock().unwrap();
    let mut map = get_request_id_map().lock().unwrap();

    // Check if we already have a short ID for this UUID
    for (short_id, uuid) in map.iter() {
        if uuid == request_uuid {
            return short_id.clone();
        }
    }

    // Generate new short ID
    *counter += 1;
    let short_id = format!("r{}", counter);
    map.insert(short_id.clone(), request_uuid.to_string());

    short_id
}

/// Get full UUID from short ID
pub fn get_full_request_id(short_id: &str) -> Option<String> {
    let map = get_request_id_map().lock().unwrap();
    map.get(short_id).cloned()
}

/// Generate a short ID for a server UUID and store the mapping
pub fn generate_short_server_id(server_uuid: &str) -> String {
    let mut counter = get_server_counter().lock().unwrap();
    let mut map = get_server_id_map().lock().unwrap();

    // Check if we already have a short ID for this UUID
    for (short_id, uuid) in map.iter() {
        if uuid == server_uuid {
            return short_id.clone();
        }
    }

    // Generate new short ID
    *counter += 1;
    let short_id = format!("s{}", counter);
    map.insert(short_id.clone(), server_uuid.to_string());

    short_id
}

/// Get full server UUID from short ID
pub fn get_full_server_id(short_id: &str) -> Option<String> {
    let map = get_server_id_map().lock().unwrap();
    map.get(short_id).cloned()
}

/// Generate a short ID for a user UUID and store the mapping
pub fn generate_short_user_id(user_uuid: &str) -> String {
    let mut counter = get_user_counter().lock().unwrap();
    let mut map = get_user_id_map().lock().unwrap();

    // Check if we already have a short ID for this UUID
    for (short_id, uuid) in map.iter() {
        if uuid == user_uuid {
            return short_id.clone();
        }
    }

    // Generate new short ID
    *counter += 1;
    let short_id = format!("u{}", counter);
    map.insert(short_id.clone(), user_uuid.to_string());

    short_id
}

/// Get full user UUID from short ID
pub fn get_full_user_id(short_id: &str) -> Option<String> {
    let map = get_user_id_map().lock().unwrap();
    map.get(short_id).cloned()
}

/// Helper function to get user language from Telegram user data
pub fn get_user_language(user: &User) -> Language {
    Language::from_telegram_code(user.language_code.as_deref())
}

/// Main keyboard for registered users
pub fn get_main_keyboard(is_admin: bool, lang: Language) -> InlineKeyboardMarkup {
    let l10n = LocalizationService::new();

    let mut keyboard = vec![
        vec![InlineKeyboardButton::callback(
            l10n.get(lang.clone(), "subscription_link"),
            "subscription_link",
        )],
        vec![InlineKeyboardButton::callback(
            l10n.get(lang.clone(), "my_configs"),
            "my_configs",
        )],
        vec![InlineKeyboardButton::callback(
            l10n.get(lang.clone(), "support"),
            "support",
        )],
    ];

    if is_admin {
        keyboard.push(vec![InlineKeyboardButton::callback(
            l10n.get(lang.clone(), "user_requests"),
            "admin_requests",
        )]);
        keyboard.push(vec![InlineKeyboardButton::callback(
            l10n.get(lang, "manage_users"),
            "manage_users",
        )]);
    }

    InlineKeyboardMarkup::new(keyboard)
}

/// Keyboard for new users
pub fn get_new_user_keyboard(lang: Language) -> InlineKeyboardMarkup {
    let l10n = LocalizationService::new();

    InlineKeyboardMarkup::new(vec![vec![InlineKeyboardButton::callback(
        l10n.get(lang, "get_vpn_access"),
        "request_access",
    )]])
}
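A quick illustration of the short-ID round-trip used by the callback payloads above; Telegram callback data is limited to a few dozen bytes, which is presumably why full UUIDs are mapped to short tokens in memory. The UUID below is an arbitrary example, not a value from the repository.

// Example round-trip through the in-memory short-ID maps defined above.
fn short_id_roundtrip_example() {
    let uuid = "0e0e90c9-46f5-4a8f-9c18-000000000000"; // arbitrary example UUID
    let short = generate_short_server_id(uuid);        // e.g. "s1"
    assert_eq!(get_full_server_id(&short).as_deref(), Some(uuid));
    assert_eq!(generate_short_server_id(uuid), short); // same UUID reuses the same short ID
}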
src/services/telegram/handlers/user.rs (new file, 872 lines)
@@ -0,0 +1,872 @@
|
|||||||
|
use base64::{engine::general_purpose, Engine};
|
||||||
|
use teloxide::{
|
||||||
|
prelude::*,
|
||||||
|
types::{InlineKeyboardButton, InlineKeyboardMarkup},
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::super::localization::{Language, LocalizationService};
|
||||||
|
use super::types::{get_main_keyboard, get_new_user_keyboard, get_user_language};
|
||||||
|
use crate::database::entities::user_request::CreateUserRequestDto;
|
||||||
|
use crate::database::repository::{UserRepository, UserRequestRepository};
|
||||||
|
use crate::database::DatabaseManager;
|
||||||
|
|
||||||
|
/// Handle start command and main menu
|
||||||
|
pub async fn handle_start(
|
||||||
|
bot: Bot,
|
||||||
|
chat_id: ChatId,
|
||||||
|
telegram_id: i64,
|
||||||
|
from: &teloxide::types::User,
|
||||||
|
user_repo: &UserRepository,
|
||||||
|
db: &DatabaseManager,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
handle_start_impl(bot, chat_id, telegram_id, from, user_repo, db, None, None).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle start with message editing support
|
||||||
|
pub async fn handle_start_edit(
|
||||||
|
bot: Bot,
|
||||||
|
q: &CallbackQuery,
|
||||||
|
db: &DatabaseManager,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let from = &q.from;
|
||||||
|
let telegram_id = from.id.0 as i64;
|
||||||
|
let user_repo = UserRepository::new(db.connection());
|
||||||
|
|
||||||
|
if let Some(msg) = &q.message {
|
||||||
|
if let teloxide::types::MaybeInaccessibleMessage::Regular(regular_msg) = msg {
|
||||||
|
let chat_id = regular_msg.chat.id;
|
||||||
|
handle_start_impl(
|
||||||
|
bot.clone(),
|
||||||
|
chat_id,
|
||||||
|
telegram_id,
|
||||||
|
from,
|
||||||
|
&user_repo,
|
||||||
|
db,
|
||||||
|
Some(regular_msg.id),
|
||||||
|
Some(q.id.clone()),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Internal implementation of handle_start with optional message editing
|
||||||
|
async fn handle_start_impl(
|
||||||
|
bot: Bot,
|
||||||
|
chat_id: ChatId,
|
||||||
|
telegram_id: i64,
|
||||||
|
from: &teloxide::types::User,
|
||||||
|
user_repo: &UserRepository,
|
||||||
|
db: &DatabaseManager,
|
||||||
|
edit_message_id: Option<teloxide::types::MessageId>,
|
||||||
|
callback_query_id: Option<String>,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let lang = get_user_language(from);
|
||||||
|
let l10n = LocalizationService::new();
|
||||||
|
|
||||||
|
// Check if user exists in our database
|
||||||
|
match user_repo.get_by_telegram_id(telegram_id).await {
|
||||||
|
Ok(Some(user)) => {
|
||||||
|
// Check if user is admin
|
||||||
|
let is_admin = user_repo
|
||||||
|
.is_telegram_id_admin(telegram_id)
|
||||||
|
.await
|
||||||
|
.unwrap_or(false);
|
||||||
|
|
||||||
|
// Check if user has any pending requests
|
||||||
|
let request_repo = UserRequestRepository::new(db.connection().clone());
|
||||||
|
|
||||||
|
// Check for existing requests
|
||||||
|
if let Ok(existing_requests) = request_repo.find_by_telegram_id(telegram_id).await {
|
||||||
|
if let Some(latest_request) = existing_requests
|
||||||
|
.into_iter()
|
||||||
|
.filter(|r| {
|
||||||
|
r.status == "pending" || r.status == "approved" || r.status == "declined"
|
||||||
|
})
|
||||||
|
.max_by_key(|r| r.created_at)
|
||||||
|
{
|
||||||
|
match latest_request.status.as_str() {
|
||||||
|
"pending" => {
|
||||||
|
let message = l10n.format(
|
||||||
|
lang.clone(),
|
||||||
|
"request_pending",
|
||||||
|
&[
|
||||||
|
("status", "⏳ pending"),
|
||||||
|
(
|
||||||
|
"date",
|
||||||
|
&latest_request
|
||||||
|
.created_at
|
||||||
|
.format("%Y-%m-%d %H:%M UTC")
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
let keyboard = get_new_user_keyboard(lang);
|
||||||
|
|
||||||
|
if let Some(msg_id) = edit_message_id {
|
||||||
|
bot.edit_message_text(chat_id, msg_id, message)
|
||||||
|
.parse_mode(teloxide::types::ParseMode::Html)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(cb_id) = callback_query_id {
|
||||||
|
bot.answer_callback_query(cb_id).await?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
bot.send_message(chat_id, message)
|
||||||
|
.parse_mode(teloxide::types::ParseMode::Html)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
"declined" => {
|
||||||
|
let message = l10n.format(
|
||||||
|
lang.clone(),
|
||||||
|
"request_pending",
|
||||||
|
&[
|
||||||
|
("status", &l10n.get(lang.clone(), "request_declined_status")),
|
||||||
|
(
|
||||||
|
"date",
|
||||||
|
&latest_request
|
||||||
|
.created_at
|
||||||
|
.format("%Y-%m-%d %H:%M UTC")
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
let keyboard = get_new_user_keyboard(lang);
|
||||||
|
|
||||||
|
if let Some(msg_id) = edit_message_id {
|
||||||
|
bot.edit_message_text(chat_id, msg_id, message)
|
||||||
|
.parse_mode(teloxide::types::ParseMode::Html)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(cb_id) = callback_query_id {
|
||||||
|
bot.answer_callback_query(cb_id).await?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
bot.send_message(chat_id, message)
|
||||||
|
.parse_mode(teloxide::types::ParseMode::Html)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
_ => {} // approved - continue with normal flow
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Existing user - show main menu
|
||||||
|
let message = l10n.format(lang.clone(), "welcome_back", &[("name", &user.name)]);
|
||||||
|
let keyboard = get_main_keyboard(is_admin, lang);
|
||||||
|
|
||||||
|
if let Some(msg_id) = edit_message_id {
|
||||||
|
bot.edit_message_text(chat_id, msg_id, message)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(cb_id) = callback_query_id {
|
||||||
|
bot.answer_callback_query(cb_id).await?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
bot.send_message(chat_id, message)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(None) => {
|
||||||
|
// New user - show access request
|
||||||
|
let username = from.username.as_deref().unwrap_or("Unknown");
|
||||||
|
let message = l10n.format(lang.clone(), "welcome_new_user", &[("username", username)]);
|
||||||
|
let keyboard = get_new_user_keyboard(lang);
|
||||||
|
|
||||||
|
if let Some(msg_id) = edit_message_id {
|
||||||
|
bot.edit_message_text(chat_id, msg_id, message)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(cb_id) = callback_query_id {
|
||||||
|
bot.answer_callback_query(cb_id).await?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
bot.send_message(chat_id, message)
|
||||||
|
.reply_markup(keyboard)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Database error: {}", e);
|
||||||
|
bot.send_message(chat_id, "Database error occurred").await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle access request
|
||||||
|
pub async fn handle_request_access(
|
||||||
|
bot: Bot,
|
||||||
|
q: &CallbackQuery,
|
||||||
|
db: &DatabaseManager,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let from = &q.from;
|
||||||
|
let lang = get_user_language(from);
|
||||||
|
let l10n = LocalizationService::new();
|
||||||
|
let telegram_id = from.id.0 as i64;
|
||||||
|
let chat_id = q
|
||||||
|
.message
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|m| match m {
|
||||||
|
teloxide::types::MaybeInaccessibleMessage::Regular(msg) => Some(msg.chat.id),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.ok_or("No chat ID")?;
|
||||||
|
|
||||||
|
let user_repo = UserRepository::new(db.connection());
|
||||||
|
let request_repo = UserRequestRepository::new(db.connection().clone());
|
||||||
|
|
||||||
|
// Check if user already exists
|
||||||
|
if let Some(_) = user_repo
|
||||||
|
.get_by_telegram_id(telegram_id)
|
||||||
|
.await
|
||||||
|
.unwrap_or(None)
|
||||||
|
{
|
||||||
|
bot.answer_callback_query(q.id.clone())
|
||||||
|
.text(l10n.get(lang, "already_approved"))
|
||||||
|
.await?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for existing requests
|
||||||
|
if let Ok(existing_requests) = request_repo.find_by_telegram_id(telegram_id).await {
|
||||||
|
if let Some(latest_request) = existing_requests
|
||||||
|
.iter()
|
||||||
|
.filter(|r| r.status == "pending")
|
||||||
|
.max_by_key(|r| r.created_at)
|
||||||
|
{
|
||||||
|
// Show pending status in the message instead of just an alert
|
||||||
|
let message = l10n.format(
|
||||||
|
lang.clone(),
|
||||||
|
"request_pending",
|
||||||
|
&[
|
||||||
|
("status", "⏳ pending"),
|
||||||
|
(
|
||||||
|
"date",
|
||||||
|
&latest_request
|
||||||
|
.created_at
|
||||||
|
.format("%Y-%m-%d %H:%M UTC")
|
||||||
|
.to_string(),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Some(message_ref) = &q.message {
|
||||||
|
if let teloxide::types::MaybeInaccessibleMessage::Regular(msg) = message_ref {
|
||||||
|
let _ = bot
|
||||||
|
.edit_message_text(chat_id, msg.id, message)
|
||||||
|
.parse_mode(teloxide::types::ParseMode::Html)
|
||||||
|
.reply_markup(InlineKeyboardMarkup::new(vec![vec![
|
||||||
|
InlineKeyboardButton::callback(l10n.get(lang, "back"), "back"),
|
||||||
|
]]))
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bot.answer_callback_query(q.id.clone()).await?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for declined requests - allow new request after decline
|
||||||
|
let _has_declined = existing_requests.iter().any(|r| r.status == "declined");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create new access request
|
||||||
|
let dto = CreateUserRequestDto {
|
||||||
|
telegram_id,
|
||||||
|
telegram_first_name: Some(from.first_name.clone()),
|
||||||
|
telegram_last_name: from.last_name.clone(),
|
||||||
|
telegram_username: from.username.clone(),
|
||||||
|
request_message: Some("Access request via Telegram bot".to_string()),
|
||||||
|
language: lang.code().to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
match request_repo.create(dto).await {
|
||||||
|
Ok(request) => {
|
||||||
|
// Edit message to show success
|
||||||
|
if let Some(message) = &q.message {
|
||||||
|
if let teloxide::types::MaybeInaccessibleMessage::Regular(msg) = message {
|
||||||
|
let _ = bot
|
||||||
|
.edit_message_text(
|
||||||
|
chat_id,
|
||||||
|
msg.id,
|
||||||
|
l10n.get(lang.clone(), "request_submitted"),
|
||||||
|
)
|
||||||
|
.reply_markup(InlineKeyboardMarkup::new(vec![vec![
|
||||||
|
InlineKeyboardButton::callback(l10n.get(lang, "back"), "back"),
|
||||||
|
]]))
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify admins
|
||||||
|
notify_admins_new_request(&bot, &request, db).await?;
|
||||||
|
|
||||||
|
bot.answer_callback_query(q.id.clone()).await?;
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("Failed to create request: {}", e);
|
||||||
|
bot.answer_callback_query(q.id.clone())
|
||||||
|
.text(l10n.format(lang, "request_submit_failed", &[("error", &e.to_string())]))
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle my configs with message editing
|
||||||
|
pub async fn handle_my_configs_edit(
|
||||||
|
bot: Bot,
|
||||||
|
q: &CallbackQuery,
|
||||||
|
db: &DatabaseManager,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let from = &q.from;
|
||||||
|
let lang = get_user_language(from);
|
||||||
|
let l10n = LocalizationService::new();
|
||||||
|
let telegram_id = from.id.0 as i64;
|
||||||
|
let chat_id = q
|
||||||
|
.message
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|m| match m {
|
||||||
|
teloxide::types::MaybeInaccessibleMessage::Regular(msg) => Some(msg.chat.id),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.ok_or("No chat ID")?;
|
||||||
|
|
||||||
|
let user_repo = UserRepository::new(db.connection());
|
||||||
|
let inbound_users_repo =
|
||||||
|
crate::database::repository::InboundUsersRepository::new(db.connection().clone());
|
||||||
|
let uri_service = crate::services::UriGeneratorService::new();
|
||||||
|
|
||||||
|
if let Some(user) = user_repo
|
||||||
|
.get_by_telegram_id(telegram_id)
|
||||||
|
.await
|
||||||
|
.unwrap_or(None)
|
||||||
|
{
|
||||||
|
// Get all active inbound users for this user
|
||||||
|
let inbound_users = inbound_users_repo
|
||||||
|
.find_by_user_id(user.id)
|
||||||
|
.await
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
if inbound_users.is_empty() {
|
||||||
|
// Edit message to show no configs available
|
||||||
|
if let Some(msg) = &q.message {
|
||||||
|
if let teloxide::types::MaybeInaccessibleMessage::Regular(regular_msg) = msg {
|
||||||
|
bot.edit_message_text(
|
||||||
|
chat_id,
|
||||||
|
regular_msg.id,
|
||||||
|
l10n.get(lang.clone(), "no_configs_available"),
|
||||||
|
)
|
||||||
|
.reply_markup(InlineKeyboardMarkup::new(vec![vec![
|
||||||
|
InlineKeyboardButton::callback(l10n.get(lang, "back"), "back"),
|
||||||
|
]]))
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
bot.answer_callback_query(q.id.clone()).await?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Structure to hold config with inbound_id
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
struct ConfigWithInbound {
|
||||||
|
client_config: crate::services::uri_generator::ClientConfig,
|
||||||
|
server_inbound_id: uuid::Uuid,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Group configurations by server name
|
||||||
|
let mut servers: std::collections::HashMap<String, Vec<ConfigWithInbound>> =
|
||||||
|
std::collections::HashMap::new();
|
||||||
|
|
||||||
|
for inbound_user in inbound_users {
|
||||||
|
if !inbound_user.is_active {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get client config data for this specific inbound
|
||||||
|
if let Ok(Some(config_data)) = inbound_users_repo
|
||||||
|
.get_client_config_data(user.id, inbound_user.server_inbound_id)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
match uri_service.generate_client_config(user.id, &config_data) {
|
||||||
|
Ok(client_config) => {
|
||||||
|
let config_with_inbound = ConfigWithInbound {
|
||||||
|
client_config: client_config.clone(),
|
||||||
|
server_inbound_id: inbound_user.server_inbound_id,
|
||||||
|
};
|
||||||
|
|
||||||
|
servers
|
||||||
|
.entry(client_config.server_name.clone())
|
||||||
|
.or_insert_with(Vec::new)
|
||||||
|
.push(config_with_inbound);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
tracing::warn!("Failed to generate client config: {}", e);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build message with statistics only
|
||||||
|
let mut message_lines = vec![l10n.get(lang.clone(), "your_configurations")];
|
||||||
|
|
||||||
|
// Calculate statistics
|
||||||
|
let server_count = servers.len();
|
||||||
|
let total_configs = servers.values().map(|configs| configs.len()).sum::<usize>();
|
||||||
|
|
||||||
|
// Count unique protocols
|
||||||
|
let mut protocols = std::collections::HashSet::new();
|
||||||
|
for configs in servers.values() {
|
||||||
|
for config_with_inbound in configs {
|
||||||
|
protocols.insert(config_with_inbound.client_config.protocol.clone());
|
||||||
            }
        }

        let server_word = match lang {
            Language::Russian => {
                if server_count == 1 {
                    "сервер"
                } else if server_count < 5 {
                    "сервера"
                } else {
                    "серверов"
                }
            }
            Language::English => {
                if server_count == 1 {
                    "server"
                } else {
                    "servers"
                }
            }
        };

        let config_word = match lang {
            Language::Russian => {
                if total_configs == 1 {
                    "конфигурация"
                } else if total_configs < 5 {
                    "конфигурации"
                } else {
                    "конфигураций"
                }
            }
            Language::English => {
                if total_configs == 1 {
                    "configuration"
                } else {
                    "configurations"
                }
            }
        };

        let protocol_word = match lang {
            Language::Russian => {
                if protocols.len() == 1 {
                    "протокол"
                } else if protocols.len() < 5 {
                    "протокола"
                } else {
                    "протоколов"
                }
            }
            Language::English => {
                if protocols.len() == 1 {
                    "protocol"
                } else {
                    "protocols"
                }
            }
        };

        message_lines.push(format!(
            "\n📊 {} {} • {} {} • {} {}",
            server_count,
            server_word,
            total_configs,
            config_word,
            protocols.len(),
            protocol_word
        ));

        // Create keyboard with buttons for each server
        let mut keyboard_buttons = vec![];

        for (server_name, configs) in servers.iter() {
            // Encode server name to avoid issues with special characters
            let encoded_server_name = general_purpose::STANDARD.encode(server_name.as_bytes());
            let config_count = configs.len();

            let config_suffix = match lang {
                Language::Russian => {
                    if config_count == 1 {
                        ""
                    } else if config_count < 5 {
                        "а"
                    } else {
                        "ов"
                    }
                }
                Language::English => {
                    if config_count == 1 {
                        ""
                    } else {
                        "s"
                    }
                }
            };

            let config_word = match lang {
                Language::Russian => "конфиг",
                Language::English => "config",
            };

            keyboard_buttons.push(vec![InlineKeyboardButton::callback(
                format!(
                    "🖥️ {} ({} {}{})",
                    server_name, config_count, config_word, config_suffix
                ),
                format!("server_configs:{}", encoded_server_name),
            )]);
        }

        keyboard_buttons.push(vec![InlineKeyboardButton::callback(
            l10n.get(lang, "back"),
            "back",
        )]);

        let message = message_lines.join("\n");

        // Edit the existing message instead of sending a new one
        if let Some(msg) = &q.message {
            if let teloxide::types::MaybeInaccessibleMessage::Regular(regular_msg) = msg {
                bot.edit_message_text(chat_id, regular_msg.id, message)
                    .parse_mode(teloxide::types::ParseMode::Html)
                    .reply_markup(InlineKeyboardMarkup::new(keyboard_buttons))
                    .await?;
            }
        }

        bot.answer_callback_query(q.id.clone()).await?;
    }

    Ok(())
}

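The handler above embeds the base64-encoded server name in the `server_configs:` callback data and decodes it again in `handle_show_server_configs` below. A minimal sketch of that round trip, using a hypothetical server name; note that Telegram limits callback data to 64 bytes, so very long names would not fit this encoding:

// Sketch only: callback-data round trip for a server name ("Frankfurt-1" is hypothetical).
use base64::{engine::general_purpose, Engine as _};

let server_name = "Frankfurt-1";
let encoded = general_purpose::STANDARD.encode(server_name.as_bytes());
let callback_data = format!("server_configs:{}", encoded);
let decoded = general_purpose::STANDARD
    .decode(&encoded)
    .ok()
    .and_then(|bytes| String::from_utf8(bytes).ok());
assert_eq!(decoded.as_deref(), Some(server_name));
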
/// Handle show server configs callback
pub async fn handle_show_server_configs(
    bot: Bot,
    q: &CallbackQuery,
    encoded_server_name: &str,
    db: &DatabaseManager,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let from = &q.from;
    let lang = get_user_language(from);
    let l10n = LocalizationService::new();
    let telegram_id = from.id.0 as i64;
    let chat_id = q
        .message
        .as_ref()
        .and_then(|m| match m {
            teloxide::types::MaybeInaccessibleMessage::Regular(msg) => Some(msg.chat.id),
            _ => None,
        })
        .ok_or("No chat ID")?;

    // Decode server name
    let server_name = match general_purpose::STANDARD.decode(encoded_server_name) {
        Ok(bytes) => String::from_utf8(bytes).map_err(|_| "Invalid server name encoding")?,
        Err(_) => return Ok(()), // Invalid encoding, ignore
    };

    let user_repo = UserRepository::new(db.connection());
    let inbound_users_repo =
        crate::database::repository::InboundUsersRepository::new(db.connection().clone());
    let uri_service = crate::services::UriGeneratorService::new();

    // Get user from telegram_id
    if let Some(user) = user_repo
        .get_by_telegram_id(telegram_id)
        .await
        .unwrap_or(None)
    {
        // Get all active inbound users for this user
        let inbound_users = inbound_users_repo
            .find_by_user_id(user.id)
            .await
            .unwrap_or_default();

        let mut server_configs = Vec::new();

        for inbound_user in inbound_users {
            if !inbound_user.is_active {
                continue;
            }

            // Get client config data for this specific inbound
            if let Ok(Some(config_data)) = inbound_users_repo
                .get_client_config_data(user.id, inbound_user.server_inbound_id)
                .await
            {
                if config_data.server_name == server_name {
                    match uri_service.generate_client_config(user.id, &config_data) {
                        Ok(client_config) => {
                            server_configs.push(client_config);
                        }
                        Err(e) => {
                            tracing::warn!("Failed to generate client config: {}", e);
                            continue;
                        }
                    }
                }
            }
        }

        if server_configs.is_empty() {
            bot.answer_callback_query(q.id.clone())
                .text(l10n.get(lang, "config_not_found"))
                .await?;
            return Ok(());
        }

        // Build message with all configs for this server
        let mut message_lines = vec![l10n.format(
            lang.clone(),
            "server_configs_title",
            &[("server_name", &server_name)],
        )];

        for config in &server_configs {
            let protocol_emoji = match config.protocol.as_str() {
                "vless" => "🔵",
                "vmess" => "🟢",
                "trojan" => "🔴",
                "shadowsocks" => "🟡",
                _ => "⚪",
            };

            message_lines.push(format!(
                "\n{} <b>{} - {}</b> ({})",
                protocol_emoji,
                config.server_name,
                config.template_name,
                config.protocol.to_uppercase()
            ));

            message_lines.push(format!("<code>{}</code>", config.uri));
        }

        // Create back button
        let keyboard = InlineKeyboardMarkup::new(vec![vec![InlineKeyboardButton::callback(
            l10n.get(lang, "back"),
            "back_to_configs",
        )]]);

        let message = message_lines.join("\n");

        // Edit the existing message instead of sending a new one
        if let Some(msg) = &q.message {
            if let teloxide::types::MaybeInaccessibleMessage::Regular(regular_msg) = msg {
                bot.edit_message_text(chat_id, regular_msg.id, message)
                    .parse_mode(teloxide::types::ParseMode::Html)
                    .reply_markup(keyboard)
                    .await?;
            }
        }

        bot.answer_callback_query(q.id.clone()).await?;
    } else {
        bot.answer_callback_query(q.id.clone())
            .text(l10n.get(lang, "unauthorized"))
            .await?;
    }

    Ok(())
}

/// Handle support button
pub async fn handle_support(
    bot: Bot,
    q: &CallbackQuery,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let from = &q.from;
    let lang = get_user_language(from);
    let l10n = LocalizationService::new();
    let chat_id = q
        .message
        .as_ref()
        .and_then(|m| match m {
            teloxide::types::MaybeInaccessibleMessage::Regular(msg) => Some(msg.chat.id),
            _ => None,
        })
        .ok_or("No chat ID")?;

    let keyboard = InlineKeyboardMarkup::new(vec![vec![InlineKeyboardButton::callback(
        l10n.get(lang.clone(), "back"),
        "back",
    )]]);

    // Edit the existing message instead of sending a new one
    if let Some(msg) = &q.message {
        if let teloxide::types::MaybeInaccessibleMessage::Regular(regular_msg) = msg {
            bot.edit_message_text(chat_id, regular_msg.id, l10n.get(lang, "support_info"))
                .parse_mode(teloxide::types::ParseMode::Html)
                .reply_markup(keyboard)
                .await?;
        }
    }

    bot.answer_callback_query(q.id.clone()).await?;

    Ok(())
}

/// Notify admins about new access request
async fn notify_admins_new_request(
    bot: &Bot,
    request: &crate::database::entities::user_request::Model,
    db: &DatabaseManager,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let user_repo = UserRepository::new(db.connection());

    // Get all admins
    let admins = user_repo.get_telegram_admins().await.unwrap_or_default();

    if !admins.is_empty() {
        let lang = Language::English; // Default admin language
        let l10n = LocalizationService::new();

        let message = l10n.format(
            lang.clone(),
            "new_access_request",
            &[
                (
                    "first_name",
                    &request.telegram_first_name.as_deref().unwrap_or(""),
                ),
                (
                    "last_name",
                    &request.telegram_last_name.as_deref().unwrap_or(""),
                ),
                (
                    "username",
                    &request.telegram_username.as_deref().unwrap_or("unknown"),
                ),
            ],
        );

        let keyboard = InlineKeyboardMarkup::new(vec![
            vec![
                InlineKeyboardButton::callback(
                    l10n.get(lang.clone(), "approve"),
                    format!("approve:{}", request.id),
                ),
                InlineKeyboardButton::callback(
                    l10n.get(lang.clone(), "decline"),
                    format!("decline:{}", request.id),
                ),
            ],
            vec![InlineKeyboardButton::callback(
                "📋 All Requests",
                "back_to_requests",
            )],
        ]);

        for admin in admins {
            if let Some(telegram_id) = admin.telegram_id {
                let _ = bot
                    .send_message(ChatId(telegram_id), &message)
                    .parse_mode(teloxide::types::ParseMode::Html)
                    .reply_markup(keyboard.clone())
                    .await;
            }
        }
    }

    Ok(())
}

/// Handle subscription link request
pub async fn handle_subscription_link(
    bot: Bot,
    q: &CallbackQuery,
    db: &DatabaseManager,
    app_config: &crate::config::AppConfig,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let from = q.from.clone();
    let telegram_id = from.id.0 as i64;
    let lang = get_user_language(&from);
    let l10n = LocalizationService::new();

    // Get user from database
    let user_repo = UserRepository::new(db.connection());
    if let Ok(Some(user)) = user_repo.get_by_telegram_id(telegram_id).await {
        // Generate subscription URL
        let subscription_url = format!("{}/sub/{}", app_config.web.base_url, user.id);

        let message = match lang {
            Language::Russian => {
                format!(
                    "🔗 <b>Ваша ссылка подписки</b>\n\n\
                    Скопируйте эту ссылку и добавьте её в ваш VPN-клиент:\n\n\
                    <code>{}</code>\n\n\
                    💡 <i>Эта ссылка содержит все ваши конфигурации и автоматически обновляется при изменениях</i>",
                    subscription_url
                )
            }
            Language::English => {
                format!(
                    "🔗 <b>Your Subscription Link</b>\n\n\
                    Copy this link and add it to your VPN client:\n\n\
                    <code>{}</code>\n\n\
                    💡 <i>This link contains all your configurations and updates automatically when changes are made</i>",
                    subscription_url
                )
            }
        };

        let keyboard = InlineKeyboardMarkup::new(vec![vec![InlineKeyboardButton::callback(
            l10n.get(lang, "back"),
            "back",
        )]]);

        // Edit the existing message
        if let Some(msg) = &q.message {
            if let teloxide::types::MaybeInaccessibleMessage::Regular(regular_msg) = msg {
                let chat_id = regular_msg.chat.id;
                bot.edit_message_text(chat_id, regular_msg.id, message)
                    .parse_mode(teloxide::types::ParseMode::Html)
                    .reply_markup(keyboard)
                    .await?;
            }
        }
    } else {
        // User not found - this shouldn't happen for registered users
        bot.answer_callback_query(q.id.clone())
            .text("User not found")
            .await?;
        return Ok(());
    }

    bot.answer_callback_query(q.id.clone()).await?;
    Ok(())
}
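For reference, the subscription link built above is simply the configured web base URL plus the user's ID; a hedged illustration with hypothetical values:

// Sketch: with app_config.web.base_url = "https://vpn.example.com" (hypothetical)
// and a UUID user id, subscription_url becomes "https://vpn.example.com/sub/<user-uuid>".
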
373 src/services/telegram/localization/mod.rs (Normal file)
@@ -0,0 +1,373 @@
|
|||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||||
|
pub enum Language {
|
||||||
|
Russian,
|
||||||
|
English,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Language {
|
||||||
|
pub fn from_telegram_code(code: Option<&str>) -> Self {
|
||||||
|
match code {
|
||||||
|
Some("ru") | Some("by") | Some("kk") | Some("uk") => Self::Russian,
|
||||||
|
_ => Self::English, // Default to English
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn code(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Self::Russian => "ru",
|
||||||
|
Self::English => "en",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct Translations {
|
||||||
|
pub welcome_new_user: String,
|
||||||
|
pub welcome_back: String,
|
||||||
|
pub request_pending: String,
|
||||||
|
pub request_approved_status: String,
|
||||||
|
pub request_declined_status: String,
|
||||||
|
pub get_vpn_access: String,
|
||||||
|
pub my_configs: String,
|
||||||
|
pub support: String,
|
||||||
|
pub user_requests: String,
|
||||||
|
pub back: String,
|
||||||
|
pub approve: String,
|
||||||
|
pub decline: String,
|
||||||
|
|
||||||
|
// Request handling
|
||||||
|
pub already_pending: String,
|
||||||
|
pub already_approved: String,
|
||||||
|
pub already_declined: String,
|
||||||
|
pub request_submitted: String,
|
||||||
|
pub request_submit_failed: String,
|
||||||
|
|
||||||
|
// Approval/Decline messages
|
||||||
|
pub request_approved: String,
|
||||||
|
pub request_declined: String,
|
||||||
|
pub request_approved_notification: String,
|
||||||
|
pub request_declined_notification: String,
|
||||||
|
|
||||||
|
// Admin messages
|
||||||
|
pub new_access_request: String,
|
||||||
|
pub no_pending_requests: String,
|
||||||
|
pub access_request_details: String,
|
||||||
|
pub unauthorized: String,
|
||||||
|
pub request_approved_admin: String,
|
||||||
|
pub request_declined_admin: String,
|
||||||
|
pub user_creation_failed: String,
|
||||||
|
|
||||||
|
// Support
|
||||||
|
pub support_info: String,
|
||||||
|
|
||||||
|
// Stats
|
||||||
|
pub statistics: String,
|
||||||
|
pub total_users: String,
|
||||||
|
pub total_servers: String,
|
||||||
|
pub total_inbounds: String,
|
||||||
|
pub pending_requests: String,
|
||||||
|
|
||||||
|
// Broadcast
|
||||||
|
pub broadcast_complete: String,
|
||||||
|
pub sent: String,
|
||||||
|
pub failed: String,
|
||||||
|
|
||||||
|
// Configs
|
||||||
|
pub configs_coming_soon: String,
|
||||||
|
pub your_configurations: String,
|
||||||
|
pub no_configs_available: String,
|
||||||
|
pub config_copy_message: String,
|
||||||
|
pub config_copied: String,
|
||||||
|
pub config_not_found: String,
|
||||||
|
pub server_configs_title: String,
|
||||||
|
|
||||||
|
// Subscription
|
||||||
|
pub subscription_link: String,
|
||||||
|
|
||||||
|
// User Management
|
||||||
|
pub manage_users: String,
|
||||||
|
pub user_list: String,
|
||||||
|
pub user_details: String,
|
||||||
|
pub manage_access: String,
|
||||||
|
pub remove_access: String,
|
||||||
|
pub grant_access: String,
|
||||||
|
pub user_info: String,
|
||||||
|
pub no_users_found: String,
|
||||||
|
pub page_info: String,
|
||||||
|
pub next_page: String,
|
||||||
|
pub prev_page: String,
|
||||||
|
pub back_to_users: String,
|
||||||
|
pub back_to_menu: String,
|
||||||
|
pub access_updated: String,
|
||||||
|
pub access_removed: String,
|
||||||
|
pub access_granted: String,
|
||||||
|
|
||||||
|
// Errors
|
||||||
|
pub error_occurred: String,
|
||||||
|
pub admin_not_found: String,
|
||||||
|
pub request_not_found: String,
|
||||||
|
pub invalid_request_id: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct LocalizationService {
|
||||||
|
translations: HashMap<Language, Translations>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LocalizationService {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
let mut translations = HashMap::new();
|
||||||
|
|
||||||
|
// Load English translations
|
||||||
|
translations.insert(Language::English, Self::load_english());
|
||||||
|
|
||||||
|
// Load Russian translations
|
||||||
|
translations.insert(Language::Russian, Self::load_russian());
|
||||||
|
|
||||||
|
Self { translations }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get(&self, lang: Language, key: &str) -> String {
|
||||||
|
let translations = self
|
||||||
|
.translations
|
||||||
|
.get(&lang)
|
||||||
|
.unwrap_or_else(|| self.translations.get(&Language::English).unwrap());
|
||||||
|
|
||||||
|
match key {
|
||||||
|
"welcome_new_user" => translations.welcome_new_user.clone(),
|
||||||
|
"welcome_back" => translations.welcome_back.clone(),
|
||||||
|
"request_pending" => translations.request_pending.clone(),
|
||||||
|
"request_approved_status" => translations.request_approved_status.clone(),
|
||||||
|
"request_declined_status" => translations.request_declined_status.clone(),
|
||||||
|
"get_vpn_access" => translations.get_vpn_access.clone(),
|
||||||
|
"my_configs" => translations.my_configs.clone(),
|
||||||
|
"support" => translations.support.clone(),
|
||||||
|
"user_requests" => translations.user_requests.clone(),
|
||||||
|
"back" => translations.back.clone(),
|
||||||
|
"approve" => translations.approve.clone(),
|
||||||
|
"decline" => translations.decline.clone(),
|
||||||
|
"already_pending" => translations.already_pending.clone(),
|
||||||
|
"already_approved" => translations.already_approved.clone(),
|
||||||
|
"already_declined" => translations.already_declined.clone(),
|
||||||
|
"request_submitted" => translations.request_submitted.clone(),
|
||||||
|
"request_submit_failed" => translations.request_submit_failed.clone(),
|
||||||
|
"request_approved" => translations.request_approved.clone(),
|
||||||
|
"request_declined" => translations.request_declined.clone(),
|
||||||
|
"request_approved_notification" => translations.request_approved_notification.clone(),
|
||||||
|
"request_declined_notification" => translations.request_declined_notification.clone(),
|
||||||
|
"new_access_request" => translations.new_access_request.clone(),
|
||||||
|
"no_pending_requests" => translations.no_pending_requests.clone(),
|
||||||
|
"access_request_details" => translations.access_request_details.clone(),
|
||||||
|
"unauthorized" => translations.unauthorized.clone(),
|
||||||
|
"request_approved_admin" => translations.request_approved_admin.clone(),
|
||||||
|
"request_declined_admin" => translations.request_declined_admin.clone(),
|
||||||
|
"user_creation_failed" => translations.user_creation_failed.clone(),
|
||||||
|
"support_info" => translations.support_info.clone(),
|
||||||
|
"statistics" => translations.statistics.clone(),
|
||||||
|
"total_users" => translations.total_users.clone(),
|
||||||
|
"total_servers" => translations.total_servers.clone(),
|
||||||
|
"total_inbounds" => translations.total_inbounds.clone(),
|
||||||
|
"pending_requests" => translations.pending_requests.clone(),
|
||||||
|
"broadcast_complete" => translations.broadcast_complete.clone(),
|
||||||
|
"sent" => translations.sent.clone(),
|
||||||
|
"failed" => translations.failed.clone(),
|
||||||
|
"configs_coming_soon" => translations.configs_coming_soon.clone(),
|
||||||
|
"your_configurations" => translations.your_configurations.clone(),
|
||||||
|
"no_configs_available" => translations.no_configs_available.clone(),
|
||||||
|
"config_copy_message" => translations.config_copy_message.clone(),
|
||||||
|
"config_copied" => translations.config_copied.clone(),
|
||||||
|
"config_not_found" => translations.config_not_found.clone(),
|
||||||
|
"server_configs_title" => translations.server_configs_title.clone(),
|
||||||
|
"subscription_link" => translations.subscription_link.clone(),
|
||||||
|
"manage_users" => translations.manage_users.clone(),
|
||||||
|
"user_list" => translations.user_list.clone(),
|
||||||
|
"user_details" => translations.user_details.clone(),
|
||||||
|
"manage_access" => translations.manage_access.clone(),
|
||||||
|
"remove_access" => translations.remove_access.clone(),
|
||||||
|
"grant_access" => translations.grant_access.clone(),
|
||||||
|
"user_info" => translations.user_info.clone(),
|
||||||
|
"no_users_found" => translations.no_users_found.clone(),
|
||||||
|
"page_info" => translations.page_info.clone(),
|
||||||
|
"next_page" => translations.next_page.clone(),
|
||||||
|
"prev_page" => translations.prev_page.clone(),
|
||||||
|
"back_to_users" => translations.back_to_users.clone(),
|
||||||
|
"back_to_menu" => translations.back_to_menu.clone(),
|
||||||
|
"access_updated" => translations.access_updated.clone(),
|
||||||
|
"access_removed" => translations.access_removed.clone(),
|
||||||
|
"access_granted" => translations.access_granted.clone(),
|
||||||
|
"error_occurred" => translations.error_occurred.clone(),
|
||||||
|
"admin_not_found" => translations.admin_not_found.clone(),
|
||||||
|
"request_not_found" => translations.request_not_found.clone(),
|
||||||
|
"invalid_request_id" => translations.invalid_request_id.clone(),
|
||||||
|
_ => format!("Missing translation: {}", key),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn format(&self, lang: Language, template: &str, args: &[(&str, &str)]) -> String {
|
||||||
|
let mut result = self.get(lang, template);
|
||||||
|
for (placeholder, value) in args {
|
||||||
|
result = result.replace(&format!("{{{}}}", placeholder), value);
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_english() -> Translations {
|
||||||
|
Translations {
|
||||||
|
welcome_new_user: "👋 Welcome, {username}!\n\nI'm the OutFleet VPN bot. To get started, you'll need to request access.\n\nClick the button below to submit your access request:".to_string(),
|
||||||
|
welcome_back: "👋 Welcome back, {name}!\n\nWhat would you like to do?".to_string(),
|
||||||
|
request_pending: "👋 Hello!\n\nYour access request is currently <b>{status}</b>.\n\nRequest submitted: {date}".to_string(),
|
||||||
|
request_approved_status: "✅ approved".to_string(),
|
||||||
|
request_declined_status: "❌ declined".to_string(),
|
||||||
|
get_vpn_access: "🚀 Get VPN Access".to_string(),
|
||||||
|
my_configs: "📋 My Configs".to_string(),
|
||||||
|
support: "💬 Support".to_string(),
|
||||||
|
user_requests: "❔ User Requests".to_string(),
|
||||||
|
back: "🔙 Back".to_string(),
|
||||||
|
approve: "✅ Approve".to_string(),
|
||||||
|
decline: "❌ Decline".to_string(),
|
||||||
|
|
||||||
|
already_pending: "⏳ You already have a pending access request. Please wait for admin review.".to_string(),
|
||||||
|
already_approved: "✅ Your access request has already been approved. Use /start to access the main menu.".to_string(),
|
||||||
|
already_declined: "❌ Your previous access request was declined. Please contact administrators if you believe this is a mistake.".to_string(),
|
||||||
|
request_submitted: "✅ Your access request has been submitted!\n\nAn administrator will review your request soon. You'll receive a notification once it's processed.".to_string(),
|
||||||
|
request_submit_failed: "❌ Failed to submit request: {error}".to_string(),
|
||||||
|
|
||||||
|
request_approved: "✅ Request approved".to_string(),
|
||||||
|
request_declined: "❌ Request declined".to_string(),
|
||||||
|
request_approved_notification: "🎉 <b>Your access request has been approved!</b>\n\nWelcome to OutFleet VPN! Your account has been created.\n\nUser ID: <code>{user_id}</code>\n\nYou can now use /start to access the main menu.".to_string(),
|
||||||
|
request_declined_notification: "❌ Your access request has been declined.\n\nIf you believe this is a mistake, please contact the administrators.".to_string(),
|
||||||
|
|
||||||
|
new_access_request: "🔔 <b>New Access Request</b>\n\n👤 Name: {first_name} {last_name}\n🆔 Username: @{username}\n\nUse /requests to review".to_string(),
|
||||||
|
no_pending_requests: "No pending access requests".to_string(),
|
||||||
|
access_request_details: "❔ <b>Access Request</b>\n\n👤 Name: {full_name}\n🆔 Telegram: {telegram_link}\n📅 Requested: {date}\n\nMessage: {message}".to_string(),
|
||||||
|
unauthorized: "❌ You are not authorized to use this command".to_string(),
|
||||||
|
request_approved_admin: "✅ Request approved".to_string(),
|
||||||
|
request_declined_admin: "❌ Request declined".to_string(),
|
||||||
|
user_creation_failed: "❌ Failed to create user account: {error}\n\nPlease try again or contact technical support.".to_string(),
|
||||||
|
|
||||||
|
support_info: "💬 <b>Support Information</b>\n\n📱 <b>How to connect:</b>\n1. Download v2raytun app for Android or iOS from:\n https://v2raytun.com/\n\n2. Add your subscription link from \"🔗 Subscription Link\" menu\n OR\n Add individual server links from \"📋 My Configs\"\n\n3. Connect and enjoy secure VPN!\n\n❓ If you need help, please contact the administrators.".to_string(),
|
||||||
|
statistics: "📊 <b>Statistics</b>\n\n👥 Total Users: {users}\n🖥️ Total Servers: {servers}\n📡 Total Inbounds: {inbounds}\n⏳ Pending Requests: {pending}".to_string(),
|
||||||
|
total_users: "👥 Total Users".to_string(),
|
||||||
|
total_servers: "🖥️ Total Servers".to_string(),
|
||||||
|
total_inbounds: "📡 Total Inbounds".to_string(),
|
||||||
|
pending_requests: "⏳ Pending Requests".to_string(),
|
||||||
|
|
||||||
|
broadcast_complete: "✅ Broadcast complete\nSent: {sent}\nFailed: {failed}".to_string(),
|
||||||
|
sent: "Sent".to_string(),
|
||||||
|
failed: "Failed".to_string(),
|
||||||
|
|
||||||
|
configs_coming_soon: "📋 Your configurations will be shown here (coming soon)".to_string(),
|
||||||
|
your_configurations: "📋 <b>Your Configurations</b>".to_string(),
|
||||||
|
no_configs_available: "📋 No configurations available\n\nYou don't have access to any VPN configurations yet. Please contact an administrator to get access.".to_string(),
|
||||||
|
config_copy_message: "📋 <b>{server_name}</b> - {inbound_tag} ({protocol})\n\nConnection URI:".to_string(),
|
||||||
|
config_copied: "✅ Configuration copied to clipboard".to_string(),
|
||||||
|
config_not_found: "❌ Configuration not found".to_string(),
|
||||||
|
server_configs_title: "🖥️ <b>{server_name}</b> - Connection Links".to_string(),
|
||||||
|
|
||||||
|
subscription_link: "🔗 Subscription Link".to_string(),
|
||||||
|
manage_users: "👥 Manage Users".to_string(),
|
||||||
|
user_list: "👥 User List".to_string(),
|
||||||
|
user_details: "👤 User Details".to_string(),
|
||||||
|
manage_access: "🔧 Manage Access".to_string(),
|
||||||
|
remove_access: "❌ Remove Access".to_string(),
|
||||||
|
grant_access: "✅ Grant Access".to_string(),
|
||||||
|
user_info: "User Information".to_string(),
|
||||||
|
no_users_found: "No users found".to_string(),
|
||||||
|
page_info: "Page {page} of {total}".to_string(),
|
||||||
|
next_page: "Next →".to_string(),
|
||||||
|
prev_page: "← Previous".to_string(),
|
||||||
|
back_to_users: "👥 Back to Users".to_string(),
|
||||||
|
back_to_menu: "🏠 Main Menu".to_string(),
|
||||||
|
access_updated: "✅ Access updated successfully".to_string(),
|
||||||
|
access_removed: "❌ Access removed successfully".to_string(),
|
||||||
|
access_granted: "✅ Access granted successfully".to_string(),
|
||||||
|
|
||||||
|
error_occurred: "An error occurred".to_string(),
|
||||||
|
admin_not_found: "Admin not found".to_string(),
|
||||||
|
request_not_found: "Request not found".to_string(),
|
||||||
|
invalid_request_id: "Invalid request ID".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn load_russian() -> Translations {
|
||||||
|
Translations {
|
||||||
|
welcome_new_user: "👋 Добро пожаловать, {username}!\n\nЯ бот OutFleet VPN. Чтобы начать работу, вам необходимо запросить доступ.\n\nНажмите кнопку ниже, чтобы отправить запрос на доступ:".to_string(),
|
||||||
|
welcome_back: "👋 С возвращением, {name}!\n\nЧто вы хотите сделать?".to_string(),
|
||||||
|
request_pending: "👋 Привет!\n\nВаш запрос на доступ в настоящее время <b>{status}</b>.\n\nЗапрос отправлен: {date}".to_string(),
|
||||||
|
request_approved_status: "✅ одобрен".to_string(),
|
||||||
|
request_declined_status: "❌ отклонен".to_string(),
|
||||||
|
get_vpn_access: "🚀 Получить доступ к VPN".to_string(),
|
||||||
|
my_configs: "📋 Мои конфигурации".to_string(),
|
||||||
|
support: "💬 Поддержка".to_string(),
|
||||||
|
user_requests: "❔ Запросы пользователей".to_string(),
|
||||||
|
back: "🔙 Назад".to_string(),
|
||||||
|
approve: "✅ Одобрить".to_string(),
|
||||||
|
decline: "❌ Отклонить".to_string(),
|
||||||
|
|
||||||
|
already_pending: "⏳ У вас уже есть ожидающий рассмотрения запрос на доступ. Пожалуйста, дождитесь проверки администратором.".to_string(),
|
||||||
|
already_approved: "✅ Ваш запрос на доступ уже был одобрен. Используйте /start для доступа к главному меню.".to_string(),
|
||||||
|
already_declined: "❌ Ваш предыдущий запрос на доступ был отклонен. Пожалуйста, свяжитесь с администраторами, если считаете, что это ошибка.".to_string(),
|
||||||
|
request_submitted: "✅ Ваш запрос на доступ отправлен!\n\nАдминистратор скоро рассмотрит ваш запрос. Вы получите уведомление после обработки.".to_string(),
|
||||||
|
request_submit_failed: "❌ Не удалось отправить запрос: {error}".to_string(),
|
||||||
|
|
||||||
|
request_approved: "✅ Запрос одобрен".to_string(),
|
||||||
|
request_declined: "❌ Запрос отклонен".to_string(),
|
||||||
|
request_approved_notification: "🎉 <b>Ваш запрос на доступ одобрен!</b>\n\nДобро пожаловать в OutFleet VPN! Ваш аккаунт создан.\n\nID пользователя: <code>{user_id}</code>\n\nТеперь вы можете использовать /start для доступа к главному меню.".to_string(),
|
||||||
|
request_declined_notification: "❌ Ваш запрос на доступ отклонен.\n\nЕсли вы считаете, что это ошибка, пожалуйста, свяжитесь с администраторами.".to_string(),
|
||||||
|
|
||||||
|
new_access_request: "🔔 <b>Новый запрос на доступ</b>\n\n👤 Имя: {first_name} {last_name}\n🆔 Имя пользователя: @{username}\n\nИспользуйте /requests для просмотра".to_string(),
|
||||||
|
no_pending_requests: "Нет ожидающих запросов на доступ".to_string(),
|
||||||
|
access_request_details: "❔ <b>Запрос на доступ</b>\n\n👤 Имя: {full_name}\n🆔 Telegram: {telegram_link}\n📅 Запрошено: {date}\n\nСообщение: {message}".to_string(),
|
||||||
|
unauthorized: "❌ У вас нет прав для использования этой команды".to_string(),
|
||||||
|
request_approved_admin: "✅ Запрос одобрен".to_string(),
|
||||||
|
request_declined_admin: "❌ Запрос отклонен".to_string(),
|
||||||
|
user_creation_failed: "❌ Не удалось создать аккаунт пользователя: {error}\n\nПожалуйста, попробуйте еще раз или обратитесь в техническую поддержку.".to_string(),
|
||||||
|
|
||||||
|
support_info: "💬 <b>Информация о поддержке</b>\n\n📱 <b>Как подключиться:</b>\n1. Скачайте приложение v2raytun для Android или iOS с сайта:\n https://v2raytun.com/\n\n2. Добавьте ссылку подписки из меню \"🔗 Ссылка подписки\"\n ИЛИ\n Добавьте отдельные ссылки серверов из \"📋 Мои конфигурации\"\n\n3. Подключайтесь и наслаждайтесь безопасным VPN!\n\n❓ Если нужна помощь, обратитесь к администраторам.".to_string(),
|
||||||
|
|
||||||
|
statistics: "📊 <b>Статистика</b>\n\n👥 Всего пользователей: {users}\n🖥️ Всего серверов: {servers}\n📡 Всего входящих подключений: {inbounds}\n⏳ Ожидающих запросов: {pending}".to_string(),
|
||||||
|
total_users: "👥 Всего пользователей".to_string(),
|
||||||
|
total_servers: "🖥️ Всего серверов".to_string(),
|
||||||
|
total_inbounds: "📡 Всего входящих подключений".to_string(),
|
||||||
|
pending_requests: "⏳ Ожидающих запросов".to_string(),
|
||||||
|
|
||||||
|
broadcast_complete: "✅ Рассылка завершена\nОтправлено: {sent}\nНе удалось: {failed}".to_string(),
|
||||||
|
sent: "Отправлено".to_string(),
|
||||||
|
failed: "Не удалось".to_string(),
|
||||||
|
|
||||||
|
configs_coming_soon: "📋 Ваши конфигурации будут показаны здесь (скоро)".to_string(),
|
||||||
|
your_configurations: "📋 <b>Ваши конфигурации</b>".to_string(),
|
||||||
|
no_configs_available: "📋 Нет доступных конфигураций\n\nУ вас пока нет доступа к конфигурациям VPN. Пожалуйста, обратитесь к администратору для получения доступа.".to_string(),
|
||||||
|
config_copy_message: "📋 <b>{server_name}</b> - {inbound_tag} ({protocol})\n\nСсылка для подключения:".to_string(),
|
||||||
|
config_copied: "✅ Конфигурация скопирована в буфер обмена".to_string(),
|
||||||
|
config_not_found: "❌ Конфигурация не найдена".to_string(),
|
||||||
|
server_configs_title: "🖥️ <b>{server_name}</b> - Ссылки для подключения".to_string(),
|
||||||
|
|
||||||
|
subscription_link: "🔗 Ссылка подписки".to_string(),
|
||||||
|
|
||||||
|
manage_users: "👥 Управление пользователями".to_string(),
|
||||||
|
user_list: "👥 Список пользователей".to_string(),
|
||||||
|
user_details: "👤 Данные пользователя".to_string(),
|
||||||
|
manage_access: "🔧 Управление доступом".to_string(),
|
||||||
|
remove_access: "❌ Убрать доступ".to_string(),
|
||||||
|
grant_access: "✅ Предоставить доступ".to_string(),
|
||||||
|
user_info: "Информация о пользователе".to_string(),
|
||||||
|
no_users_found: "Пользователи не найдены".to_string(),
|
||||||
|
page_info: "Страница {page} из {total}".to_string(),
|
||||||
|
next_page: "Далее →".to_string(),
|
||||||
|
prev_page: "← Назад".to_string(),
|
||||||
|
back_to_users: "👥 К пользователям".to_string(),
|
||||||
|
back_to_menu: "🏠 Главное меню".to_string(),
|
||||||
|
access_updated: "✅ Доступ успешно обновлен".to_string(),
|
||||||
|
access_removed: "❌ Доступ успешно убран".to_string(),
|
||||||
|
access_granted: "✅ Доступ успешно предоставлен".to_string(),
|
||||||
|
|
||||||
|
error_occurred: "Произошла ошибка".to_string(),
|
||||||
|
admin_not_found: "Администратор не найден".to_string(),
|
||||||
|
request_not_found: "Запрос не найден".to_string(),
|
||||||
|
invalid_request_id: "Неверный ID запроса".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
199 src/services/telegram/mod.rs (Normal file)
@@ -0,0 +1,199 @@
use anyhow::Result;
use std::sync::Arc;
use teloxide::{prelude::*, Bot};
use tokio::sync::RwLock;
use uuid::Uuid;

use crate::config::AppConfig;
use crate::database::entities::telegram_config::Model as TelegramConfig;
use crate::database::repository::TelegramConfigRepository;
use crate::database::DatabaseManager;

pub mod bot;
pub mod error;
pub mod handlers;
pub mod localization;

/// Main Telegram service that manages the bot lifecycle
pub struct TelegramService {
    db: DatabaseManager,
    app_config: AppConfig,
    bot: Arc<RwLock<Option<Bot>>>,
    config: Arc<RwLock<Option<TelegramConfig>>>,
    shutdown_signal: Arc<RwLock<Option<tokio::sync::oneshot::Sender<()>>>>,
}

impl TelegramService {
    /// Create a new Telegram service
    pub fn new(db: DatabaseManager, app_config: AppConfig) -> Self {
        Self {
            db,
            app_config,
            bot: Arc::new(RwLock::new(None)),
            config: Arc::new(RwLock::new(None)),
            shutdown_signal: Arc::new(RwLock::new(None)),
        }
    }

    /// Initialize and start the bot if active configuration exists
    pub async fn initialize(&self) -> Result<()> {
        let repo = TelegramConfigRepository::new(self.db.connection());

        // Get active configuration
        if let Some(config) = repo.get_active().await? {
            self.start_with_config(config).await?;
        }

        Ok(())
    }

    /// Start bot with specific configuration
    pub async fn start_with_config(&self, config: TelegramConfig) -> Result<()> {
        // Stop existing bot if running
        self.stop().await?;

        // Create new bot instance
        let bot = Bot::new(&config.bot_token);

        // Verify token by calling getMe
        match bot.get_me().await {
            Ok(me) => {
                let username = me.user.username.unwrap_or_default();
                tracing::info!("Telegram bot started: @{}", username);
            }
            Err(e) => {
                return Err(anyhow::anyhow!("Invalid bot token: {}", e));
            }
        }

        // Store bot and config
        *self.bot.write().await = Some(bot.clone());
        *self.config.write().await = Some(config.clone());

        // Start polling in background
        if config.is_active {
            self.start_polling(bot).await?;
        }

        Ok(())
    }

    /// Start polling for updates
    async fn start_polling(&self, bot: Bot) -> Result<()> {
        let (tx, rx) = tokio::sync::oneshot::channel();
        *self.shutdown_signal.write().await = Some(tx);

        let db = self.db.clone();
        let app_config = self.app_config.clone();

        // Spawn polling task
        tokio::spawn(async move {
            bot::run_polling(bot, db, app_config, rx).await;
        });

        Ok(())
    }

    /// Stop the bot
    pub async fn stop(&self) -> Result<()> {
        // Send shutdown signal if polling is running
        if let Some(tx) = self.shutdown_signal.write().await.take() {
            let _ = tx.send(()); // Ignore error if receiver is already dropped
        }

        // Clear bot and config
        *self.bot.write().await = None;
        *self.config.write().await = None;

        tracing::info!("Telegram bot stopped");
        Ok(())
    }

    /// Update configuration and restart if needed
    pub async fn update_config(&self, config_id: Uuid) -> Result<()> {
        let repo = TelegramConfigRepository::new(self.db.connection());

        if let Some(config) = repo.find_by_id(config_id).await? {
            if config.is_active {
                self.start_with_config(config).await?;
            } else {
                self.stop().await?;
            }
        }

        Ok(())
    }

    /// Get current bot status
    pub async fn get_status(&self) -> BotStatus {
        let bot_guard = self.bot.read().await;
        let config_guard = self.config.read().await;

        BotStatus {
            is_running: bot_guard.is_some(),
            config: config_guard.clone(),
        }
    }

    /// Send message to user
    pub async fn send_message(&self, chat_id: i64, text: String) -> Result<()> {
        let bot_guard = self.bot.read().await;

        if let Some(bot) = bot_guard.as_ref() {
            bot.send_message(ChatId(chat_id), text).await?;
            Ok(())
        } else {
            Err(anyhow::anyhow!("Bot is not running"))
        }
    }

    /// Send message to user with inline keyboard
    pub async fn send_message_with_keyboard(
        &self,
        chat_id: i64,
        text: String,
        keyboard: teloxide::types::InlineKeyboardMarkup,
    ) -> Result<()> {
        let bot_guard = self.bot.read().await;

        if let Some(bot) = bot_guard.as_ref() {
            bot.send_message(ChatId(chat_id), text)
                .parse_mode(teloxide::types::ParseMode::Html)
                .reply_markup(keyboard)
                .await?;
            Ok(())
        } else {
            Err(anyhow::anyhow!("Bot is not running"))
        }
    }

    /// Send message to all admins
    pub async fn broadcast_to_admins(&self, text: String) -> Result<()> {
        let bot_guard = self.bot.read().await;

        if let Some(bot) = bot_guard.as_ref() {
            let user_repo = crate::database::repository::UserRepository::new(self.db.connection());
            let admins = user_repo.get_telegram_admins().await?;

            for admin in admins {
                if let Some(telegram_id) = admin.telegram_id {
                    if let Err(e) = bot.send_message(ChatId(telegram_id), text.clone()).await {
                        tracing::warn!("Failed to send message to admin {}: {}", telegram_id, e);
                    }
                }
            }

            Ok(())
        } else {
            Err(anyhow::anyhow!("Bot is not running"))
        }
    }
}

/// Bot status information
#[derive(Debug, Clone, serde::Serialize)]
pub struct BotStatus {
    pub is_running: bool,
    pub config: Option<TelegramConfig>,
}
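A minimal sketch of how the service above is intended to be driven from application startup; the `db` and `app_config` values are assumed to be constructed elsewhere in the application:

// Sketch only: TelegramService lifecycle, assuming `db: DatabaseManager` and
// `app_config: AppConfig` already exist.
let service = TelegramService::new(db, app_config);
service.initialize().await?; // starts polling only if an active config exists

let status = service.get_status().await;
tracing::info!("bot running: {}", status.is_running);

service.stop().await?; // sends the shutdown signal and clears bot/config state
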
134 src/services/uri_generator/builders/mod.rs (Normal file)
@@ -0,0 +1,134 @@
use crate::services::uri_generator::{error::UriGeneratorError, ClientConfigData};

pub mod shadowsocks;
pub mod trojan;
pub mod vless;
pub mod vmess;

pub use shadowsocks::ShadowsocksUriBuilder;
pub use trojan::TrojanUriBuilder;
pub use vless::VlessUriBuilder;
pub use vmess::VmessUriBuilder;

/// Common trait for all URI builders
pub trait UriBuilder {
    /// Build URI string from client configuration data
    fn build_uri(&self, config: &ClientConfigData) -> Result<String, UriGeneratorError>;

    /// Validate configuration for this protocol
    fn validate_config(&self, config: &ClientConfigData) -> Result<(), UriGeneratorError> {
        if config.hostname.is_empty() {
            return Err(UriGeneratorError::MissingRequiredField(
                "hostname".to_string(),
            ));
        }
        if config.port <= 0 || config.port > 65535 {
            return Err(UriGeneratorError::InvalidConfiguration(
                "Invalid port number".to_string(),
            ));
        }
        if config.xray_user_id.is_empty() {
            return Err(UriGeneratorError::MissingRequiredField(
                "xray_user_id".to_string(),
            ));
        }
        Ok(())
    }
}

/// Helper functions for URI building
pub mod utils {
    use serde_json::Value;
    use std::collections::HashMap;

    /// URL encode a string safely
    pub fn url_encode(input: &str) -> String {
        urlencoding::encode(input).to_string()
    }

    /// Build query string from parameters
    pub fn build_query_string(params: &HashMap<String, String>) -> String {
        let mut query_parts: Vec<String> = Vec::new();

        for (key, value) in params {
            if !value.is_empty() {
                query_parts.push(format!("{}={}", url_encode(key), url_encode(value)));
            }
        }

        query_parts.join("&")
    }

    /// Extract transport type from stream settings
    pub fn extract_transport_type(stream_settings: &Value) -> String {
        stream_settings
            .get("network")
            .and_then(|v| v.as_str())
            .unwrap_or("tcp")
            .to_string()
    }

    /// Extract security type from stream settings
    pub fn extract_security_type(stream_settings: &Value, has_certificate: bool) -> String {
        if has_certificate {
            stream_settings
                .get("security")
                .and_then(|v| v.as_str())
                .unwrap_or("tls")
                .to_string()
        } else {
            "none".to_string()
        }
    }

    /// Extract WebSocket path from stream settings
    pub fn extract_ws_path(stream_settings: &Value) -> Option<String> {
        stream_settings
            .get("wsSettings")
            .and_then(|ws| ws.get("path"))
            .and_then(|p| p.as_str())
            .map(|s| s.to_string())
    }

    /// Extract WebSocket host from stream settings
    pub fn extract_ws_host(stream_settings: &Value) -> Option<String> {
        stream_settings
            .get("wsSettings")
            .and_then(|ws| ws.get("headers"))
            .and_then(|headers| headers.get("Host"))
            .and_then(|host| host.as_str())
            .map(|s| s.to_string())
    }

    /// Extract gRPC service name from stream settings
    pub fn extract_grpc_service_name(stream_settings: &Value) -> Option<String> {
        stream_settings
            .get("grpcSettings")
            .and_then(|grpc| grpc.get("serviceName"))
            .and_then(|name| name.as_str())
            .map(|s| s.to_string())
    }

    /// Extract TLS SNI from stream settings
    pub fn extract_tls_sni(
        stream_settings: &Value,
        certificate_domain: Option<&str>,
    ) -> Option<String> {
        // Try stream settings first
        if let Some(sni) = stream_settings
            .get("tlsSettings")
            .and_then(|tls| tls.get("serverName"))
            .and_then(|sni| sni.as_str())
        {
            return Some(sni.to_string());
        }

        // Fall back to certificate domain
        certificate_domain.map(|s| s.to_string())
    }

    /// Determine alias for the URI
    pub fn generate_alias(server_name: &str, template_name: &str) -> String {
        format!("{} - {}", server_name, template_name)
    }
}
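A quick sketch of what the helpers above produce; the parameter values are hypothetical, empty values are skipped, and HashMap iteration order is unspecified, so the order of parameters in the result may vary:

// Sketch only: query-string assembly with utils::build_query_string from the module above.
use std::collections::HashMap;

let mut params = HashMap::new();
params.insert("type".to_string(), "ws".to_string());
params.insert("path".to_string(), "/ws".to_string());
params.insert("host".to_string(), String::new()); // empty values are dropped
let query = utils::build_query_string(&params);
// e.g. "type=ws&path=%2Fws" (parameter order not guaranteed)
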
102 src/services/uri_generator/builders/shadowsocks.rs (Normal file)
@@ -0,0 +1,102 @@
use base64::{engine::general_purpose, Engine as _};

use super::{utils, UriBuilder};
use crate::services::uri_generator::{error::UriGeneratorError, ClientConfigData};

pub struct ShadowsocksUriBuilder;

impl ShadowsocksUriBuilder {
    pub fn new() -> Self {
        Self
    }

    /// Map Xray cipher type to Shadowsocks method name
    fn map_xray_cipher_to_shadowsocks_method(&self, cipher: &str) -> &str {
        match cipher {
            // AES GCM variants
            "AES_256_GCM" | "aes-256-gcm" => "aes-256-gcm",
            "AES_128_GCM" | "aes-128-gcm" => "aes-128-gcm",

            // ChaCha20 variants
            "CHACHA20_POLY1305" | "chacha20-ietf-poly1305" | "chacha20-poly1305" => {
                "chacha20-ietf-poly1305"
            }

            // AES CFB variants
            "AES_256_CFB" | "aes-256-cfb" => "aes-256-cfb",
            "AES_128_CFB" | "aes-128-cfb" => "aes-128-cfb",

            // Legacy ciphers
            "RC4_MD5" | "rc4-md5" => "rc4-md5",
            "AES_256_CTR" | "aes-256-ctr" => "aes-256-ctr",
            "AES_128_CTR" | "aes-128-ctr" => "aes-128-ctr",

            // Default to most secure and widely supported
            _ => "aes-256-gcm",
        }
    }
}

impl UriBuilder for ShadowsocksUriBuilder {
    fn build_uri(&self, config: &ClientConfigData) -> Result<String, UriGeneratorError> {
        self.validate_config(config)?;

        // Get cipher type from base_settings and map to Shadowsocks method
        let cipher = config
            .base_settings
            .get("cipherType")
            .and_then(|c| c.as_str())
            .or_else(|| config.base_settings.get("method").and_then(|m| m.as_str()))
            .unwrap_or("AES_256_GCM");

        let method = self.map_xray_cipher_to_shadowsocks_method(cipher);

        // Shadowsocks SIP002 format: ss://base64(method:password)@hostname:port#remark
        // Use xray_user_id as password (following Marzban approach)
        let credentials = format!("{}:{}", method, config.xray_user_id);
        let encoded_credentials = general_purpose::STANDARD.encode(credentials.as_bytes());

        // Generate alias for the URI
        let alias = utils::generate_alias(&config.server_name, &config.template_name);

        // Build simple SIP002 URI (no plugin parameters for standard Shadowsocks)
        let uri = format!(
            "ss://{}@{}:{}#{}",
            encoded_credentials,
            config.hostname,
            config.port,
            utils::url_encode(&alias)
        );

        Ok(uri)
    }

    fn validate_config(&self, config: &ClientConfigData) -> Result<(), UriGeneratorError> {
        // Basic validation
        if config.hostname.is_empty() {
            return Err(UriGeneratorError::MissingRequiredField(
                "hostname".to_string(),
            ));
        }
        if config.port <= 0 || config.port > 65535 {
            return Err(UriGeneratorError::InvalidConfiguration(
                "Invalid port number".to_string(),
            ));
        }
        if config.xray_user_id.is_empty() {
            return Err(UriGeneratorError::MissingRequiredField(
                "xray_user_id".to_string(),
            ));
        }

        // Shadowsocks uses xray_user_id as password, already validated above

        Ok(())
    }
}

impl Default for ShadowsocksUriBuilder {
    fn default() -> Self {
        Self::new()
    }
}
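For illustration, the SIP002 URI produced by the builder above has the following shape; all concrete values here are hypothetical:

// Sketch: with method "aes-256-gcm", password (xray_user_id) "uuid-123",
// hostname "vpn.example.com", port 443 and alias "Server - Template",
// build_uri returns roughly:
//   ss://<base64("aes-256-gcm:uuid-123")>@vpn.example.com:443#Server%20-%20Template
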
222 src/services/uri_generator/builders/trojan.rs (Normal file)
@@ -0,0 +1,222 @@
|
|||||||
|
use serde_json::Value;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::{utils, UriBuilder};
|
||||||
|
use crate::services::uri_generator::{error::UriGeneratorError, ClientConfigData};
|
||||||
|
|
||||||
|
pub struct TrojanUriBuilder;
|
||||||
|
|
||||||
|
impl TrojanUriBuilder {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UriBuilder for TrojanUriBuilder {
|
||||||
|
fn build_uri(&self, config: &ClientConfigData) -> Result<String, UriGeneratorError> {
|
||||||
|
self.validate_config(config)?;
|
||||||
|
|
||||||
|
// Trojan uses xray_user_id as password
|
||||||
|
let password = &config.xray_user_id;
|
||||||
|
|
||||||
|
// Apply variable substitution to stream settings
|
||||||
|
let stream_settings = if !config.variable_values.is_null() {
|
||||||
|
apply_variables(&config.stream_settings, &config.variable_values)?
|
||||||
|
} else {
|
||||||
|
config.stream_settings.clone()
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut params = HashMap::new();
|
||||||
|
|
||||||
|
// Determine security layer (Trojan typically uses TLS)
|
||||||
|
let has_certificate = config.certificate_domain.is_some();
|
||||||
|
let security = utils::extract_security_type(&stream_settings, has_certificate);
|
||||||
|
|
||||||
|
// Trojan usually requires TLS, but allow other security types
|
||||||
|
if security != "none" {
|
||||||
|
params.insert("security".to_string(), security.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transport type - always specify explicitly
|
||||||
|
let transport_type = utils::extract_transport_type(&stream_settings);
|
||||||
|
params.insert("type".to_string(), transport_type.clone());
|
||||||
|
|
||||||
|
// Transport-specific parameters
|
||||||
|
match transport_type.as_str() {
|
||||||
|
"ws" => {
|
||||||
|
if let Some(path) = utils::extract_ws_path(&stream_settings) {
|
||||||
|
params.insert("path".to_string(), path);
|
||||||
|
}
|
||||||
|
if let Some(host) = utils::extract_ws_host(&stream_settings) {
|
||||||
|
params.insert("host".to_string(), host);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"grpc" => {
|
||||||
|
if let Some(service_name) = utils::extract_grpc_service_name(&stream_settings) {
|
||||||
|
params.insert("serviceName".to_string(), service_name);
|
||||||
|
}
|
||||||
|
// gRPC mode for Trojan
|
||||||
|
params.insert("mode".to_string(), "gun".to_string());
|
||||||
|
}
|
||||||
|
"tcp" => {
|
||||||
|
// Check for HTTP header type
|
||||||
|
if let Some(header_type) = stream_settings
|
||||||
|
.get("tcpSettings")
|
||||||
|
.and_then(|tcp| tcp.get("header"))
|
||||||
|
.and_then(|header| header.get("type"))
|
||||||
|
.and_then(|t| t.as_str())
|
||||||
|
{
|
||||||
|
if header_type != "none" {
|
||||||
|
params.insert("headerType".to_string(), header_type.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {} // Other transport types
|
||||||
|
}
|
||||||
|
|
||||||
|
// TLS/Security specific parameters
|
||||||
|
if security == "tls" || security == "reality" {
|
||||||
|
if let Some(sni) =
|
||||||
|
utils::extract_tls_sni(&stream_settings, config.certificate_domain.as_deref())
|
||||||
|
{
|
||||||
|
params.insert("sni".to_string(), sni);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TLS fingerprint
|
||||||
|
if let Some(fp) = stream_settings
|
||||||
|
.get("tlsSettings")
|
||||||
|
.and_then(|tls| tls.get("fingerprint"))
|
||||||
|
.and_then(|fp| fp.as_str())
|
||||||
|
{
|
||||||
|
params.insert("fp".to_string(), fp.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
// ALPN
|
||||||
|
if let Some(alpn) = stream_settings
|
||||||
|
.get("tlsSettings")
|
||||||
|
.and_then(|tls| tls.get("alpn"))
|
||||||
|
.and_then(|alpn| alpn.as_array())
|
||||||
|
{
|
||||||
|
let alpn_str = alpn
|
||||||
|
.iter()
|
||||||
|
.filter_map(|v| v.as_str())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(",");
|
||||||
|
if !alpn_str.is_empty() {
|
||||||
|
params.insert("alpn".to_string(), alpn_str);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allow insecure connections (optional)
|
||||||
|
if let Some(allow_insecure) = stream_settings
|
||||||
|
.get("tlsSettings")
|
||||||
|
.and_then(|tls| tls.get("allowInsecure"))
|
||||||
|
.and_then(|ai| ai.as_bool())
|
||||||
|
{
|
||||||
|
if allow_insecure {
|
||||||
|
params.insert("allowInsecure".to_string(), "1".to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// REALITY specific parameters
|
||||||
|
if security == "reality" {
|
||||||
|
if let Some(pbk) = stream_settings
|
||||||
|
.get("realitySettings")
|
||||||
|
.and_then(|reality| reality.get("publicKey"))
|
||||||
|
.and_then(|pbk| pbk.as_str())
|
||||||
|
{
|
||||||
|
params.insert("pbk".to_string(), pbk.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(sid) = stream_settings
|
||||||
|
.get("realitySettings")
|
||||||
|
.and_then(|reality| reality.get("shortId"))
|
||||||
|
.and_then(|sid| sid.as_str())
|
||||||
|
{
|
||||||
|
params.insert("sid".to_string(), sid.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Flow control for XTLS (if supported)
|
||||||
|
if let Some(flow) = stream_settings.get("flow").and_then(|f| f.as_str()) {
|
||||||
|
params.insert("flow".to_string(), flow.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the URI
|
||||||
|
let query_string = utils::build_query_string(¶ms);
|
||||||
|
let alias = utils::generate_alias(&config.server_name, &config.template_name);
|
||||||
|
|
||||||
|
let uri = if query_string.is_empty() {
|
||||||
|
format!(
|
||||||
|
"trojan://{}@{}:{}#{}",
|
||||||
|
utils::url_encode(password),
|
||||||
|
config.hostname,
|
||||||
|
config.port,
|
||||||
|
utils::url_encode(&alias)
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
format!(
|
||||||
|
"trojan://{}@{}:{}?{}#{}",
|
||||||
|
utils::url_encode(password),
|
||||||
|
config.hostname,
|
||||||
|
config.port,
|
||||||
|
query_string,
|
||||||
|
utils::url_encode(&alias)
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(uri)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn validate_config(&self, config: &ClientConfigData) -> Result<(), UriGeneratorError> {
|
||||||
|
// Basic validation
|
||||||
|
if config.hostname.is_empty() {
|
||||||
|
return Err(UriGeneratorError::MissingRequiredField(
|
||||||
|
"hostname".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if config.port <= 0 || config.port > 65535 {
|
||||||
|
return Err(UriGeneratorError::InvalidConfiguration(
|
||||||
|
"Invalid port number".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if config.xray_user_id.is_empty() {
|
||||||
|
return Err(UriGeneratorError::MissingRequiredField(
|
||||||
|
"xray_user_id".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trojan uses xray_user_id as password, already validated above
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for TrojanUriBuilder {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply variable substitution to JSON value
|
||||||
|
fn apply_variables(template: &Value, variables: &Value) -> Result<Value, UriGeneratorError> {
|
||||||
|
let template_str = template.to_string();
|
||||||
|
let mut result = template_str;
|
||||||
|
|
||||||
|
if let Value::Object(var_map) = variables {
|
||||||
|
for (key, value) in var_map {
|
||||||
|
let placeholder = format!("${{{}}}", key);
|
||||||
|
let replacement = match value {
|
||||||
|
Value::String(s) => s.clone(),
|
||||||
|
Value::Number(n) => n.to_string(),
|
||||||
|
Value::Bool(b) => b.to_string(),
|
||||||
|
_ => value.to_string().trim_matches('"').to_string(),
|
||||||
|
};
|
||||||
|
result = result.replace(&placeholder, &replacement);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
serde_json::from_str(&result)
|
||||||
|
.map_err(|e| UriGeneratorError::VariableSubstitution(e.to_string()))
|
||||||
|
}
|
||||||
171 src/services/uri_generator/builders/vless.rs Normal file
@@ -0,0 +1,171 @@
use serde_json::Value;
use std::collections::HashMap;

use super::{utils, UriBuilder};
use crate::services::uri_generator::{error::UriGeneratorError, ClientConfigData};

pub struct VlessUriBuilder;

impl VlessUriBuilder {
    pub fn new() -> Self {
        Self
    }
}

impl UriBuilder for VlessUriBuilder {
    fn build_uri(&self, config: &ClientConfigData) -> Result<String, UriGeneratorError> {
        self.validate_config(config)?;

        // Apply variable substitution to stream settings
        let stream_settings = if !config.variable_values.is_null() {
            // Simple variable substitution for stream settings
            apply_variables(&config.stream_settings, &config.variable_values)?
        } else {
            config.stream_settings.clone()
        };

        let mut params = HashMap::new();

        // VLESS always uses no encryption
        params.insert("encryption".to_string(), "none".to_string());

        // Determine security layer
        let has_certificate = config.certificate_domain.is_some();
        let security = utils::extract_security_type(&stream_settings, has_certificate);
        if security != "none" {
            params.insert("security".to_string(), security.clone());
        }

        // Transport type - always specify explicitly
        let transport_type = utils::extract_transport_type(&stream_settings);
        params.insert("type".to_string(), transport_type.clone());

        // Transport-specific parameters
        match transport_type.as_str() {
            "ws" => {
                if let Some(path) = utils::extract_ws_path(&stream_settings) {
                    params.insert("path".to_string(), path);
                }
                if let Some(host) = utils::extract_ws_host(&stream_settings) {
                    params.insert("host".to_string(), host);
                }
            }
            "grpc" => {
                if let Some(service_name) = utils::extract_grpc_service_name(&stream_settings) {
                    params.insert("serviceName".to_string(), service_name);
                }
                // Default gRPC mode
                params.insert("mode".to_string(), "gun".to_string());
            }
            "tcp" => {
                // Check for HTTP header type
                if let Some(header_type) = stream_settings
                    .get("tcpSettings")
                    .and_then(|tcp| tcp.get("header"))
                    .and_then(|header| header.get("type"))
                    .and_then(|t| t.as_str())
                {
                    if header_type != "none" {
                        params.insert("headerType".to_string(), header_type.to_string());
                    }
                }
            }
            _ => {} // Other transport types can be added as needed
        }

        // TLS/Security specific parameters
        if security == "tls" || security == "reality" {
            if let Some(sni) =
                utils::extract_tls_sni(&stream_settings, config.certificate_domain.as_deref())
            {
                params.insert("sni".to_string(), sni);
            }

            // TLS fingerprint
            if let Some(fp) = stream_settings
                .get("tlsSettings")
                .and_then(|tls| tls.get("fingerprint"))
                .and_then(|fp| fp.as_str())
            {
                params.insert("fp".to_string(), fp.to_string());
            }

            // REALITY specific parameters
            if security == "reality" {
                if let Some(pbk) = stream_settings
                    .get("realitySettings")
                    .and_then(|reality| reality.get("publicKey"))
                    .and_then(|pbk| pbk.as_str())
                {
                    params.insert("pbk".to_string(), pbk.to_string());
                }

                if let Some(sid) = stream_settings
                    .get("realitySettings")
                    .and_then(|reality| reality.get("shortId"))
                    .and_then(|sid| sid.as_str())
                {
                    params.insert("sid".to_string(), sid.to_string());
                }
            }
        }

        // Flow control for XTLS
        if let Some(flow) = stream_settings.get("flow").and_then(|f| f.as_str()) {
            params.insert("flow".to_string(), flow.to_string());
        }

        // Build the URI
        let query_string = utils::build_query_string(&params);
        let alias = utils::generate_alias(&config.server_name, &config.template_name);

        let uri = if query_string.is_empty() {
            format!(
                "vless://{}@{}:{}#{}",
                config.xray_user_id,
                config.hostname,
                config.port,
                utils::url_encode(&alias)
            )
        } else {
            format!(
                "vless://{}@{}:{}?{}#{}",
                config.xray_user_id,
                config.hostname,
                config.port,
                query_string,
                utils::url_encode(&alias)
            )
        };

        Ok(uri)
    }
}

impl Default for VlessUriBuilder {
    fn default() -> Self {
        Self::new()
    }
}

/// Apply variable substitution to JSON value
fn apply_variables(template: &Value, variables: &Value) -> Result<Value, UriGeneratorError> {
    let template_str = template.to_string();
    let mut result = template_str;

    if let Value::Object(var_map) = variables {
        for (key, value) in var_map {
            let placeholder = format!("${{{}}}", key);
            let replacement = match value {
                Value::String(s) => s.clone(),
                Value::Number(n) => n.to_string(),
                Value::Bool(b) => b.to_string(),
                _ => value.to_string().trim_matches('"').to_string(),
            };
            result = result.replace(&placeholder, &replacement);
        }
    }

    serde_json::from_str(&result)
        .map_err(|e| UriGeneratorError::VariableSubstitution(e.to_string()))
}
183 src/services/uri_generator/builders/vmess.rs Normal file
@@ -0,0 +1,183 @@
use base64::{engine::general_purpose, Engine as _};
use serde_json::{json, Value};

use super::{utils, UriBuilder};
use crate::services::uri_generator::{error::UriGeneratorError, ClientConfigData};

pub struct VmessUriBuilder;

impl VmessUriBuilder {
    pub fn new() -> Self {
        Self
    }

    /// Build VMess URI in Base64 JSON format (following Marzban approach)
    fn build_base64_json_uri(
        &self,
        config: &ClientConfigData,
    ) -> Result<String, UriGeneratorError> {
        // Apply variable substitution to stream settings
        let stream_settings = if !config.variable_values.is_null() {
            apply_variables(&config.stream_settings, &config.variable_values)?
        } else {
            config.stream_settings.clone()
        };

        let transport_type = utils::extract_transport_type(&stream_settings);
        let has_certificate = config.certificate_domain.is_some();
        let security = utils::extract_security_type(&stream_settings, has_certificate);

        // Build VMess JSON configuration following Marzban structure
        let mut vmess_config = json!({
            "add": config.hostname,
            "aid": "0",
            "host": "",
            "id": config.xray_user_id,
            "net": transport_type,
            "path": "",
            "port": config.port,
            "ps": utils::generate_alias(&config.server_name, &config.template_name),
            "scy": "auto",
            "tls": if security == "none" { "none" } else { &security },
            "type": "none",
            "v": "2"
        });

        // Transport-specific settings
        match transport_type.as_str() {
            "ws" => {
                if let Some(path) = utils::extract_ws_path(&stream_settings) {
                    vmess_config["path"] = Value::String(path);
                }
                if let Some(host) = utils::extract_ws_host(&stream_settings) {
                    vmess_config["host"] = Value::String(host);
                }
            }
            "grpc" => {
                if let Some(service_name) = utils::extract_grpc_service_name(&stream_settings) {
                    vmess_config["path"] = Value::String(service_name);
                }
                // For gRPC in VMess, use "gun" type
                vmess_config["type"] = Value::String("gun".to_string());
            }
            "tcp" => {
                // Check for HTTP header type
                if let Some(header_type) = stream_settings
                    .get("tcpSettings")
                    .and_then(|tcp| tcp.get("header"))
                    .and_then(|header| header.get("type"))
                    .and_then(|t| t.as_str())
                {
                    vmess_config["type"] = Value::String(header_type.to_string());

                    // If HTTP headers, get host and path
                    if header_type == "http" {
                        if let Some(host) = stream_settings
                            .get("tcpSettings")
                            .and_then(|tcp| tcp.get("header"))
                            .and_then(|header| header.get("request"))
                            .and_then(|request| request.get("headers"))
                            .and_then(|headers| headers.get("Host"))
                            .and_then(|host| host.as_array())
                            .and_then(|arr| arr.first())
                            .and_then(|h| h.as_str())
                        {
                            vmess_config["host"] = Value::String(host.to_string());
                        }

                        if let Some(path) = stream_settings
                            .get("tcpSettings")
                            .and_then(|tcp| tcp.get("header"))
                            .and_then(|header| header.get("request"))
                            .and_then(|request| request.get("path"))
                            .and_then(|path| path.as_array())
                            .and_then(|arr| arr.first())
                            .and_then(|p| p.as_str())
                        {
                            vmess_config["path"] = Value::String(path.to_string());
                        }
                    }
                }
            }
            _ => {} // Other transport types
        }

        // TLS settings
        if security != "none" {
            if let Some(sni) =
                utils::extract_tls_sni(&stream_settings, config.certificate_domain.as_deref())
            {
                vmess_config["sni"] = Value::String(sni);
            }

            // TLS fingerprint
            if let Some(fp) = stream_settings
                .get("tlsSettings")
                .and_then(|tls| tls.get("fingerprint"))
                .and_then(|fp| fp.as_str())
            {
                vmess_config["fp"] = Value::String(fp.to_string());
            }

            // ALPN
            if let Some(alpn) = stream_settings
                .get("tlsSettings")
                .and_then(|tls| tls.get("alpn"))
                .and_then(|alpn| alpn.as_array())
            {
                let alpn_str = alpn
                    .iter()
                    .filter_map(|v| v.as_str())
                    .collect::<Vec<_>>()
                    .join(",");
                if !alpn_str.is_empty() {
                    vmess_config["alpn"] = Value::String(alpn_str);
                }
            }
        }

        // Convert to JSON string and encode in Base64
        let json_string = vmess_config.to_string();
        let encoded = general_purpose::STANDARD.encode(json_string.as_bytes());

        Ok(format!("vmess://{}", encoded))
    }
}

impl UriBuilder for VmessUriBuilder {
    fn build_uri(&self, config: &ClientConfigData) -> Result<String, UriGeneratorError> {
        self.validate_config(config)?;

        // Prefer Base64 JSON format as it's more widely supported
        self.build_base64_json_uri(config)
    }
}

impl Default for VmessUriBuilder {
    fn default() -> Self {
        Self::new()
    }
}

/// Apply variable substitution to JSON value
fn apply_variables(template: &Value, variables: &Value) -> Result<Value, UriGeneratorError> {
    let template_str = template.to_string();
    let mut result = template_str;

    if let Value::Object(var_map) = variables {
        for (key, value) in var_map {
            let placeholder = format!("${{{}}}", key);
            let replacement = match value {
                Value::String(s) => s.clone(),
                Value::Number(n) => n.to_string(),
                Value::Bool(b) => b.to_string(),
                _ => value.to_string().trim_matches('"').to_string(),
            };
            result = result.replace(&placeholder, &replacement);
        }
    }

    serde_json::from_str(&result)
        .map_err(|e| UriGeneratorError::VariableSubstitution(e.to_string()))
}
51 src/services/uri_generator/error.rs Normal file
@@ -0,0 +1,51 @@
use std::fmt;

#[derive(Debug)]
pub enum UriGeneratorError {
    UnsupportedProtocol(String),
    MissingRequiredField(String),
    InvalidConfiguration(String),
    VariableSubstitution(String),
    JsonParsing(String),
    UriEncoding(String),
}

impl fmt::Display for UriGeneratorError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            UriGeneratorError::UnsupportedProtocol(protocol) => {
                write!(f, "Unsupported protocol: {}", protocol)
            }
            UriGeneratorError::MissingRequiredField(field) => {
                write!(f, "Missing required field: {}", field)
            }
            UriGeneratorError::InvalidConfiguration(msg) => {
                write!(f, "Invalid configuration: {}", msg)
            }
            UriGeneratorError::VariableSubstitution(msg) => {
                write!(f, "Variable substitution error: {}", msg)
            }
            UriGeneratorError::JsonParsing(msg) => {
                write!(f, "JSON parsing error: {}", msg)
            }
            UriGeneratorError::UriEncoding(msg) => {
                write!(f, "URI encoding error: {}", msg)
            }
        }
    }
}

impl std::error::Error for UriGeneratorError {}

impl From<serde_json::Error> for UriGeneratorError {
    fn from(err: serde_json::Error) -> Self {
        UriGeneratorError::JsonParsing(err.to_string())
    }
}

// Note: urlencoding crate doesn't have EncodingError in current version
// impl From<urlencoding::EncodingError> for UriGeneratorError {
//     fn from(err: urlencoding::EncodingError) -> Self {
//         UriGeneratorError::UriEncoding(err.to_string())
//     }
// }
404 src/services/uri_generator/mod.rs Normal file
@@ -0,0 +1,404 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use uuid::Uuid;

pub mod builders;
pub mod error;

use builders::{
    ShadowsocksUriBuilder, TrojanUriBuilder, UriBuilder, VlessUriBuilder, VmessUriBuilder,
};
use error::UriGeneratorError;

/// Complete client configuration data aggregated from database
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientConfigData {
    // User credentials
    pub user_name: String,
    pub xray_user_id: String,
    pub password: Option<String>,
    pub level: i32,

    // Server connection
    pub hostname: String,
    pub port: i32,

    // Protocol & transport
    pub protocol: String,
    pub stream_settings: Value,
    pub base_settings: Value,

    // Security
    pub certificate_domain: Option<String>,
    pub requires_tls: bool,

    // Variable substitution
    pub variable_values: Value,

    // Metadata
    pub server_name: String,
    pub inbound_tag: String,
    pub template_name: String,
}

/// Generated client configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientConfig {
    pub user_id: Uuid,
    pub server_name: String,
    pub inbound_tag: String,
    pub template_name: String,
    pub protocol: String,
    pub uri: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub qr_code: Option<String>,
}

/// URI Generator Service
pub struct UriGeneratorService;

impl UriGeneratorService {
    pub fn new() -> Self {
        Self
    }

    /// Generate URI for specific protocol and configuration
    pub fn generate_uri(&self, config: &ClientConfigData) -> Result<String, UriGeneratorError> {
        let protocol = config.protocol.as_str();

        match protocol {
            "vless" => {
                let builder = VlessUriBuilder::new();
                builder.build_uri(config)
            }
            "vmess" => {
                let builder = VmessUriBuilder::new();
                builder.build_uri(config)
            }
            "trojan" => {
                let builder = TrojanUriBuilder::new();
                builder.build_uri(config)
            }
            "shadowsocks" => {
                let builder = ShadowsocksUriBuilder::new();
                builder.build_uri(config)
            }
            _ => Err(UriGeneratorError::UnsupportedProtocol(protocol.to_string())),
        }
    }

    /// Generate complete client configuration
    pub fn generate_client_config(
        &self,
        user_id: Uuid,
        config: &ClientConfigData,
    ) -> Result<ClientConfig, UriGeneratorError> {
        let uri = self.generate_uri(config)?;

        Ok(ClientConfig {
            user_id,
            server_name: config.server_name.clone(),
            inbound_tag: config.inbound_tag.clone(),
            template_name: config.template_name.clone(),
            protocol: config.protocol.clone(),
            uri,
            qr_code: None, // TODO: Implement QR code generation if needed
        })
    }

    /// Apply variable substitution to JSON values (for testing)
    #[cfg(test)]
    pub fn apply_variable_substitution(
        &self,
        template: &Value,
        variables: &Value,
    ) -> Result<Value, UriGeneratorError> {
        let template_str = template.to_string();
        let mut result = template_str;

        if let Value::Object(var_map) = variables {
            for (key, value) in var_map {
                let placeholder = format!("${{{}}}", key);
                let replacement = match value {
                    Value::String(s) => s.clone(),
                    Value::Number(n) => n.to_string(),
                    Value::Bool(b) => b.to_string(),
                    _ => value.to_string().trim_matches('"').to_string(),
                };
                result = result.replace(&placeholder, &replacement);
            }
        }

        serde_json::from_str(&result)
            .map_err(|e| UriGeneratorError::VariableSubstitution(e.to_string()))
    }
}

impl Default for UriGeneratorService {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;
    use uuid::Uuid;

    fn create_test_config(protocol: &str) -> ClientConfigData {
        ClientConfigData {
            user_name: "testuser".to_string(),
            xray_user_id: "test-uuid-123".to_string(),
            password: Some("test-password".to_string()),
            level: 0,
            hostname: "example.com".to_string(),
            port: 8443,
            protocol: protocol.to_string(),
            stream_settings: json!({
                "network": "tcp",
                "security": "tls"
            }),
            base_settings: json!({
                "clients": []
            }),
            certificate_domain: Some("example.com".to_string()),
            requires_tls: true,
            variable_values: json!({
                "domain": "example.com",
                "port": "8443"
            }),
            server_name: "test-server".to_string(),
            inbound_tag: "test-inbound".to_string(),
            template_name: "test-template".to_string(),
        }
    }

    #[test]
    fn test_uri_generator_service_creation() {
        let service = UriGeneratorService::new();
        // Service should be created successfully
        assert_eq!(std::mem::size_of_val(&service), 0); // Zero-sized struct
    }

    #[test]
    fn test_generate_uri_vless() {
        let service = UriGeneratorService::new();
        let config = create_test_config("vless");

        let result = service.generate_uri(&config);
        assert!(result.is_ok());

        let uri = result.unwrap();
        assert!(uri.starts_with("vless://"));
        assert!(uri.contains("test-uuid-123"));
        assert!(uri.contains("example.com:8443"));
    }

    #[test]
    fn test_generate_uri_vmess() {
        let service = UriGeneratorService::new();
        let config = create_test_config("vmess");

        let result = service.generate_uri(&config);
        assert!(result.is_ok());

        let uri = result.unwrap();
        assert!(uri.starts_with("vmess://"));
    }

    #[test]
    fn test_generate_uri_trojan() {
        let service = UriGeneratorService::new();
        let config = create_test_config("trojan");

        let result = service.generate_uri(&config);
        assert!(result.is_ok());

        let uri = result.unwrap();
        assert!(uri.starts_with("trojan://"));
        assert!(uri.contains("test-uuid-123")); // trojan uses xray_user_id as password
        assert!(uri.contains("example.com:8443"));
    }

    #[test]
    fn test_generate_uri_shadowsocks() {
        let service = UriGeneratorService::new();
        let config = create_test_config("shadowsocks");

        let result = service.generate_uri(&config);
        assert!(result.is_ok());

        let uri = result.unwrap();
        assert!(uri.starts_with("ss://"));
    }

    #[test]
    fn test_generate_uri_unsupported_protocol() {
        let service = UriGeneratorService::new();
        let config = create_test_config("unsupported");

        let result = service.generate_uri(&config);
        assert!(result.is_err());

        match result.unwrap_err() {
            UriGeneratorError::UnsupportedProtocol(protocol) => {
                assert_eq!(protocol, "unsupported");
            }
            _ => panic!("Expected UnsupportedProtocol error"),
        }
    }

    #[test]
    fn test_generate_client_config() {
        let service = UriGeneratorService::new();
        let config_data = create_test_config("vless");
        let user_id = Uuid::new_v4();

        let result = service.generate_client_config(user_id, &config_data);
        assert!(result.is_ok());

        let client_config = result.unwrap();
        assert_eq!(client_config.user_id, user_id);
        assert_eq!(client_config.server_name, "test-server");
        assert_eq!(client_config.inbound_tag, "test-inbound");
        assert_eq!(client_config.template_name, "test-template");
        assert_eq!(client_config.protocol, "vless");
        assert!(client_config.uri.starts_with("vless://"));
        assert!(client_config.qr_code.is_none());
    }

    #[test]
    fn test_apply_variable_substitution() {
        let service = UriGeneratorService::new();

        let template = json!({
            "hostname": "${domain}",
            "port": "${port}",
            "fixed": "value"
        });

        let variables = json!({
            "domain": "test.example.com",
            "port": "9443"
        });

        let result = service.apply_variable_substitution(&template, &variables);
        assert!(result.is_ok());

        let substituted = result.unwrap();
        assert_eq!(substituted["hostname"], "test.example.com");
        assert_eq!(substituted["port"], "9443");
        assert_eq!(substituted["fixed"], "value");
    }

    #[test]
    fn test_apply_variable_substitution_no_variables() {
        let service = UriGeneratorService::new();

        let template = json!({
            "hostname": "static.example.com",
            "port": "8443"
        });

        let variables = json!({});

        let result = service.apply_variable_substitution(&template, &variables);
        assert!(result.is_ok());

        let substituted = result.unwrap();
        assert_eq!(substituted["hostname"], "static.example.com");
        assert_eq!(substituted["port"], "8443");
    }

    #[test]
    fn test_apply_variable_substitution_partial_match() {
        let service = UriGeneratorService::new();

        let template = json!({
            "hostname": "${domain}",
            "port": "${unknown_var}",
            "static": "value"
        });

        let variables = json!({
            "domain": "test.example.com"
        });

        let result = service.apply_variable_substitution(&template, &variables);
        assert!(result.is_ok());

        let substituted = result.unwrap();
        assert_eq!(substituted["hostname"], "test.example.com");
        assert_eq!(substituted["port"], "${unknown_var}"); // Should remain unchanged
        assert_eq!(substituted["static"], "value");
    }

    #[test]
    fn test_client_config_data_fields() {
        let config = create_test_config("vless");

        assert_eq!(config.user_name, "testuser");
        assert_eq!(config.xray_user_id, "test-uuid-123");
        assert_eq!(config.password, Some("test-password".to_string()));
        assert_eq!(config.level, 0);
        assert_eq!(config.hostname, "example.com");
        assert_eq!(config.port, 8443);
        assert_eq!(config.protocol, "vless");
        assert_eq!(config.certificate_domain, Some("example.com".to_string()));
        assert!(config.requires_tls);
        assert_eq!(config.server_name, "test-server");
        assert_eq!(config.inbound_tag, "test-inbound");
        assert_eq!(config.template_name, "test-template");
    }

    #[test]
    fn test_client_config_serialization() {
        let user_id = Uuid::new_v4();
        let client_config = ClientConfig {
            user_id,
            server_name: "test-server".to_string(),
            inbound_tag: "test-inbound".to_string(),
            template_name: "test-template".to_string(),
            protocol: "vless".to_string(),
            uri: "vless://test-uri".to_string(),
            qr_code: Some("qr-code-data".to_string()),
        };

        // Test serialization
        let serialized = serde_json::to_string(&client_config);
        assert!(serialized.is_ok());

        // Test deserialization
        let deserialized: Result<ClientConfig, _> = serde_json::from_str(&serialized.unwrap());
        assert!(deserialized.is_ok());

        let config = deserialized.unwrap();
        assert_eq!(config.user_id, user_id);
        assert_eq!(config.server_name, "test-server");
        assert_eq!(config.protocol, "vless");
        assert_eq!(config.uri, "vless://test-uri");
        assert_eq!(config.qr_code, Some("qr-code-data".to_string()));
    }

    #[test]
    fn test_client_config_qr_code_optional() {
        let user_id = Uuid::new_v4();
        let client_config = ClientConfig {
            user_id,
            server_name: "test-server".to_string(),
            inbound_tag: "test-inbound".to_string(),
            template_name: "test-template".to_string(),
            protocol: "vless".to_string(),
            uri: "vless://test-uri".to_string(),
            qr_code: None,
        };

        let serialized = serde_json::to_string(&client_config).unwrap();

        // QR code field should be omitted when None due to skip_serializing_if
        assert!(!serialized.contains("qr_code"));
    }
}
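For orientation while reading the diff, here is a minimal usage sketch of the URI generator service defined above. It is illustrative only and not part of the commit; the field values and the surrounding function are invented for the example, and it assumes a fully populated ClientConfigData as constructed elsewhere in the application.

    use uuid::Uuid;
    use crate::services::uri_generator::{ClientConfigData, UriGeneratorService};

    fn print_client_uri(config: &ClientConfigData) -> anyhow::Result<()> {
        // generate_client_config() dispatches on config.protocol to the matching
        // builder ("vless", "vmess", "trojan", "shadowsocks") and wraps the URI
        // with the server/inbound/template metadata.
        let service = UriGeneratorService::new();
        let client_config = service.generate_client_config(Uuid::new_v4(), config)?;
        println!("{}", client_config.uri); // e.g. "vless://<uuid>@host:port?...#alias"
        Ok(())
    }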
115 src/services/xray/client.rs Normal file
@@ -0,0 +1,115 @@
use anyhow::{anyhow, Result};
use serde_json::Value;
use std::sync::Arc;
use tokio::time::{timeout, Duration};
use xray_core::Client;

// Import submodules from the same directory
use super::inbounds::InboundClient;
use super::stats::StatsClient;
use super::users::UserClient;

/// Xray gRPC client wrapper
#[derive(Clone)]
pub struct XrayClient {
    endpoint: String,
    client: Arc<Client>,
}

#[allow(dead_code)]
impl XrayClient {
    /// Connect to Xray gRPC server with timeout
    pub async fn connect(endpoint: &str) -> Result<Self> {
        // Apply a 5-second timeout to the connection attempt
        let connect_future = Client::from_url(endpoint);

        match timeout(Duration::from_secs(5), connect_future).await {
            Ok(Ok(client)) => Ok(Self {
                endpoint: endpoint.to_string(),
                client: Arc::new(client),
            }),
            Ok(Err(e)) => Err(anyhow!("Failed to connect to Xray at {}: {}", endpoint, e)),
            Err(_) => Err(anyhow!(
                "Connection to Xray at {} timed out after 5 seconds",
                endpoint
            )),
        }
    }

    /// Get server statistics
    pub async fn get_stats(&self) -> Result<Value> {
        let stats_client = StatsClient::new(self.endpoint.clone(), &*self.client);
        stats_client.get_stats().await
    }

    /// Query specific statistics with pattern
    pub async fn query_stats(&self, pattern: &str, reset: bool) -> Result<Value> {
        let stats_client = StatsClient::new(self.endpoint.clone(), &*self.client);
        stats_client.query_stats(pattern, reset).await
    }

    /// Restart Xray with new configuration
    pub async fn restart_with_config(
        &self,
        config: &crate::services::xray::XrayConfig,
    ) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &*self.client);
        inbound_client.restart_with_config(config).await
    }

    /// Add inbound configuration
    pub async fn add_inbound(&self, inbound: &Value) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &*self.client);
        inbound_client.add_inbound(inbound).await
    }

    /// Add inbound configuration with TLS certificate
    pub async fn add_inbound_with_certificate(
        &self,
        inbound: &Value,
        cert_pem: Option<&str>,
        key_pem: Option<&str>,
    ) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &*self.client);
        inbound_client
            .add_inbound_with_certificate(inbound, None, cert_pem, key_pem)
            .await
    }

    /// Add inbound configuration with users and TLS certificate
    pub async fn add_inbound_with_users_and_certificate(
        &self,
        inbound: &Value,
        users: &[Value],
        cert_pem: Option<&str>,
        key_pem: Option<&str>,
    ) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &*self.client);
        inbound_client
            .add_inbound_with_certificate(inbound, Some(users), cert_pem, key_pem)
            .await
    }

    /// Remove inbound by tag
    pub async fn remove_inbound(&self, tag: &str) -> Result<()> {
        let inbound_client = InboundClient::new(self.endpoint.clone(), &*self.client);
        inbound_client.remove_inbound(tag).await
    }

    /// Add user to inbound
    pub async fn add_user(&self, inbound_tag: &str, user: &Value) -> Result<()> {
        let user_client = UserClient::new(self.endpoint.clone(), &*self.client);
        user_client.add_user(inbound_tag, user).await
    }

    /// Remove user from inbound
    pub async fn remove_user(&self, inbound_tag: &str, email: &str) -> Result<()> {
        let user_client = UserClient::new(self.endpoint.clone(), &*self.client);
        user_client.remove_user(inbound_tag, email).await
    }

    /// Get connection endpoint
    pub fn endpoint(&self) -> &str {
        &self.endpoint
    }
}
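As a rough illustration of how the wrapper above is meant to be driven (not part of the commit; the endpoint URL, port, and inbound JSON below are made up for the example):

    use serde_json::json;
    use crate::services::xray::XrayClient;

    async fn example() -> anyhow::Result<()> {
        // connect() enforces the 5-second timeout shown above.
        let client = XrayClient::connect("http://127.0.0.1:10085").await?;

        // Minimal inbound description consumed by InboundClient::add_inbound_with_certificate().
        let inbound = json!({ "tag": "vless-in", "port": 8443, "protocol": "vless" });
        client.add_inbound(&inbound).await?;

        // Tear the same inbound down again by tag.
        client.remove_inbound("vless-in").await?;
        Ok(())
    }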
286 src/services/xray/config.rs Normal file
@@ -0,0 +1,286 @@
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;

/// Xray configuration structure
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct XrayConfig {
    pub log: LogConfig,
    pub api: ApiConfig,
    pub dns: Option<DnsConfig>,
    pub routing: Option<RoutingConfig>,
    pub policy: Option<PolicyConfig>,
    pub inbounds: Vec<InboundConfig>,
    pub outbounds: Vec<OutboundConfig>,
    pub transport: Option<TransportConfig>,
    pub stats: Option<StatsConfig>,
    pub reverse: Option<ReverseConfig>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogConfig {
    pub access: Option<String>,
    pub error: Option<String>,
    #[serde(rename = "loglevel")]
    pub log_level: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiConfig {
    pub tag: String,
    pub listen: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DnsConfig {
    pub servers: Vec<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoutingConfig {
    #[serde(rename = "domainStrategy")]
    pub domain_strategy: Option<String>,
    pub rules: Vec<RoutingRule>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoutingRule {
    #[serde(rename = "type")]
    pub rule_type: String,
    pub domain: Option<Vec<String>>,
    pub ip: Option<Vec<String>>,
    pub port: Option<String>,
    #[serde(rename = "outboundTag")]
    pub outbound_tag: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PolicyConfig {
    pub levels: HashMap<String, PolicyLevel>,
    pub system: Option<SystemPolicy>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PolicyLevel {
    #[serde(rename = "handshakeTimeout")]
    pub handshake_timeout: Option<u32>,
    #[serde(rename = "connIdle")]
    pub conn_idle: Option<u32>,
    #[serde(rename = "uplinkOnly")]
    pub uplink_only: Option<u32>,
    #[serde(rename = "downlinkOnly")]
    pub downlink_only: Option<u32>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SystemPolicy {
    #[serde(rename = "statsInboundUplink")]
    pub stats_inbound_uplink: Option<bool>,
    #[serde(rename = "statsInboundDownlink")]
    pub stats_inbound_downlink: Option<bool>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InboundConfig {
    pub tag: String,
    pub port: u16,
    pub listen: Option<String>,
    pub protocol: String,
    pub settings: Value,
    #[serde(rename = "streamSettings")]
    pub stream_settings: Option<Value>,
    pub sniffing: Option<SniffingConfig>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutboundConfig {
    pub tag: String,
    pub protocol: String,
    pub settings: Value,
    #[serde(rename = "streamSettings")]
    pub stream_settings: Option<Value>,
    pub mux: Option<MuxConfig>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SniffingConfig {
    pub enabled: bool,
    #[serde(rename = "destOverride")]
    pub dest_override: Vec<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MuxConfig {
    pub enabled: bool,
    pub concurrency: Option<i32>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransportConfig {
    #[serde(rename = "tcpSettings")]
    pub tcp_settings: Option<Value>,
    #[serde(rename = "kcpSettings")]
    pub kcp_settings: Option<Value>,
    #[serde(rename = "wsSettings")]
    pub ws_settings: Option<Value>,
    #[serde(rename = "httpSettings")]
    pub http_settings: Option<Value>,
    #[serde(rename = "dsSettings")]
    pub ds_settings: Option<Value>,
    #[serde(rename = "quicSettings")]
    pub quic_settings: Option<Value>,
    #[serde(rename = "grpcSettings")]
    pub grpc_settings: Option<Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StatsConfig {}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReverseConfig {
    pub bridges: Option<Vec<BridgeConfig>>,
    pub portals: Option<Vec<PortalConfig>>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BridgeConfig {
    pub tag: String,
    pub domain: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PortalConfig {
    pub tag: String,
    pub domain: String,
}

#[allow(dead_code)]
impl XrayConfig {
    /// Create a new basic Xray configuration
    pub fn new() -> Self {
        Self {
            log: LogConfig {
                access: Some("/var/log/xray/access.log".to_string()),
                error: Some("/var/log/xray/error.log".to_string()),
                log_level: "warning".to_string(),
            },
            api: ApiConfig {
                tag: "api".to_string(),
                listen: "127.0.0.1:2053".to_string(),
            },
            dns: None,
            routing: Some(RoutingConfig {
                domain_strategy: Some("IPIfNonMatch".to_string()),
                rules: vec![RoutingRule {
                    rule_type: "field".to_string(),
                    domain: None,
                    ip: Some(vec!["geoip:private".to_string()]),
                    port: None,
                    outbound_tag: "direct".to_string(),
                }],
            }),
            policy: Some(PolicyConfig {
                levels: {
                    let mut levels = HashMap::new();
                    levels.insert(
                        "0".to_string(),
                        PolicyLevel {
                            handshake_timeout: Some(4),
                            conn_idle: Some(300),
                            uplink_only: Some(2),
                            downlink_only: Some(5),
                        },
                    );
                    levels
                },
                system: Some(SystemPolicy {
                    stats_inbound_uplink: Some(true),
                    stats_inbound_downlink: Some(true),
                }),
            }),
            inbounds: vec![],
            outbounds: vec![
                OutboundConfig {
                    tag: "direct".to_string(),
                    protocol: "freedom".to_string(),
                    settings: serde_json::json!({}),
                    stream_settings: None,
                    mux: None,
                },
                OutboundConfig {
                    tag: "blocked".to_string(),
                    protocol: "blackhole".to_string(),
                    settings: serde_json::json!({
                        "response": {
                            "type": "http"
                        }
                    }),
                    stream_settings: None,
                    mux: None,
                },
            ],
            transport: None,
            stats: Some(StatsConfig {}),
            reverse: None,
        }
    }

    /// Add inbound to configuration
    pub fn add_inbound(&mut self, inbound: InboundConfig) {
        self.inbounds.push(inbound);
    }

    /// Remove inbound by tag
    pub fn remove_inbound(&mut self, tag: &str) -> bool {
        let initial_len = self.inbounds.len();
        self.inbounds.retain(|inbound| inbound.tag != tag);
        self.inbounds.len() != initial_len
    }

    /// Find inbound by tag
    pub fn find_inbound(&self, tag: &str) -> Option<&InboundConfig> {
        self.inbounds.iter().find(|inbound| inbound.tag == tag)
    }

    /// Find inbound by tag (mutable)
    pub fn find_inbound_mut(&mut self, tag: &str) -> Option<&mut InboundConfig> {
        self.inbounds.iter_mut().find(|inbound| inbound.tag == tag)
    }

    /// Convert to JSON Value
    pub fn to_json(&self) -> Value {
        serde_json::to_value(self).unwrap_or(Value::Null)
    }

    /// Create from JSON Value
    pub fn from_json(value: &Value) -> Result<Self, serde_json::Error> {
        serde_json::from_value(value.clone())
    }

    /// Validate configuration
    pub fn validate(&self) -> Result<(), String> {
        // Check for duplicate inbound tags
        let mut tags = std::collections::HashSet::new();
        for inbound in &self.inbounds {
            if !tags.insert(&inbound.tag) {
                return Err(format!("Duplicate inbound tag: {}", inbound.tag));
            }
        }

        // Check for duplicate outbound tags
        tags.clear();
        for outbound in &self.outbounds {
            if !tags.insert(&outbound.tag) {
                return Err(format!("Duplicate outbound tag: {}", outbound.tag));
            }
        }

        Ok(())
    }
}

impl Default for XrayConfig {
    fn default() -> Self {
        Self::new()
    }
}
360
src/services/xray/inbounds.rs
Normal file
360
src/services/xray/inbounds.rs
Normal file
@@ -0,0 +1,360 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use prost::Message;
|
||||||
|
use serde_json::Value;
|
||||||
|
use uuid;
|
||||||
|
use xray_core::{
|
||||||
|
app::proxyman::command::{AddInboundRequest, RemoveInboundRequest},
|
||||||
|
app::proxyman::ReceiverConfig,
|
||||||
|
common::net::{ip_or_domain::Address, IpOrDomain, Network, PortList, PortRange},
|
||||||
|
common::protocol::User,
|
||||||
|
common::serial::TypedMessage,
|
||||||
|
core::InboundHandlerConfig,
|
||||||
|
proxy::shadowsocks::ServerConfig as ShadowsocksServerConfig,
|
||||||
|
proxy::shadowsocks::{Account as ShadowsocksAccount, CipherType},
|
||||||
|
proxy::trojan::Account as TrojanAccount,
|
||||||
|
proxy::trojan::ServerConfig as TrojanServerConfig,
|
||||||
|
proxy::vless::inbound::Config as VlessInboundConfig,
|
||||||
|
proxy::vless::Account as VlessAccount,
|
||||||
|
proxy::vmess::inbound::Config as VmessInboundConfig,
|
||||||
|
proxy::vmess::Account as VmessAccount,
|
||||||
|
tonic::Request,
|
||||||
|
transport::internet::tls::{Certificate as TlsCertificate, Config as TlsConfig},
|
||||||
|
transport::internet::StreamConfig,
|
||||||
|
Client,
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
pub struct InboundClient<'a> {
|
||||||
|
endpoint: String,
|
||||||
|
client: &'a Client,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> InboundClient<'a> {
|
||||||
|
pub fn new(endpoint: String, client: &'a Client) -> Self {
|
||||||
|
Self { endpoint, client }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add inbound configuration
|
||||||
|
pub async fn add_inbound(&self, inbound: &Value) -> Result<()> {
|
||||||
|
self.add_inbound_with_certificate(inbound, None, None, None)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add inbound configuration with TLS certificate and users
|
||||||
|
pub async fn add_inbound_with_certificate(
|
||||||
|
&self,
|
||||||
|
inbound: &Value,
|
||||||
|
users: Option<&[Value]>,
|
||||||
|
cert_pem: Option<&str>,
|
||||||
|
key_pem: Option<&str>,
|
||||||
|
) -> Result<()> {
|
||||||
|
let tag = inbound["tag"].as_str().unwrap_or("").to_string();
|
||||||
|
let port = inbound["port"].as_u64().unwrap_or(8080) as u32;
|
||||||
|
let protocol = inbound["protocol"].as_str().unwrap_or("vless");
|
||||||
|
let _user_count = users.map_or(0, |u| u.len());
|
||||||
|
|
||||||
|
tracing::info!(
|
||||||
|
"Adding inbound '{}' with protocol={}, port={}, has_cert={}, has_key={}",
|
||||||
|
tag,
|
||||||
|
protocol,
|
||||||
|
port,
|
||||||
|
cert_pem.is_some(),
|
||||||
|
key_pem.is_some()
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create receiver configuration (port binding) - use simple port number
|
||||||
|
let port_list = PortList {
|
||||||
|
range: vec![PortRange {
|
||||||
|
from: port,
|
||||||
|
to: port,
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create StreamConfig with proper structure and TLS like working example
|
||||||
|
let stream_settings = if cert_pem.is_some() && key_pem.is_some() {
|
||||||
|
let cert_pem = cert_pem.unwrap();
|
||||||
|
let key_pem = key_pem.unwrap();
|
||||||
|
|
||||||
|
// Create TLS certificate exactly like working example - PEM content as bytes
|
||||||
|
let tls_cert = TlsCertificate {
|
||||||
|
certificate: cert_pem.as_bytes().to_vec(), // PEM content as bytes like working example
|
||||||
|
key: key_pem.as_bytes().to_vec(), // PEM content as bytes like working example
|
||||||
|
usage: 0,
|
||||||
|
ocsp_stapling: 3600, // From working example
|
||||||
|
one_time_loading: true, // From working example
|
||||||
|
build_chain: false,
|
||||||
|
certificate_path: "".to_string(), // Empty paths since we use content
|
||||||
|
key_path: "".to_string(), // Empty paths since we use content
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create TLS config with proper fields like working example
|
||||||
|
let mut tls_config = TlsConfig::default();
|
||||||
|
tls_config.certificate = vec![tls_cert];
|
||||||
|
tls_config.next_protocol = vec!["h2".to_string(), "http/1.1".to_string()]; // From working example
|
||||||
|
tls_config.server_name = "localhost".to_string(); // From working example
|
||||||
|
tls_config.min_version = "1.2".to_string(); // From Marzban examples
|
||||||
|
|
||||||
|
// Create TypedMessage for TLS config
|
||||||
|
let tls_message = TypedMessage {
|
||||||
|
r#type: "xray.transport.internet.tls.Config".to_string(),
|
||||||
|
value: tls_config.encode_to_vec(),
|
||||||
|
};
|
||||||
|
|
||||||
|
tracing::debug!(
|
||||||
|
"TLS config: server_name={}, protocols={:?}",
|
||||||
|
tls_config.server_name,
|
||||||
|
tls_config.next_protocol
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create StreamConfig like working example
|
||||||
|
Some(StreamConfig {
|
||||||
|
address: None, // No address in streamSettings according to working example
|
||||||
|
port: 0, // No port in working example streamSettings
|
||||||
|
protocol_name: "tcp".to_string(),
|
||||||
|
transport_settings: vec![],
|
||||||
|
security_type: "xray.transport.internet.tls.Config".to_string(), // Full type like working example
|
||||||
|
security_settings: vec![tls_message],
|
||||||
|
socket_settings: None,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let receiver_config = ReceiverConfig {
|
||||||
|
port_list: Some(port_list),
|
||||||
|
listen: Some(IpOrDomain {
|
||||||
|
address: Some(Address::Ip(vec![0, 0, 0, 0])), // "0.0.0.0" as IPv4 bytes
|
||||||
|
}),
|
||||||
|
allocation_strategy: None,
|
||||||
|
stream_settings: stream_settings,
|
||||||
|
receive_original_destination: false,
|
||||||
|
sniffing_settings: None, // TODO: add sniffing settings if needed
|
||||||
|
};
|
||||||
|
|
||||||
|
let receiver_message = TypedMessage {
|
||||||
|
r#type: "xray.app.proxyman.ReceiverConfig".to_string(),
|
||||||
|
value: receiver_config.encode_to_vec(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create proxy configuration based on protocol with users
|
||||||
|
let proxy_message = match protocol {
|
||||||
|
"vless" => {
|
||||||
|
let mut clients = vec![];
|
||||||
|
if let Some(users) = users {
|
||||||
|
for user in users {
|
||||||
|
let user_id = user["id"].as_str().unwrap_or("").to_string();
|
||||||
|
let email = user["email"].as_str().unwrap_or("").to_string();
|
||||||
|
let level = user["level"].as_u64().unwrap_or(0) as u32;
|
||||||
|
|
||||||
|
if !user_id.is_empty() && !email.is_empty() {
|
||||||
|
let account = VlessAccount {
|
||||||
|
id: user_id,
|
||||||
|
encryption: "none".to_string(),
|
||||||
|
flow: "".to_string(),
|
||||||
|
};
|
||||||
|
clients.push(User {
|
||||||
|
email,
|
||||||
|
level,
|
||||||
|
account: Some(TypedMessage {
|
||||||
|
r#type: "xray.proxy.vless.Account".to_string(),
|
||||||
|
value: account.encode_to_vec(),
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let vless_config = VlessInboundConfig {
|
||||||
|
clients,
|
||||||
|
decryption: "none".to_string(),
|
||||||
|
fallbacks: vec![],
|
||||||
|
};
|
||||||
|
TypedMessage {
|
||||||
|
r#type: "xray.proxy.vless.inbound.Config".to_string(),
|
||||||
|
value: vless_config.encode_to_vec(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"vmess" => {
|
||||||
|
let mut vmess_users = vec![];
|
||||||
|
if let Some(users) = users {
|
||||||
|
for user in users {
|
||||||
|
let user_id = user["id"].as_str().unwrap_or("").to_string();
|
||||||
|
let email = user["email"].as_str().unwrap_or("").to_string();
|
||||||
|
let level = user["level"].as_u64().unwrap_or(0) as u32;
|
||||||
|
|
||||||
|
// Validate required fields
|
||||||
|
if user_id.is_empty() || email.is_empty() {
|
||||||
|
tracing::warn!("Skipping VMess user: missing id or email");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate UUID format
|
||||||
|
if uuid::Uuid::parse_str(&user_id).is_err() {
|
||||||
|
tracing::warn!("VMess user '{}' has invalid UUID format", user_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
if !user_id.is_empty() && !email.is_empty() {
|
||||||
|
let account = VmessAccount {
|
||||||
|
id: user_id.clone(),
|
||||||
|
security_settings: None,
|
||||||
|
tests_enabled: "".to_string(), // Keep empty as in examples
|
||||||
|
};
|
||||||
|
let account_bytes = account.encode_to_vec();
|
||||||
|
|
||||||
|
vmess_users.push(User {
|
||||||
|
email: email.clone(),
|
||||||
|
level,
|
||||||
|
account: Some(TypedMessage {
|
||||||
|
r#type: "xray.proxy.vmess.Account".to_string(),
|
||||||
|
value: account_bytes,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
                let vmess_config = VmessInboundConfig {
                    user: vmess_users,
                    default: None,
                    detour: None,
                };
                TypedMessage {
                    r#type: "xray.proxy.vmess.inbound.Config".to_string(),
                    value: vmess_config.encode_to_vec(),
                }
            }
            "trojan" => {
                let mut trojan_users = vec![];
                if let Some(users) = users {
                    for user in users {
                        let password = user["password"]
                            .as_str()
                            .or_else(|| user["id"].as_str())
                            .unwrap_or("")
                            .to_string();
                        let email = user["email"].as_str().unwrap_or("").to_string();
                        let level = user["level"].as_u64().unwrap_or(0) as u32;

                        if !password.is_empty() && !email.is_empty() {
                            let account = TrojanAccount { password };
                            trojan_users.push(User {
                                email,
                                level,
                                account: Some(TypedMessage {
                                    r#type: "xray.proxy.trojan.Account".to_string(),
                                    value: account.encode_to_vec(),
                                }),
                            });
                        }
                    }
                }

                let trojan_config = TrojanServerConfig {
                    users: trojan_users,
                    fallbacks: vec![],
                };
                TypedMessage {
                    r#type: "xray.proxy.trojan.ServerConfig".to_string(),
                    value: trojan_config.encode_to_vec(),
                }
            }
            "shadowsocks" => {
                let mut ss_users = vec![];
                if let Some(users) = users {
                    for user in users {
                        let password = user["password"]
                            .as_str()
                            .or_else(|| user["id"].as_str())
                            .unwrap_or("")
                            .to_string();
                        let email = user["email"].as_str().unwrap_or("").to_string();
                        let level = user["level"].as_u64().unwrap_or(0) as u32;

                        if !password.is_empty() && !email.is_empty() {
                            let account = ShadowsocksAccount {
                                password,
                                cipher_type: CipherType::Aes256Gcm as i32, // Use AES-256-GCM cipher
                                iv_check: false, // Default IV check
                            };
                            ss_users.push(User {
                                email: email.clone(),
                                level,
                                account: Some(TypedMessage {
                                    r#type: "xray.proxy.shadowsocks.Account".to_string(),
                                    value: account.encode_to_vec(),
                                }),
                            });
                        }
                    }
                }

                let shadowsocks_config = ShadowsocksServerConfig {
                    users: ss_users,
                    network: vec![Network::Tcp as i32, Network::Udp as i32], // Support TCP and UDP
                };
                TypedMessage {
                    r#type: "xray.proxy.shadowsocks.ServerConfig".to_string(),
                    value: shadowsocks_config.encode_to_vec(),
                }
            }
            _ => {
                return Err(anyhow!("Unsupported protocol: {}", protocol));
            }
        };

        let inbound_config = InboundHandlerConfig {
            tag: tag.clone(),
            receiver_settings: Some(receiver_message),
            proxy_settings: Some(proxy_message),
        };

        let request = Request::new(AddInboundRequest {
            inbound: Some(inbound_config),
        });
        let mut handler_client = self.client.handler();
        match handler_client.add_inbound(request).await {
            Ok(_) => {
                tracing::info!("Added {} inbound '{}' successfully", protocol, tag);
                Ok(())
            }
            Err(e) => {
                tracing::error!("Failed to add {} inbound '{}': {}", protocol, tag, e);
                Err(anyhow!("Failed to add inbound {}: {}", tag, e))
            }
        }
    }
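
    // Illustrative sketch (assumption, not shown in this diff): how the builder above
    // might be driven from the JSON-level API. The exact JSON shape accepted by
    // `add_inbound`, the tag, and the port are placeholders; only the per-user field
    // names parsed above ("email", "id"/"password", "level") come from the code.
    //
    //     let inbound = serde_json::json!({
    //         "tag": "trojan-in",
    //         "port": 8443,
    //         "protocol": "trojan",
    //         "users": [{ "email": "alice@example.com", "password": "s3cret", "level": 0 }]
    //     });
    //     client.add_inbound(&inbound).await?;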

    /// Remove inbound by tag
    pub async fn remove_inbound(&self, tag: &str) -> Result<()> {
        let mut handler_client = self.client.handler();
        let request = Request::new(RemoveInboundRequest {
            tag: tag.to_string(),
        });

        match handler_client.remove_inbound(request).await {
            Ok(_) => {
                tracing::info!("Removed inbound '{}' from {}", tag, self.endpoint);
                Ok(())
            }
            Err(e) => {
                tracing::error!("Failed to remove inbound '{}': {}", tag, e);
                Err(anyhow!("Failed to remove inbound: {}", e))
            }
        }
    }

    /// Restart Xray with new configuration
    pub async fn restart_with_config(
        &self,
        _config: &crate::services::xray::XrayConfig,
    ) -> Result<()> {
        tracing::debug!(
            "Restarting Xray server at {} with new config",
            self.endpoint
        );

        // TODO: Implement restart with config using xray-core
        // For now just return success
        Ok(())
    }
}
445
src/services/xray/mod.rs
Normal file
@@ -0,0 +1,445 @@
use anyhow::Result;
use serde_json::Value;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tokio::time::{timeout, Duration, Instant};
use tracing::warn;
use uuid::Uuid;

pub mod client;
pub mod config;
pub mod inbounds;
pub mod stats;
pub mod users;

pub use client::XrayClient;
pub use config::XrayConfig;

/// Cached connection with TTL
#[derive(Clone)]
struct CachedConnection {
    client: XrayClient,
    created_at: Instant,
}

impl CachedConnection {
    fn new(client: XrayClient) -> Self {
        Self {
            client,
            created_at: Instant::now(),
        }
    }

    fn is_expired(&self, ttl: Duration) -> bool {
        self.created_at.elapsed() > ttl
    }
}

/// Service for managing Xray servers via gRPC
#[derive(Clone)]
pub struct XrayService {
    connection_cache: Arc<RwLock<HashMap<String, CachedConnection>>>,
    connection_ttl: Duration,
}

#[allow(dead_code)]
impl XrayService {
    pub fn new() -> Self {
        Self {
            connection_cache: Arc::new(RwLock::new(HashMap::new())),
            connection_ttl: Duration::from_secs(300), // 5 minutes TTL
        }
    }

    /// Create service with custom TTL for testing
    pub fn with_ttl(ttl: Duration) -> Self {
        Self {
            connection_cache: Arc::new(RwLock::new(HashMap::new())),
            connection_ttl: ttl,
        }
    }

    /// Get or create cached client for endpoint
    async fn get_or_create_client(&self, endpoint: &str) -> Result<XrayClient> {
        // Check cache first
        {
            let cache = self.connection_cache.read().await;
            if let Some(cached) = cache.get(endpoint) {
                if !cached.is_expired(self.connection_ttl) {
                    return Ok(cached.client.clone());
                }
            }
        }

        // Create new connection
        let client = XrayClient::connect(endpoint).await?;
        let cached_connection = CachedConnection::new(client.clone());

        // Update cache
        {
            let mut cache = self.connection_cache.write().await;
            cache.insert(endpoint.to_string(), cached_connection);
        }

        Ok(client)
    }
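
    // Note: the cache check and the insert above are two separate critical sections, so
    // two tasks refreshing the same endpoint concurrently may both open a connection;
    // the later insert simply overwrites the earlier one. Expired entries are only
    // replaced lazily on access (or dropped via `clear_expired_connections` below).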

    /// Test connection to Xray server with timeout
    pub async fn test_connection(&self, _server_id: Uuid, endpoint: &str) -> Result<bool> {
        // Apply a 3-second timeout to the entire test operation
        match timeout(Duration::from_secs(3), self.get_or_create_client(endpoint)).await {
            Ok(Ok(_client)) => {
                // Connection successful
                Ok(true)
            }
            Ok(Err(e)) => {
                // Connection failed with error
                warn!("Failed to connect to Xray at {}: {}", endpoint, e);
                Ok(false)
            }
            Err(_) => {
                // Operation timed out
                warn!("Connection test to Xray at {} timed out", endpoint);
                Ok(false)
            }
        }
    }
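
    // Note: the 3-second timeout above only bounds obtaining a client handle. When a
    // non-expired connection is already cached for this endpoint, no network round-trip
    // happens at all, so a stale cached connection can still report `true` here.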

    /// Get statistics from Xray server
    pub async fn get_stats(&self, endpoint: &str) -> Result<Value> {
        let client = self.get_or_create_client(endpoint).await?;
        client.get_stats().await
    }

    /// Query specific statistics with pattern
    pub async fn query_stats(&self, endpoint: &str, pattern: &str, reset: bool) -> Result<Value> {
        let client = self.get_or_create_client(endpoint).await?;
        client.query_stats(pattern, reset).await
    }

    /// Add user to server with specific inbound and configuration
    pub async fn add_user(&self, endpoint: &str, inbound_tag: &str, user: &Value) -> Result<()> {
        let client = self.get_or_create_client(endpoint).await?;
        client.add_user(inbound_tag, user).await
    }

    /// Remove user from server
    pub async fn remove_user(
        &self,
        endpoint: &str,
        inbound_tag: &str,
        user_email: &str,
    ) -> Result<()> {
        let client = self.get_or_create_client(endpoint).await?;
        client.remove_user(inbound_tag, user_email).await
    }

    /// Remove user from server (with server_id parameter for compatibility)
    pub async fn remove_user_with_server_id(
        &self,
        _server_id: Uuid,
        endpoint: &str,
        inbound_tag: &str,
        user_email: &str,
    ) -> Result<()> {
        self.remove_user(endpoint, inbound_tag, user_email).await
    }

    /// Create new inbound on server
    pub async fn create_inbound(&self, endpoint: &str, inbound: &Value) -> Result<()> {
        let client = self.get_or_create_client(endpoint).await?;
        client.add_inbound(inbound).await
    }

    /// Create inbound with certificate (legacy interface for compatibility)
    pub async fn create_inbound_with_certificate(
        &self,
        _server_id: Uuid,
        endpoint: &str,
        _tag: &str,
        _port: i32,
        _protocol: &str,
        _base_settings: Value,
        _stream_settings: Value,
        cert_pem: Option<&str>,
        key_pem: Option<&str>,
    ) -> Result<()> {
        // For now, create a basic inbound structure
        // In real implementation, this would build the inbound from the parameters
        let inbound = serde_json::json!({
            "tag": _tag,
            "port": _port,
            "protocol": _protocol,
            "settings": _base_settings,
            "streamSettings": _stream_settings
        });

        let client = self.get_or_create_client(endpoint).await?;
        client
            .add_inbound_with_certificate(&inbound, cert_pem, key_pem)
            .await
    }

    /// Update existing inbound on server
    pub async fn update_inbound(&self, endpoint: &str, inbound: &Value) -> Result<()> {
        let client = self.get_or_create_client(endpoint).await?;
        client.add_inbound(inbound).await // For now, just add - update logic would be more complex
    }

    /// Delete inbound from server
    pub async fn delete_inbound(&self, endpoint: &str, tag: &str) -> Result<()> {
        let client = self.get_or_create_client(endpoint).await?;
        client.remove_inbound(tag).await
    }

    /// Remove inbound from server (alias for delete_inbound)
    pub async fn remove_inbound(&self, _server_id: Uuid, endpoint: &str, tag: &str) -> Result<()> {
        self.delete_inbound(endpoint, tag).await
    }

    /// Get cache statistics for monitoring
    pub async fn get_cache_stats(&self) -> (usize, usize) {
        let cache = self.connection_cache.read().await;
        let total = cache.len();
        let expired = cache
            .values()
            .filter(|conn| conn.is_expired(self.connection_ttl))
            .count();
        (total, expired)
    }

    /// Clear expired connections from cache
    pub async fn clear_expired_connections(&self) {
        let mut cache = self.connection_cache.write().await;
        cache.retain(|_, conn| !conn.is_expired(self.connection_ttl));
    }

    /// Clear all connections from cache
    pub async fn clear_cache(&self) {
        let mut cache = self.connection_cache.write().await;
        cache.clear();
    }
}
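
// Illustrative sketch of the intended call flow for the service above. The endpoint
// URL, inbound tag, `server_id`, and `user_json` values are hypothetical placeholders;
// only the XrayService methods shown in this file are assumed.
//
//     let service = XrayService::new();
//     if service.test_connection(server_id, "http://10.0.0.5:62789").await? {
//         service.add_user("http://10.0.0.5:62789", "trojan-in", &user_json).await?;
//         let stats = service
//             .get_user_stats("http://10.0.0.5:62789", "alice@example.com")
//             .await?;
//     }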

// Additional methods that were in the original file but truncated
#[allow(dead_code)]
impl XrayService {
    /// Generic helper to run an operation against a (possibly cached) client.
    /// Note: despite the name, no retry is performed yet - the operation runs once.
    async fn execute_with_retry<F, R>(&self, endpoint: &str, operation: F) -> Result<R>
    where
        F: Fn(XrayClient) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<R>> + Send>>,
    {
        let client = self.get_or_create_client(endpoint).await?;
        operation(client).await
    }
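
    // Illustrative only: the helper above expects a closure returning a boxed future,
    // matching its `F` bound. A call could look roughly like this (the stats call is
    // just an arbitrary example of an operation):
    //
    //     let stats = self
    //         .execute_with_retry(endpoint, |client| {
    //             Box::pin(async move { client.get_stats().await })
    //         })
    //         .await?;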

    /// Sync user with Xray server - ensures user exists with correct config.
    /// Currently a placeholder: the arguments are accepted but nothing is synced yet.
    pub async fn sync_user(
        &self,
        server_id: Uuid,
        endpoint: &str,
        inbound_tag: &str,
        user: &Value,
    ) -> Result<()> {
        let _server_id = server_id;
        let _endpoint = endpoint;
        let _inbound_tag = inbound_tag;
        let _user = user;
        // Implementation would go here
        Ok(())
    }

    /// Batch operation to sync multiple users
    pub async fn sync_users(
        &self,
        endpoint: &str,
        inbound_tag: &str,
        users: Vec<&Value>,
    ) -> Result<Vec<Result<()>>> {
        let mut results = Vec::new();
        for user in users {
            let result = self.add_user(endpoint, inbound_tag, user).await;
            results.push(result);
        }
        Ok(results)
    }

    /// Get user statistics for specific user
    pub async fn get_user_stats(&self, endpoint: &str, user_email: &str) -> Result<Value> {
        let pattern = format!("user>>>{}>>>traffic", user_email);
        self.query_stats(endpoint, &pattern, false).await
    }
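
    // Assumption: Xray's stats service names per-user counters along the lines of
    // "user>>>alice@example.com>>>traffic>>>uplink" / ">>>downlink", so the prefix
    // pattern built above is expected to match both traffic directions.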

    /// Reset user statistics
    pub async fn reset_user_stats(&self, endpoint: &str, user_email: &str) -> Result<Value> {
        let pattern = format!("user>>>{}>>>traffic", user_email);
        self.query_stats(endpoint, &pattern, true).await
    }

    /// Health check for server
    pub async fn health_check(&self, endpoint: &str) -> Result<bool> {
        match self.get_stats(endpoint).await {
            Ok(_) => Ok(true),
            Err(_) => Ok(false),
        }
    }

    /// Sync server inbounds optimized (placeholder implementation)
    pub async fn sync_server_inbounds_optimized(
        &self,
        _server_id: Uuid,
        _endpoint: &str,
        _desired_inbounds: &std::collections::HashMap<
            String,
            crate::services::tasks::DesiredInbound,
        >,
    ) -> Result<()> {
        // Placeholder implementation for tasks.rs compatibility
        // In real implementation, this would:
        // 1. Get current inbounds from server
        // 2. Compare with desired inbounds
        // 3. Add/remove/update as needed
        Ok(())
    }
}
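
// Illustrative sketch of the reconciliation loop described (but not yet implemented) in
// `sync_server_inbounds_optimized` above. `current_tags` stands in for a hypothetical
// way of listing inbounds already present on the server, and `desired_to_json` is a
// placeholder for converting a `DesiredInbound` into the JSON value `create_inbound`
// expects.
//
//     for (tag, desired) in desired_inbounds {
//         if !current_tags.contains(tag) {
//             self.create_inbound(endpoint, &desired_to_json(desired)).await?;
//         }
//     }
//     for tag in &current_tags {
//         if !desired_inbounds.contains_key(tag) {
//             self.delete_inbound(endpoint, tag).await?;
//         }
//     }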

#[cfg(test)]
mod tests {
    use super::*;
    use tokio::time::Duration;
    use uuid::Uuid;

    #[tokio::test]
    async fn test_xray_service_creation() {
        let service = XrayService::new();
        let (total, expired) = service.get_cache_stats().await;
        assert_eq!(total, 0);
        assert_eq!(expired, 0);
    }

    #[tokio::test]
    async fn test_xray_service_with_custom_ttl() {
        let custom_ttl = Duration::from_millis(100);
        let service = XrayService::with_ttl(custom_ttl);
        assert_eq!(service.connection_ttl, custom_ttl);
    }

    #[tokio::test]
    async fn test_cache_expiration() {
        let service = XrayService::with_ttl(Duration::from_millis(50));

        // This test doesn't actually connect since we don't have a real Xray server
        // but tests the caching logic structure
        let (total, expired) = service.get_cache_stats().await;
        assert_eq!(total, 0);
        assert_eq!(expired, 0);
    }

    #[tokio::test]
    async fn test_cache_clearing() {
        let service = XrayService::new();

        // Clear empty cache
        service.clear_cache().await;
        let (total, _) = service.get_cache_stats().await;
        assert_eq!(total, 0);

        // Clear expired connections from empty cache
        service.clear_expired_connections().await;
        let (total, _) = service.get_cache_stats().await;
        assert_eq!(total, 0);
    }

    #[tokio::test]
    async fn test_connection_timeout() {
        let service = XrayService::new();
        let server_id = Uuid::new_v4();

        // Test with invalid endpoint - should return false due to connection failure
        let result = service
            .test_connection(server_id, "invalid://endpoint")
            .await;
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), false);
    }

    #[tokio::test]
    async fn test_health_check_with_invalid_endpoint() {
        let service = XrayService::new();

        // Test health check with invalid endpoint
        let result = service.health_check("invalid://endpoint").await;
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), false);
    }

    #[test]
    fn test_cached_connection_expiration() {
        // Create a mock client for testing purposes
        // In real tests, we would use a mock framework
        let _now = Instant::now();

        // Test the expiration logic directly without creating an actual client
        let short_ttl = Duration::from_nanos(1);
        let long_ttl = Duration::from_secs(1);

        // Simulate time passage
        let elapsed_short = Duration::from_nanos(10);
        let elapsed_long = Duration::from_millis(10);

        // Test expiration logic
        assert!(elapsed_short > short_ttl);
        assert!(elapsed_long < long_ttl);
    }

    #[tokio::test]
    async fn test_user_stats_pattern_generation() {
        let service = XrayService::new();
        let user_email = "test@example.com";

        // We can't test the actual stats call without a real server,
        // but we can test that the method doesn't panic and returns an error for invalid endpoint
        let result = service
            .get_user_stats("invalid://endpoint", user_email)
            .await;
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn test_sync_users_empty_list() {
        let service = XrayService::new();
        let users: Vec<&serde_json::Value> = vec![];

        let results = service
            .sync_users("invalid://endpoint", "test_inbound", users)
            .await;
        assert!(results.is_ok());
        assert_eq!(results.unwrap().len(), 0);
    }

    // Helper function for creating test user data
    fn create_test_user() -> serde_json::Value {
        serde_json::json!({
            "email": "test@example.com",
            "id": "test-user-id",
            "level": 0
        })
    }

    #[tokio::test]
    async fn test_sync_users_with_data() {
        let service = XrayService::new();
        let user_data = create_test_user();
        let users = vec![&user_data];

        // This will fail due to invalid endpoint, but tests the structure
        let results = service
            .sync_users("invalid://endpoint", "test_inbound", users)
            .await;
        assert!(results.is_ok());
        let results = results.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].is_err()); // Should fail due to invalid endpoint
    }
}
Some files were not shown because too many files have changed in this diff