添加基础性能测试分析代码
This commit is contained in:
26
scripts/env/dev/docker-compose.yaml
vendored
Normal file
26
scripts/env/dev/docker-compose.yaml
vendored
Normal file
@@ -0,0 +1,26 @@
# Development environment: a single PostgreSQL instance.
# Required environment variables (e.g. from a .env file next to this file):
#   DB_DATABASE, DB_USERNAME, DB_PASSWORD, DB_PORT
name: proxy-server

services:

  postgres:
    container_name: proxy-server-dev-postgres
    image: postgres:17
    environment:
      POSTGRES_DB: $DB_DATABASE
      POSTGRES_USER: $DB_USERNAME
      POSTGRES_PASSWORD: $DB_PASSWORD
    ports:
      # Expose the container's standard Postgres port on a configurable host port.
      - "${DB_PORT}:5432"
    volumes:
      - postgres-data:/var/lib/postgresql/data
    networks:
      - proxy-server-dev
    restart: always

networks:
  proxy-server-dev:
    driver: bridge

volumes:
  postgres-data:
    # Explicit name so the volume survives `docker compose down` project renames.
    name: proxy-server-dev-postgres-data
68
scripts/env/test/docker-compose.yaml
vendored
Normal file
68
scripts/env/test/docker-compose.yaml
vendored
Normal file
@@ -0,0 +1,68 @@
# Test environment: PostgreSQL + the proxy service + a Vector log collector.
#
# NOTE(review): container_name values still say "-dev-" while the network and
# volume say "-test-". They are kept as-is here because vector.toml filters on
# the "proxy-server-dev-service" container name — rename both together.
name: proxy-server

services:

  postgres:
    container_name: proxy-server-dev-postgres
    image: postgres:17
    environment:
      POSTGRES_DB: $DB_DATABASE
      POSTGRES_USER: $DB_USERNAME
      POSTGRES_PASSWORD: $DB_PASSWORD
    ports:
      - "${DB_PORT}:5432"
    volumes:
      - postgres-data:/var/lib/postgresql/data
    networks:
      - proxy-server-test
    restart: always

  service:
    container_name: proxy-server-dev-service
    build:
      context: ../../..
      # `dockerfile` is resolved relative to `context` (the repository root);
      # the original "../../../Dockerfile" escaped the build context and is
      # invalid in Compose.
      dockerfile: Dockerfile
    environment:
      APP_CTRL_PORT: $APP_CTRL_PORT
      APP_DATA_PORT: $APP_DATA_PORT
      APP_LOG_MODE: $APP_LOG_MODE
      DB_HOST: $DB_HOST
      DB_PORT: $DB_PORT
      DB_DATABASE: $DB_DATABASE
      DB_USERNAME: $DB_USERNAME
      DB_PASSWORD: $DB_PASSWORD
      DB_TIMEZONE: $DB_TIMEZONE
    ports:
      - "${APP_CTRL_PORT}:${APP_CTRL_PORT}"
      - "${APP_DATA_PORT}:${APP_DATA_PORT}"
      # NOTE(review): 6060 looks like a Go pprof endpoint — confirm.
      - "6060:6060"
      # Range of data ports handed out by the proxy service.
      - "20000-20100:20000-20100"
    networks:
      - proxy-server-test
    depends_on:
      - postgres
    restart: always

  vector:
    container_name: proxy-server-dev-vector
    build:
      context: vector
      # Resolved relative to the `vector` context directory; the original
      # "vector/Dockerfile" would have looked for vector/vector/Dockerfile.
      dockerfile: Dockerfile
    ports:
      # Vector API (matches [api].address in vector.toml).
      - "8686:8686"
    volumes:
      # Docker socket so the docker_logs source can tail container logs.
      - /var/run/docker.sock:/var/run/docker.sock
      # Host directory receiving the file sinks written under /temp/vector.
      - /var/log/vector:/temp/vector
    networks:
      - proxy-server-test
    depends_on:
      - service
    restart: always

networks:
  proxy-server-test:
    driver: bridge

volumes:
  postgres-data:
    name: proxy-server-test-postgres-data
6
scripts/env/test/vector/Dockerfile
vendored
Normal file
6
scripts/env/test/vector/Dockerfile
vendored
Normal file
@@ -0,0 +1,6 @@
FROM timberio/vector:0.45.0-debian

# Ship our pipeline definition into Vector's default config location.
COPY vector.toml /etc/vector/vector.toml

# NOTE(review): assumes the base image's ENTRYPOINT is the `vector` binary,
# so CMD only supplies its arguments — confirm against the image.
CMD ["-c", "/etc/vector/vector.toml"]
56
scripts/env/test/vector/vector.toml
vendored
Normal file
56
scripts/env/test/vector/vector.toml
vendored
Normal file
@@ -0,0 +1,56 @@
timezone = "Asia/Shanghai"

[api]
enabled = true
address = "0.0.0.0:8686"

# Tail container logs through the mounted Docker socket.
# NOTE(review): this matches the "-dev-" container name used by the test
# compose file; if that container is renamed, update this list with it.
[sources.docker]
type = "docker_logs"
include_containers = ["proxy-server-dev-service"]

# Decode each JSON log line and reformat its timestamp in local time.
[transforms.parse]
inputs = ["docker"]
type = "remap"
source = '''
. = parse_json!(.message)
.time, err = format_timestamp(parse_timestamp!(.time, "%+"), "%Y-%m-%dT%H:%M:%S", "Asia/Shanghai")
if err != null {
    # Fixed: this branch fires on a timestamp formatting error, but the
    # original message claimed a JSON parse failure.
    log("Unable to format timestamp: " + err, level: "error")
    abort
}
'''

# ===============
# User access records
# ===============

# Keep only access-record lines; the service emits them with the literal
# Chinese label "用户访问记录" as the msg field (runtime match string — do not translate).
[transforms.destinations]
inputs = ["parse"]
type = "filter"
condition = ".msg == \"用户访问记录\""

[sinks.file_out]
inputs = ["destinations"]
type = "file"
# One file per day; /temp/vector is bind-mounted to /var/log/vector on the host.
path = "/temp/vector/service-destinations/%Y-%m-%d.log"
encoding.codec = "csv"
encoding.csv.fields = ["time", "uid", "user", "proxy", "node", "proto", "dest", "domain"]
buffer.type = "disk"
# 268435488 bytes = 256 MiB + 32 B — presumably Vector's minimum allowed
# disk-buffer size; verify against the buffer documentation.
buffer.max_size = 268435488

# ===============
# errors
# ===============

[transforms.errors]
inputs = ["parse"]
type = "filter"
condition = ".level == \"ERROR\""

[sinks.file_errors]
inputs = ["errors"]
type = "file"
path = "/temp/vector/service-errors/%Y-%m-%d.log"
encoding.codec = "json"
buffer.type = "disk"
buffer.max_size = 268435488
74
scripts/sql/init.sql
Normal file
74
scripts/sql/init.sql
Normal file
@@ -0,0 +1,74 @@
-- nodes: proxy exit nodes.
drop table if exists nodes cascade;
create table nodes (
    id         serial primary key,
    name       varchar(255) not null unique,
    provider   varchar(255) not null,
    location   varchar(255) not null,
    ip_address varchar(255) not null,
    created_at timestamp default current_timestamp,
    updated_at timestamp default current_timestamp,
    deleted_at timestamp -- soft-delete marker; NULL means the row is live
);
-- Renamed from devices_*: the table is nodes, not devices.
create index nodes_provider_index on nodes (provider);
create index nodes_location_index on nodes (location);


-- users
drop table if exists users cascade;
create table users (
    id         serial primary key,
    password   varchar(255) not null,
    username   varchar(255) not null unique,
    email      varchar(255) not null unique,
    phone      varchar(255) not null unique,
    name       varchar(255) not null,
    created_at timestamp default current_timestamp,
    updated_at timestamp default current_timestamp,
    deleted_at timestamp
);


-- user_ips: per-user IP address records.
drop table if exists user_ips cascade;
create table user_ips (
    id      serial primary key,
    user_id int not null references users (id)
        on update cascade
        on delete cascade,
    ip_address varchar(255) not null,
    created_at timestamp default current_timestamp,
    updated_at timestamp default current_timestamp,
    deleted_at timestamp
);
create index user_ips_user_id_index on user_ips (user_id);
create index user_ips_ip_address_index on user_ips (ip_address);


-- channels
drop table if exists channels cascade;
create table channels (
    id      serial primary key,
    user_id int not null references users (id)
        on update cascade
        on delete cascade,
    -- Fixed: node_id must be nullable. It was declared NOT NULL together with
    -- ON DELETE SET NULL, so deleting any referenced node would always fail
    -- the NOT NULL constraint — defeating the stated intent below.
    node_id int references nodes (id)
        on update cascade
        on delete set null, -- after a node is deleted, keep the user-side channel record
    node_port  int,
    protocol   varchar(255),
    auth_ip    bool,
    auth_pass  bool,
    username   varchar(255) unique,
    password   varchar(255),
    expiration timestamp not null,
    created_at timestamp default current_timestamp,
    updated_at timestamp default current_timestamp,
    deleted_at timestamp
);
-- Renamed channel_* -> channels_* to match the table name.
create index channels_user_id_index on channels (user_id);
create index channels_node_id_index on channels (node_id);
create index channels_username_index on channels (username);


-- ====================
-- seed data
-- ====================
Reference in New Issue
Block a user