mirror of
https://github.com/20kaushik02/real-time-traffic-analysis-clickhouse.git
synced 2025-12-06 06:44:07 +00:00
cc map, full scale data testing
This commit is contained in:
parent 11abeb5337
commit a5887f30f5
@@ -10,13 +10,14 @@ if __name__ == "__main__":

    # extracting details of each running container in json format
    try:
-        all_services = subprocess.check_output(["docker","ps","--format","json"],text=True).split('\n')[:-1]
+        all_services = subprocess.check_output(["sudo", "docker","service","ls","--format","json"],text=True).split('\n')[:-1]
    except subprocess.CalledProcessError as e:
        print(f"Command failed with return code {e.returncode}")

    all_services = [json.loads(s) for s in all_services]
    # extracting the name, removing the custom id from it and storing it in a list
-    all_service_names = [service['Names'].split('.')[0] for service in all_services if re.findall(r'clickhouse-server',service['Names'])]
+    # all_service_names = [service['Names'].split('.')[0] for service in all_services if re.findall(r'clickhouse-server',service['Names'])]
+    all_service_names = [service['Name'] for service in all_services if re.findall(r'clickhouse-server',service['Name'])]
    # extracting only 'server1','server2'...
    all_service_names = [ name.split('-')[-1] for name in all_service_names]

@@ -41,7 +42,7 @@ if __name__ == "__main__":
    </shard>
    '''
    # extracting existing remote-servers file
-    with open('../node1-config/remote-servers.xml','r') as f:
+    with open('../clickhouse/node1-config/remote-servers.xml','r') as f:
        curr_remote_servers_xml = ET.parse(f)

    cluster_root = curr_remote_servers_xml.find('.//cluster_1S_2R')
@@ -49,20 +50,20 @@ if __name__ == "__main__":
    cluster_root.append(new_shard_xml)

    # creating folders for new servers that contain the configuration files
-    os.makedirs(f'../node{curr_num_servers+1}-config',exist_ok=True)
-    os.makedirs(f'../node{curr_num_servers+2}-config',exist_ok=True)
+    os.makedirs(f'../clickhouse/node{curr_num_servers+1}-config',exist_ok=True)
+    os.makedirs(f'../clickhouse/node{curr_num_servers+2}-config',exist_ok=True)

    # adding the new shard to each remote-servers file
    for i in range(1,curr_num_servers+3):
-        output_path = f'../node{i}-config/remote-servers.xml'
+        output_path = f'../clickhouse/node{i}-config/remote-servers.xml'
        curr_remote_servers_xml.write(output_path, encoding='utf-8', xml_declaration=False)

-    env = Environment(loader=FileSystemLoader('../jinja-templates'))
+    env = Environment(loader=FileSystemLoader('../clickhouse/jinja-templates'))
    service_template = env.get_template('service.yml.jinja')
    volume_template = env.get_template('volume.yml.jinja')

    # loading existing docker-compose file
-    with open('../docker-compose.yaml','r') as f:
+    with open('../clickhouse/docker-compose.yaml','r') as f:
        compose_f = yaml.safe_load(f)

    # rendering the new service
@@ -79,7 +80,7 @@ if __name__ == "__main__":
    compose_f['volumes'].update(new_volume2)

    if compose_f:
-        with open('../docker-compose.yaml','w') as yamlfile:
+        with open('../clickhouse/docker-compose.yaml','w') as yamlfile:
            yaml.safe_dump(compose_f, yamlfile)

    config_template = env.get_template('config.xml.jinja')
@@ -89,18 +90,18 @@ if __name__ == "__main__":

    for i in range(1,3):
        config_content = config_template.render(node_num=curr_num_servers+i)
-        with open(f'../node{curr_num_servers + i}-config/config.xml','w') as f1:
+        with open(f'../clickhouse/node{curr_num_servers + i}-config/config.xml','w') as f1:
            f1.write(config_content)

        macros_content = macros_template.render(shard_num="0"+str(int(curr_num_shards+1)),replica_num=i)
-        with open(f'../node{curr_num_servers + i}-config/macros.xml','w') as f2:
+        with open(f'../clickhouse/node{curr_num_servers + i}-config/macros.xml','w') as f2:
            f2.write(macros_content)

        use_keeper_content = use_keeper_template.render()
-        with open(f'../node{curr_num_servers + i}-config/use-keeper.xml','w') as f3:
+        with open(f'../clickhouse/node{curr_num_servers + i}-config/use-keeper.xml','w') as f3:
            f3.write(use_keeper_content)

        storage_policy_content = storage_policy_template.render(server_num=curr_num_servers+i)
-        with open(f'../node{curr_num_servers + i}-config/storage-policy.xml','w') as f4:
+        with open(f'../clickhouse/node{curr_num_servers + i}-config/storage-policy.xml','w') as f4:
            f4.write(storage_policy_content)
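Note: the resharding step above boils down to parsing remote-servers.xml, appending one <shard> element under the cluster node, and rewriting the file for every node's config dir. A minimal sketch, assuming the usual <replica>/<host>/<port> layout of ClickHouse's remote_servers section (append_shard and the host arguments are hypothetical names, not from this repo):

import xml.etree.ElementTree as ET

def append_shard(remote_servers_path, host_a, host_b):
    # parse the existing config and locate the cluster element the script uses
    tree = ET.parse(remote_servers_path)
    cluster = tree.find('.//cluster_1S_2R')
    # one new shard with two replicas, mirroring the script's <shard> template
    shard = ET.SubElement(cluster, 'shard')
    ET.SubElement(shard, 'internal_replication').text = 'true'
    for host in (host_a, host_b):
        replica = ET.SubElement(shard, 'replica')
        ET.SubElement(replica, 'host').text = host
        ET.SubElement(replica, 'port').text = '9000'
    return tree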
@@ -7,28 +7,34 @@ import time

def check_util_exec():
    # extracting details of each running container in json format
    try:
-        all_services = subprocess.check_output(["docker","stats","--no-stream","--format","json"],text=True).split('\n')[:-1]
+        all_services = subprocess.check_output(["sudo", "docker","stats","--no-stream","--format","json"],text=True).split('\n')[:-1]
    except subprocess.CalledProcessError as e:
        print(f"Command failed with return code {e.returncode}")

    all_services = [json.loads(s) for s in all_services]

    resource_util_exceed_flag = True # Flag to check if all of the containers have exceeded 80% memory utilization
    for service in all_services:
        if re.findall(r'clickhouse-server',service['Name']):
-            if float(service['MemPerc'][:-1]) < 80:
+            if float(service['MemPerc'][:-1]) < 60:
                resource_util_exceed_flag = False

    if resource_util_exceed_flag:
-        process = subprocess.Popen(['python3','update_compose.py'],text=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
+        process = subprocess.Popen(['python3','../clickhouse/update_config_scripts/update_compose.py'],text=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
        stdout, stderr = process.communicate() # Wait for the process to finish and capture output
        print("Standard Output:", stdout)
        print("Standard Error:", stderr)
+    # try:
+    #     all_services = subprocess.check_output(["sudo", "docker","stats","--no-stream","--format","json"],text=True).split('\n')[:-1]
+    # except subprocess.CalledProcessError as e:
+    #     print(f"Command failed with return code {e.returncode}")

if __name__ == "__main__":
-    schedule.every(30).seconds.do(check_util_exec)
+    # schedule.every(30).seconds.do(check_util_exec)
+    # while True:
+    #     schedule.run_pending()
+    #     time.sleep(1)
    while True:
-        schedule.run_pending()
-        time.sleep(1)
+        check_util_exec()
+        time.sleep(30)
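The watcher above now fires only when every clickhouse-server container is at or above a 60% memory threshold (lowered from 80%). A minimal standalone sketch of that condition, assuming MemPerc strings like "42.51%" as docker stats emits; all_above_threshold and stats_lines are hypothetical names:

import json
import re

def all_above_threshold(stats_lines, threshold=60.0):
    # one JSON object per line, as `docker stats --no-stream --format json` produces
    for line in stats_lines:
        service = json.loads(line)
        if re.findall(r'clickhouse-server', service['Name']):
            if float(service['MemPerc'][:-1]) < threshold:  # "42.51%" -> 42.51
                return False
    return True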
@@ -61,6 +61,7 @@ services:
      - ../clickhouse/node1-config/:/etc/clickhouse-server/config.d/
      - ../clickhouse/node-entrypoints/main:/docker-entrypoint-initdb.d
      - ../preprocessing/geoip.csv:/var/lib/clickhouse/user_files/csv/ip_region_map.csv
+      - ../preprocessing/geoip_cc.csv:/var/lib/clickhouse/user_files/csv/ip_region_cc_map.csv
      - clickhouse_server1_data:/var/lib/clickhouse
      - clickhouse_server1_TTL:/clickhouse_data/server1
    networks:
@@ -19,9 +19,10 @@ SETTINGS storage_policy = 'hot_cold';
CREATE TABLE ip_region_map (
    ip_range_start IPv4,
    ip_range_end IPv4,
-    region LowCardinality(String),
    ip_range_cidr String MATERIALIZED IPv4RangeToCIDRString(ip_range_start, ip_range_end),
-    INDEX region_idx region TYPE bloom_filter
+    country_code LowCardinality(String),
+    country LowCardinality(String),
+    INDEX country_idx country TYPE bloom_filter
) ENGINE = ReplicatedMergeTree(
    '/clickhouse/tables/{shard}/ip_region_map',
    '{replica}'
@@ -29,7 +30,7 @@ CREATE TABLE ip_region_map (
ORDER BY ip_range_start;

CREATE DICTIONARY ip_region_dict
-(ip_range_cidr String, region String)
+(ip_range_cidr String, country_code String, country String)
PRIMARY KEY ip_range_cidr
SOURCE(CLICKHOUSE(TABLE 'ip_region_map'))
LAYOUT(ip_trie)
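After this change the ip_trie dictionary carries two attributes instead of one, and callers fetch them as a tuple, picking a field with .1/.2, as the updated dashboard queries below do. A hedged sketch of reading it from Python, assuming the clickhouse-driver package and hypothetical connection details:

from clickhouse_driver import Client

client = Client(host='localhost')  # hypothetical host
rows = client.execute(
    "SELECT dictGet('ip_region_dict', ('country_code', 'country'), "
    "tuple(toIPv4('8.8.8.8'))) AS cc_country"
)
# an ip_trie hit returns both attributes at once, e.g. ('US', 'United States')
print(rows)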
@@ -19,9 +19,10 @@ SETTINGS storage_policy = 'hot_cold';
CREATE TABLE ip_region_map (
    ip_range_start IPv4,
    ip_range_end IPv4,
-    region LowCardinality(String),
    ip_range_cidr String MATERIALIZED IPv4RangeToCIDRString(ip_range_start, ip_range_end),
-    INDEX region_idx region TYPE bloom_filter
+    country_code LowCardinality(String),
+    country LowCardinality(String),
+    INDEX country_idx country TYPE bloom_filter
) ENGINE = ReplicatedMergeTree(
    '/clickhouse/tables/{shard}/ip_region_map',
    '{replica}'
@@ -29,7 +30,7 @@ CREATE TABLE ip_region_map (
ORDER BY ip_range_start;

CREATE DICTIONARY ip_region_dict
-(ip_range_cidr String, region String)
+(ip_range_cidr String, country_code String, country String)
PRIMARY KEY ip_range_cidr
SOURCE(CLICKHOUSE(TABLE 'ip_region_map'))
LAYOUT(ip_trie)
@@ -1,3 +1,3 @@
-INSERT INTO ip_region_map (ip_range_start, ip_range_end, region)
-FROM INFILE '/var/lib/clickhouse/user_files/csv/ip_region_map.csv'
+INSERT INTO ip_region_map (ip_range_start, ip_range_end, country_code, country)
+FROM INFILE '/var/lib/clickhouse/user_files/csv/ip_region_cc_map.csv'
FORMAT CSVWithNames;
@@ -56,8 +56,10 @@ services:
        aliases:
          - data-streamer
    volumes:
-      - "../preprocessing/10k_sample_2023_10_01-2023_10_31.csv:/data/csv/main.csv:ro"
-    command: "sh -c 'sleep 30 && python /app/pcap_processor.py -c /data/csv/main.csv -l 0.1'"
+      # - "../preprocessing/10k_sample_2023_10_01-2023_10_31.csv:/data/csv/main.csv:ro"
+      - "../preprocessing/1M_sample_2023_10_01-2023_10_31.csv:/data/csv/main.csv:ro"
+    command: "sh -c 'sleep 30 && python /app/pcap_processor.py -c /data/csv/main.csv'"
+    # command: "sh -c 'sleep 30 && python /app/pcap_processor.py -c /data/csv/main.csv -l 0.1'"
    deploy:
      replicas: 1
      # placement:
@@ -14,7 +14,7 @@ def int_to_ipv4(num: int) -> str:

# with open("IP2LOCATION-LITE-DB3.csv", "r") as input_file, open(
with open("IP2LOCATION-LITE-DB1.csv", "r") as input_file, open(
-    "geoip.csv", "w", newline=""
+    "geoip_cc.csv", "w", newline=""
) as output_file:
    reader = csv.reader(input_file)
    writer = csv.writer(output_file)
@@ -24,6 +24,7 @@ with open("IP2LOCATION-LITE-DB1.csv", "r") as input_file, open(
    [
        "ip_range_start",
        "ip_range_end",
+        "country_code",
        "country",
        # "region",
        # "city",
@@ -35,6 +36,7 @@ with open("IP2LOCATION-LITE-DB1.csv", "r") as input_file, open(
    new_record = [
        int_to_ipv4(int(record[0])),
        int_to_ipv4(int(record[1])),
+        record[2],
        record[3],
        # record[4],
        # record[5],
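For reference, the hunk header above names int_to_ipv4; the IP2LOCATION CSVs store IPv4 addresses as 32-bit integers, so the conversion is just octet extraction. A minimal implementation consistent with that signature (the function body itself isn't shown in this diff):

def int_to_ipv4(num: int) -> str:
    # pull each octet out of the 32-bit integer, most significant first
    return '.'.join(str((num >> shift) & 0xFF) for shift in (24, 16, 8, 0))

assert int_to_ipv4(16909060) == '1.2.3.4'  # 0x01020304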
@@ -1,10 +1,11 @@
#!/bin/bash

-while getopts "SMDT:A" flag; do
+while getopts "SMDUT:A" flag; do
  case "${flag}" in
  S) sudoRequired=true ;;
  M) masterNode=true ;;
  D) downStack=true ;;
+  U) autoShard=true ;;
  T) swarmToken=$OPTARG ;;
  A) managerAddr=$OPTARG ;;
  esac

@@ -27,7 +28,7 @@ if [[ $downStack ]]; then
  $dockerCmd service rm registry
  sleep 20
  $dockerCmd volume rm $($dockerCmd volume ls --filter name=$stackName -q)
-elif ($masterNode); then
+elif [[ $masterNode ]]; then
  echo "[+] swarm master"
  $dockerCmd swarm init

@@ -38,17 +39,16 @@ elif ($masterNode); then
  $dockerCmd build -t 127.0.0.1:5000/data-streamer:latest --push -f Dockerfile.python .

  # execute
  chmod 774 ../clickhouse/node-entrypoints/*/00_wait_for_keeper.sh
  cd $scriptDir
  $dockerCmd stack deploy -d \
    -c ../preprocessing/docker-compose.yml \
    -c ../clickhouse/docker-compose.yaml \
    -c ../ui/docker-compose.yaml \
    $stackName

-  # scripts
-  # pip install -r "$scriptDir/../final/config_update_scripts/requirements.txt"
-  # cd $scriptDir/../preprocessing
-  # python3 update_trigger.py
+elif [[ $autoShard ]]; then
+  cd $scriptDir
+  python3 $scriptDir/../clickhouse/config_update_scripts/update_trigger.py
else
  echo "[+] swarm follower"
  echo "[+] joining swarm with token $swarmToken"
@@ -21,6 +21,247 @@
  "id": 1,
  "links": [],
  "panels": [
+    {
+      "datasource": {
+        "type": "grafana-clickhouse-datasource",
+        "uid": "PDEE91DDB90597936"
+      },
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisBorderShow": false,
+            "axisCenteredZero": false,
+            "axisColorMode": "text",
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "barWidthFactor": 0.6,
+            "drawStyle": "line",
+            "fillOpacity": 10,
+            "gradientMode": "opacity",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "insertNulls": false,
+            "lineInterpolation": "smooth",
+            "lineStyle": {
+              "fill": "solid"
+            },
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "dashed"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "percentage",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 0,
+        "y": 0
+      },
+      "id": 10,
+      "options": {
+        "legend": {
+          "calcs": [
+            "mean",
+            "sum"
+          ],
+          "displayMode": "list",
+          "placement": "bottom",
+          "showLegend": true
+        },
+        "timezone": [
+          "Asia/Tokyo"
+        ],
+        "tooltip": {
+          "mode": "single",
+          "sort": "none"
+        }
+      },
+      "pluginVersion": "11.3.1",
+      "targets": [
+        {
+          "editorType": "sql",
+          "format": 1,
+          "meta": {
+            "builderOptions": {
+              "columns": [],
+              "database": "",
+              "limit": 1000,
+              "mode": "list",
+              "queryType": "table",
+              "table": ""
+            }
+          },
+          "pluginVersion": "4.5.1",
+          "queryType": "table",
+          "rawSql": "SELECT\n  toDate(time_stamp) AS \"Day\",\n  l4_protocol AS \"IP Protocol\",\n  SUM(pkt_len)/1024.0/1024.0 AS \"Bandwidth (MB)\"\nFROM traffic_records_all\nGROUP BY \"Day\", l4_protocol\nORDER BY \"Day\" ASC;\n",
+          "refId": "A"
+        }
+      ],
+      "title": "Daily bandwidth trend",
+      "transformations": [
+        {
+          "id": "prepareTimeSeries",
+          "options": {
+            "format": "multi"
+          }
+        }
+      ],
+      "type": "timeseries"
+    },
+    {
+      "datasource": {
+        "type": "grafana-clickhouse-datasource",
+        "uid": "PDEE91DDB90597936"
+      },
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisBorderShow": false,
+            "axisCenteredZero": false,
+            "axisColorMode": "text",
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "barWidthFactor": 0.6,
+            "drawStyle": "line",
+            "fillOpacity": 10,
+            "gradientMode": "opacity",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "insertNulls": false,
+            "lineInterpolation": "smooth",
+            "lineStyle": {
+              "fill": "solid"
+            },
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "dashed"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "percentage",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "yellow",
+                "value": 60
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 12,
+        "y": 0
+      },
+      "id": 9,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "list",
+          "placement": "bottom",
+          "showLegend": true
+        },
+        "timezone": [
+          "Asia/Tokyo"
+        ],
+        "tooltip": {
+          "mode": "single",
+          "sort": "none"
+        }
+      },
+      "pluginVersion": "11.3.1",
+      "targets": [
+        {
+          "editorType": "sql",
+          "format": 1,
+          "meta": {
+            "builderOptions": {
+              "columns": [],
+              "database": "",
+              "limit": 1000,
+              "mode": "list",
+              "queryType": "table",
+              "table": ""
+            }
+          },
+          "pluginVersion": "4.5.1",
+          "queryType": "table",
+          "rawSql": "SELECT\n  toDate(time_stamp) AS \"Day\",\n  l4_protocol AS \"IP Protocol\",\n  COUNT(time_stamp) AS \"Packet count\"\nFROM traffic_records_all\nGROUP BY \"Day\", l4_protocol\nORDER BY \"Day\" ASC;\n",
+          "refId": "A"
+        }
+      ],
+      "title": "Daily traffic trend",
+      "transformations": [
+        {
+          "id": "prepareTimeSeries",
+          "options": {
+            "format": "multi"
+          }
+        }
+      ],
+      "type": "timeseries"
+    },
    {
      "datasource": {
        "type": "grafana-clickhouse-datasource",
@@ -56,7 +297,7 @@
        "h": 7,
        "w": 9,
        "x": 0,
-        "y": 0
+        "y": 8
      },
      "id": 8,
      "options": {
@@ -100,7 +341,7 @@
          },
          "pluginVersion": "4.5.1",
          "queryType": "table",
-          "rawSql": "SELECT\n  SUM(pkt_len)/1024.0/1024.0 AS \"Regional traffic bandwidth (MB)\",\n  dictGet('ip_region_dict', 'region', tuple(src_ip)) AS region\nFROM traffic_records_all\nGROUP BY region\nORDER BY \"Regional traffic bandwidth (MB)\" DESC\nLIMIT 10",
+          "rawSql": "SELECT\n  SUM(pkt_len)/1024.0/1024.0 AS \"Regional traffic bandwidth (MB)\",\n  dictGet('ip_region_dict', ('country_code', 'country'), tuple(src_ip)).2 AS region\nFROM traffic_records_all\nGROUP BY region\nORDER BY \"Regional traffic bandwidth (MB)\" DESC\nLIMIT 10",
          "refId": "A"
        }
      ],
@@ -142,7 +383,7 @@
        "h": 7,
        "w": 9,
        "x": 9,
-        "y": 0
+        "y": 8
      },
      "id": 7,
      "options": {
@@ -186,7 +427,7 @@
          },
          "pluginVersion": "4.5.1",
          "queryType": "table",
-          "rawSql": "SELECT\n  COUNT(src_ip) AS \"Regional traffic\",\n  dictGet('ip_region_dict', 'region', tuple(src_ip)) AS region\nFROM traffic_records_all\nGROUP BY region\nORDER BY \"Regional traffic\" DESC\nLIMIT 10",
+          "rawSql": "SELECT\n  COUNT(src_ip)/1000.0/1000.0 AS \"Regional traffic\",\n  dictGet('ip_region_dict', ('country_code', 'country'), tuple(src_ip)).2 AS region\nFROM traffic_records_all\nGROUP BY region\nORDER BY \"Regional traffic\" DESC\nLIMIT 10",
          "refId": "A"
        }
      ],
@@ -218,7 +459,7 @@
        "h": 7,
        "w": 6,
        "x": 18,
-        "y": 0
+        "y": 8
      },
      "id": 6,
      "options": {
@@ -319,7 +560,7 @@
        "h": 7,
        "w": 9,
        "x": 0,
-        "y": 7
+        "y": 15
      },
      "id": 5,
      "options": {
@@ -361,7 +602,7 @@
          },
          "pluginVersion": "4.5.1",
          "queryType": "table",
-          "rawSql": "SELECT Port,\r\n    src_bw/1024.0/1024.0 AS \"Source port bandwidth (MB)\",\r\n    dst_bw/1024.0/1024.0 AS \"Destination port bandwidth (MB)\"\r\nFROM (\r\n        SELECT src_port AS Port,\r\n            SUM(pkt_len) AS src_bw\r\n        FROM traffic_records_all\r\n        GROUP BY src_port\r\n        ORDER BY src_bw DESC\r\n        LIMIT 40\r\n    ) AS src\r\n    INNER JOIN (\r\n        SELECT dst_port AS Port,\r\n            SUM(pkt_len) AS dst_bw\r\n        FROM traffic_records_all\r\n        GROUP BY dst_port\r\n        ORDER BY dst_bw DESC\r\n        LIMIT 40\r\n    ) AS dst USING (Port)\r\nORDER BY (src_bw + dst_bw) DESC\r\nLIMIT 40;",
+          "rawSql": "SELECT Port,\r\n    src_bw/1024.0/1024.0 AS \"Source port bandwidth (MB)\",\r\n    dst_bw/1024.0/1024.0 AS \"Destination port bandwidth (MB)\"\r\nFROM (\r\n        SELECT src_port AS Port,\r\n            SUM(pkt_len) AS src_bw\r\n        FROM traffic_records_all\r\n        GROUP BY src_port\r\n        ORDER BY src_bw DESC\r\n        LIMIT 20\r\n    ) AS src\r\n    INNER JOIN (\r\n        SELECT dst_port AS Port,\r\n            SUM(pkt_len) AS dst_bw\r\n        FROM traffic_records_all\r\n        GROUP BY dst_port\r\n        ORDER BY dst_bw DESC\r\n        LIMIT 20\r\n    ) AS dst USING (Port)\r\nORDER BY (src_bw + dst_bw) DESC\r\nLIMIT 20;",
          "refId": "A"
        }
      ],
@@ -416,7 +657,7 @@
        "h": 7,
        "w": 9,
        "x": 9,
-        "y": 7
+        "y": 15
      },
      "id": 4,
      "options": {
@@ -458,7 +699,7 @@
          },
          "pluginVersion": "4.5.1",
          "queryType": "table",
-          "rawSql": "SELECT \r\n    Port, \r\n    SourcePortCount AS \"Source port frequency\",\r\n    DestPortCount AS \"Destination port frequency\"\r\nFROM\r\n(\r\n    SELECT \r\n        src_port AS Port, \r\n        COUNT(*) AS SourcePortCount\r\n    FROM traffic_records_all\r\n    GROUP BY src_port\r\n    ORDER BY SourcePortCount DESC\r\n    LIMIT 40\r\n) AS src\r\nINNER JOIN\r\n(\r\n    SELECT \r\n        dst_port AS Port, \r\n        COUNT(*) AS DestPortCount\r\n    FROM traffic_records_all\r\n    GROUP BY dst_port\r\n    ORDER BY DestPortCount DESC\r\n    LIMIT 40\r\n) AS dst\r\nUSING (Port)\r\nORDER BY (SourcePortCount + DestPortCount) DESC\r\nLIMIT 40;\r\n",
+          "rawSql": "SELECT \r\n    Port, \r\n    SourcePortCount AS \"Source port frequency\",\r\n    DestPortCount AS \"Destination port frequency\"\r\nFROM\r\n(\r\n    SELECT \r\n        src_port AS Port, \r\n        COUNT(*) AS SourcePortCount\r\n    FROM traffic_records_all\r\n    GROUP BY src_port\r\n    ORDER BY SourcePortCount DESC\r\n    LIMIT 20\r\n) AS src\r\nINNER JOIN\r\n(\r\n    SELECT \r\n        dst_port AS Port, \r\n        COUNT(*) AS DestPortCount\r\n    FROM traffic_records_all\r\n    GROUP BY dst_port\r\n    ORDER BY DestPortCount DESC\r\n    LIMIT 20\r\n) AS dst\r\nUSING (Port)\r\nORDER BY (SourcePortCount + DestPortCount) DESC\r\nLIMIT 20;\r\n",
          "refId": "A"
        }
      ],
@@ -490,7 +731,7 @@
        "h": 7,
        "w": 6,
        "x": 18,
-        "y": 7
+        "y": 15
      },
      "id": 1,
      "options": {
@@ -546,6 +787,144 @@
      ],
      "title": "Distribution of L4 protocol (frequency)",
      "type": "piechart"
    },
+    {
+      "datasource": {
+        "type": "grafana-clickhouse-datasource",
+        "uid": "PDEE91DDB90597936"
+      },
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "custom": {
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "percentage",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 16,
+        "w": 24,
+        "x": 0,
+        "y": 22
+      },
+      "id": 11,
+      "options": {
+        "basemap": {
+          "config": {},
+          "name": "Layer 0",
+          "type": "default"
+        },
+        "controls": {
+          "mouseWheelZoom": true,
+          "showAttribution": true,
+          "showDebug": false,
+          "showMeasure": false,
+          "showScale": false,
+          "showZoom": true
+        },
+        "layers": [
+          {
+            "config": {
+              "showLegend": true,
+              "style": {
+                "color": {
+                  "field": "cc",
+                  "fixed": "dark-green"
+                },
+                "opacity": 0.4,
+                "rotation": {
+                  "fixed": 0,
+                  "max": 360,
+                  "min": -360,
+                  "mode": "mod"
+                },
+                "size": {
+                  "field": "Source",
+                  "fixed": 5,
+                  "max": 25,
+                  "min": 3
+                },
+                "symbol": {
+                  "fixed": "img/icons/marker/circle.svg",
+                  "mode": "fixed"
+                },
+                "symbolAlign": {
+                  "horizontal": "center",
+                  "vertical": "center"
+                },
+                "text": {
+                  "fixed": "",
+                  "mode": "field"
+                },
+                "textConfig": {
+                  "fontSize": 8,
+                  "offsetX": 0,
+                  "offsetY": 0,
+                  "textAlign": "center",
+                  "textBaseline": "middle"
+                }
+              }
+            },
+            "location": {
+              "lookup": "cc",
+              "mode": "lookup"
+            },
+            "name": "Markers",
+            "tooltip": false,
+            "type": "markers"
+          }
+        ],
+        "tooltip": {
+          "mode": "details"
+        },
+        "view": {
+          "allLayers": true,
+          "id": "oceania",
+          "lat": -10,
+          "lon": -140,
+          "zoom": 3
+        }
+      },
+      "pluginVersion": "11.3.1",
+      "targets": [
+        {
+          "editorType": "sql",
+          "format": 1,
+          "meta": {
+            "builderOptions": {
+              "columns": [],
+              "database": "",
+              "limit": 1000,
+              "mode": "list",
+              "queryType": "table",
+              "table": ""
+            }
+          },
+          "pluginVersion": "4.5.1",
+          "queryType": "table",
+          "rawSql": "SELECT\n  COUNT(src_ip) AS \"Source\",\n  dictGet('ip_region_dict', ('country_code', 'country'), tuple(src_ip)).1 AS cc\nFROM traffic_records_all\nGROUP BY cc\nORDER BY \"Source\" DESC;\n",
+          "refId": "A"
+        }
+      ],
+      "title": "Traffic map",
+      "type": "geomap"
+    }
  ],
  "preload": false,
@@ -556,13 +935,13 @@
    "list": []
  },
  "time": {
-    "from": "now-6h",
-    "to": "now"
+    "from": "2023-10-01T05:00:00.000Z",
+    "to": "2023-10-31T05:00:03.000Z"
  },
  "timepicker": {},
  "timezone": "browser",
  "title": "Internet traffic capture analysis",
  "uid": "be59fkbp3zs3kc",
-  "version": 1,
+  "version": 4,
  "weekStart": ""
}