diff --git a/clickhouse/config_update_scripts/requirements.txt b/clickhouse/config_update_scripts/requirements.txt
index 711648d..fe4d803 100644
--- a/clickhouse/config_update_scripts/requirements.txt
+++ b/clickhouse/config_update_scripts/requirements.txt
@@ -1,8 +1,3 @@
-subprocess
-json
jinja2
-yaml
-re
-xml
+pyyaml
schedule
-time
\ No newline at end of file
diff --git a/clickhouse/config_update_scripts/update_compose.py b/clickhouse/config_update_scripts/update_compose.py
index 66440e2..96e4e85 100644
--- a/clickhouse/config_update_scripts/update_compose.py
+++ b/clickhouse/config_update_scripts/update_compose.py
@@ -10,13 +10,14 @@ if __name__ == "__main__":
# extracting details of each running container in json format
try:
- all_services = subprocess.check_output(["docker","ps","--format","json"],text=True).split('\n')[:-1]
+ all_services = subprocess.check_output(["sudo", "docker","service","ls","--format","json"],text=True).split('\n')[:-1]
except subprocess.CalledProcessError as e:
print(f"Command failed with return code {e.returncode}")
all_services = [json.loads(s) for s in all_services]
# extracting the name, removing the custom id from it and storing it in a list
- all_service_names = [service['Names'].split('.')[0] for service in all_services if re.findall(r'clickhouse-server',service['Names'])]
+ # all_service_names = [service['Names'].split('.')[0] for service in all_services if re.findall(r'clickhouse-server',service['Names'])]
+ all_service_names = [service['Name'] for service in all_services if re.findall(r'clickhouse-server',service['Name'])]
# extracting only 'server1','server2'...
all_service_names = [ name.split('-')[-1] for name in all_service_names]
@@ -41,7 +42,7 @@ if __name__ == "__main__":
'''
# extracting existing remote-servers file
- with open('../node1-config/remote-servers.xml','r') as f:
+ with open('../clickhouse/node1-config/remote-servers.xml','r') as f:
curr_remote_servers_xml = ET.parse(f)
cluster_root = curr_remote_servers_xml.find('.//cluster_1S_2R')
@@ -49,20 +50,20 @@ if __name__ == "__main__":
cluster_root.append(new_shard_xml)
# creating folders for new servers that contain the configuration files
- os.makedirs(f'../node{curr_num_servers+1}-config',exist_ok=True)
- os.makedirs(f'../node{curr_num_servers+2}-config',exist_ok=True)
+ os.makedirs(f'../clickhouse/node{curr_num_servers+1}-config',exist_ok=True)
+ os.makedirs(f'../clickhouse/node{curr_num_servers+2}-config',exist_ok=True)
# adding the new shard to each remote-servers file
for i in range(1,curr_num_servers+3):
- output_path = f'../node{i}-config/remote-servers.xml'
+ output_path = f'../clickhouse/node{i}-config/remote-servers.xml'
curr_remote_servers_xml.write(output_path, encoding='utf-8', xml_declaration=False)
- env = Environment(loader=FileSystemLoader('../jinja-templates'))
+ env = Environment(loader=FileSystemLoader('../clickhouse/jinja-templates'))
service_template = env.get_template('service.yml.jinja')
volume_template = env.get_template('volume.yml.jinja')
# loading existing docker-compose file
- with open('../docker-compose.yaml','r') as f:
+ with open('../clickhouse/docker-compose.yaml','r') as f:
compose_f = yaml.safe_load(f)
# rendering the new service
@@ -79,7 +80,7 @@ if __name__ == "__main__":
compose_f['volumes'].update(new_volume2)
if compose_f:
- with open('../docker-compose.yaml','w') as yamlfile:
+ with open('../clickhouse/docker-compose.yaml','w') as yamlfile:
yaml.safe_dump(compose_f, yamlfile)
config_template = env.get_template('config.xml.jinja')
@@ -89,18 +90,18 @@ if __name__ == "__main__":
for i in range(1,3):
config_content = config_template.render(node_num=curr_num_servers+i)
- with open(f'../node{curr_num_servers + i}-config/config.xml','w') as f1:
+ with open(f'../clickhouse/node{curr_num_servers + i}-config/config.xml','w') as f1:
f1.write(config_content)
macros_content = macros_template.render(shard_num="0"+str(int(curr_num_shards+1)),replica_num=i)
- with open(f'../node{curr_num_servers + i}-config/macros.xml','w') as f2:
+ with open(f'../clickhouse/node{curr_num_servers + i}-config/macros.xml','w') as f2:
f2.write(macros_content)
use_keeper_content = use_keeper_template.render()
- with open(f'../node{curr_num_servers + i}-config/use-keeper.xml','w') as f3:
+ with open(f'../clickhouse/node{curr_num_servers + i}-config/use-keeper.xml','w') as f3:
f3.write(use_keeper_content)
storage_policy_content = storage_policy_template.render(server_num=curr_num_servers+i)
- with open(f'../node{curr_num_servers + i}-config/storage-policy.xml','w') as f4:
+ with open(f'../clickhouse/node{curr_num_servers + i}-config/storage-policy.xml','w') as f4:
f4.write(storage_policy_content)
diff --git a/clickhouse/config_update_scripts/update_trigger.py b/clickhouse/config_update_scripts/update_trigger.py
index fe154f5..41706f5 100644
--- a/clickhouse/config_update_scripts/update_trigger.py
+++ b/clickhouse/config_update_scripts/update_trigger.py
@@ -7,28 +7,34 @@ import time
def check_util_exec():
# extracting details of each running container in json format
try:
- all_services = subprocess.check_output(["docker","stats","--no-stream","--format","json"],text=True).split('\n')[:-1]
+ all_services = subprocess.check_output(["sudo", "docker","stats","--no-stream","--format","json"],text=True).split('\n')[:-1]
except subprocess.CalledProcessError as e:
print(f"Command failed with return code {e.returncode}")
all_services = [json.loads(s) for s in all_services]
- resource_util_exceed_flag = True # Flag to check if all of the containers have exceeded 80% memory utilization
+    resource_util_exceed_flag = True  # Flag to check if all of the containers have exceeded the 60% memory utilization threshold
for service in all_services:
if re.findall(r'clickhouse-server',service['Name']):
- if float(service['MemPerc'][:-1]) < 80:
+ if float(service['MemPerc'][:-1]) < 60:
resource_util_exceed_flag = False
if resource_util_exceed_flag:
- process = subprocess.Popen(['python3','update_compose.py'],text=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
+            process = subprocess.Popen(['python3','../clickhouse/config_update_scripts/update_compose.py'],text=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
stdout, stderr = process.communicate() # Wait for the process to finish and capture output
print("Standard Output:", stdout)
print("Standard Error:", stderr)
+ # try:
+ # all_services = subprocess.check_output(["sudo", "docker","stats","--no-stream","--format","json"],text=True).split('\n')[:-1]
+ # except subprocess.CalledProcessError as e:
+ # print(f"Command failed with return code {e.returncode}")
if __name__ == "__main__":
- schedule.every(30).seconds.do(check_util_exec)
+ # schedule.every(30).seconds.do(check_util_exec)
+ # while True:
+ # schedule.run_pending()
+ # time.sleep(1)
while True:
- schedule.run_pending()
- time.sleep(1)
-
+ check_util_exec()
+ time.sleep(30)
\ No newline at end of file
diff --git a/clickhouse/docker-compose.yaml b/clickhouse/docker-compose.yaml
index dac8f4b..f9c9eaa 100644
--- a/clickhouse/docker-compose.yaml
+++ b/clickhouse/docker-compose.yaml
@@ -60,7 +60,8 @@ services:
volumes:
- ../clickhouse/node1-config/:/etc/clickhouse-server/config.d/
- ../clickhouse/node-entrypoints/main:/docker-entrypoint-initdb.d
- - ../preprocessing/geoip.csv:/tmp/seedData/csv/ip_region_map.csv
+ - ../preprocessing/geoip.csv:/var/lib/clickhouse/user_files/csv/ip_region_map.csv
+ - ../preprocessing/geoip_cc.csv:/var/lib/clickhouse/user_files/csv/ip_region_cc_map.csv
- clickhouse_server1_data:/var/lib/clickhouse
- clickhouse_server1_TTL:/clickhouse_data/server1
networks:
diff --git a/clickhouse/jinja-templates/service.yml.jinja b/clickhouse/jinja-templates/service.yml.jinja
index 6f1c4ba..061dd2b 100644
--- a/clickhouse/jinja-templates/service.yml.jinja
+++ b/clickhouse/jinja-templates/service.yml.jinja
@@ -2,8 +2,10 @@ clickhouse-server{{server_num}}:
image: clickhouse/clickhouse-server:latest
container_name: clickhouse-server{{server_num}}
volumes:
- - ./node{{server_num}}-config/:/etc/clickhouse-server/config.d/
+ - ../clickhouse/node{{server_num}}-config/:/etc/clickhouse-server/config.d/
+ - ../clickhouse/node-entrypoints/common:/docker-entrypoint-initdb.d
- clickhouse_server{{server_num}}_data:/var/lib/clickhouse
+ - clickhouse_server{{server_num}}_TTL:/clickhouse_data/server{{server_num}}
networks:
clickhouse-server-network:
aliases:
@@ -15,6 +17,8 @@ clickhouse-server{{server_num}}:
replicas: 1
# placement:
# constraints: [node.labels.role == server]
+ restart_policy:
+ condition: on-failure
update_config:
delay: 10s
resources:
@@ -26,5 +30,5 @@ clickhouse-server{{server_num}}:
- clickhouse-keeper2
- clickhouse-keeper3
ports:
- - "900{{server_num}}:9000" # Native client port
- - "8123:8123" # HTTP interface
+ - "{{9000+server_num}}:9000" # Native client port
+ - "{{8123+server_num}}:8123" # HTTP interface
diff --git a/clickhouse/jinja-templates/storage-policy.xml.jinja b/clickhouse/jinja-templates/storage-policy.xml.jinja
index 440d7e7..d9cc191 100644
--- a/clickhouse/jinja-templates/storage-policy.xml.jinja
+++ b/clickhouse/jinja-templates/storage-policy.xml.jinja
@@ -1,25 +1,27 @@
-
-
-
- /clickhouse_data{{server_num}}/hot
- 300000000
-
-
- /clickhouse_data{{server_num}}/cold
- 500000000
-
-
-
-
-
-
- hot_disk
-
-
- cold_disk
-
-
- 0.2
-
-
-
\ No newline at end of file
+
+
+
+
+ /clickhouse_data/server{{server_num}}/hot
+
+
+ /clickhouse_data/server{{server_num}}/cold
+
+
+
+
+
+
+ hot_disk
+ 1073741824
+
+
+ cold_disk
+ 1073741824
+
+
+ 0.2
+
+
+
+
diff --git a/clickhouse/node-entrypoints/common/01_udf_create.sql b/clickhouse/node-entrypoints/common/01_udf_create.sql
new file mode 100644
index 0000000..5a5fa17
--- /dev/null
+++ b/clickhouse/node-entrypoints/common/01_udf_create.sql
@@ -0,0 +1,22 @@
+-- https://clickhouse.com/blog/geolocating-ips-in-clickhouse-and-grafana#using-bit-functions-to-convert-ip-ranges-to-cidr-notation
+
+CREATE FUNCTION unmatchedBits AS (ip_s, ip_e) -> if(
+ bitXor(ip_s, ip_e) != 0,
+ ceil(log2(bitXor(ip_s, ip_e))), 0
+);
+
+CREATE FUNCTION cidrSuffix AS (ip_s, ip_e) -> 32 - unmatchedBits(ip_s, ip_e);
+
+CREATE FUNCTION cidrAddress AS (ip_s, ip_e) -> toIPv4(
+ bitAnd(
+ bitNot(pow(2, unmatchedBits(ip_s, ip_e)) - 1),
+ ip_s
+ )::UInt64
+);
+
+CREATE FUNCTION IPv4RangeToCIDRString AS (ip_s, ip_e) -> CONCAT(
+ toString(cidrAddress(ip_s, ip_e)),
+ '/',
+ toString(cidrSuffix(ip_s, ip_e))
+);
+
diff --git a/clickhouse/node-entrypoints/common/01_table_create.sql b/clickhouse/node-entrypoints/common/02_table_dict_create.sql
similarity index 56%
rename from clickhouse/node-entrypoints/common/01_table_create.sql
rename to clickhouse/node-entrypoints/common/02_table_dict_create.sql
index 0f90004..641a7bc 100644
--- a/clickhouse/node-entrypoints/common/01_table_create.sql
+++ b/clickhouse/node-entrypoints/common/02_table_dict_create.sql
@@ -13,16 +13,25 @@ CREATE TABLE traffic_records (
'{replica}'
)
ORDER BY time_stamp
-TTL toDateTime(time_stamp) + INTERVAL 15 DAY TO VOLUME 'cold_vol'
+TTL toDateTime(time_stamp) + INTERVAL 410 DAY TO VOLUME 'cold_vol' -- october 15
SETTINGS storage_policy = 'hot_cold';
CREATE TABLE ip_region_map (
ip_range_start IPv4,
ip_range_end IPv4,
- region LowCardinality(String),
- INDEX region_idx region TYPE bloom_filter
+ ip_range_cidr String MATERIALIZED IPv4RangeToCIDRString(ip_range_start, ip_range_end),
+ country_code LowCardinality(String),
+ country LowCardinality(String),
+ INDEX country_idx country TYPE bloom_filter
) ENGINE = ReplicatedMergeTree(
'/clickhouse/tables/{shard}/ip_region_map',
'{replica}'
)
-ORDER BY ip_range_start;
\ No newline at end of file
+ORDER BY ip_range_start;
+
+CREATE DICTIONARY ip_region_dict
+(ip_range_cidr String, country_code String, country String)
+PRIMARY KEY ip_range_cidr
+SOURCE(CLICKHOUSE(TABLE 'ip_region_map'))
+LAYOUT(ip_trie)
+LIFETIME(3600);
diff --git a/clickhouse/node-entrypoints/main/01_udf_create.sql b/clickhouse/node-entrypoints/main/01_udf_create.sql
new file mode 100644
index 0000000..5a5fa17
--- /dev/null
+++ b/clickhouse/node-entrypoints/main/01_udf_create.sql
@@ -0,0 +1,22 @@
+-- https://clickhouse.com/blog/geolocating-ips-in-clickhouse-and-grafana#using-bit-functions-to-convert-ip-ranges-to-cidr-notation
+
+CREATE FUNCTION unmatchedBits AS (ip_s, ip_e) -> if(
+ bitXor(ip_s, ip_e) != 0,
+ ceil(log2(bitXor(ip_s, ip_e))), 0
+);
+
+CREATE FUNCTION cidrSuffix AS (ip_s, ip_e) -> 32 - unmatchedBits(ip_s, ip_e);
+
+CREATE FUNCTION cidrAddress AS (ip_s, ip_e) -> toIPv4(
+ bitAnd(
+ bitNot(pow(2, unmatchedBits(ip_s, ip_e)) - 1),
+ ip_s
+ )::UInt64
+);
+
+CREATE FUNCTION IPv4RangeToCIDRString AS (ip_s, ip_e) -> CONCAT(
+ toString(cidrAddress(ip_s, ip_e)),
+ '/',
+ toString(cidrSuffix(ip_s, ip_e))
+);
+
diff --git a/clickhouse/node-entrypoints/main/01_table_create.sql b/clickhouse/node-entrypoints/main/02_table_dict_create.sql
similarity index 56%
rename from clickhouse/node-entrypoints/main/01_table_create.sql
rename to clickhouse/node-entrypoints/main/02_table_dict_create.sql
index 0f90004..641a7bc 100644
--- a/clickhouse/node-entrypoints/main/01_table_create.sql
+++ b/clickhouse/node-entrypoints/main/02_table_dict_create.sql
@@ -13,16 +13,25 @@ CREATE TABLE traffic_records (
'{replica}'
)
ORDER BY time_stamp
-TTL toDateTime(time_stamp) + INTERVAL 15 DAY TO VOLUME 'cold_vol'
+TTL toDateTime(time_stamp) + INTERVAL 410 DAY TO VOLUME 'cold_vol' -- october 15
SETTINGS storage_policy = 'hot_cold';
CREATE TABLE ip_region_map (
ip_range_start IPv4,
ip_range_end IPv4,
- region LowCardinality(String),
- INDEX region_idx region TYPE bloom_filter
+ ip_range_cidr String MATERIALIZED IPv4RangeToCIDRString(ip_range_start, ip_range_end),
+ country_code LowCardinality(String),
+ country LowCardinality(String),
+ INDEX country_idx country TYPE bloom_filter
) ENGINE = ReplicatedMergeTree(
'/clickhouse/tables/{shard}/ip_region_map',
'{replica}'
)
-ORDER BY ip_range_start;
\ No newline at end of file
+ORDER BY ip_range_start;
+
+CREATE DICTIONARY ip_region_dict
+(ip_range_cidr String, country_code String, country String)
+PRIMARY KEY ip_range_cidr
+SOURCE(CLICKHOUSE(TABLE 'ip_region_map'))
+LAYOUT(ip_trie)
+LIFETIME(3600);
diff --git a/clickhouse/node-entrypoints/main/02_dist_table_create.sql b/clickhouse/node-entrypoints/main/03_dist_table_create.sql
similarity index 100%
rename from clickhouse/node-entrypoints/main/02_dist_table_create.sql
rename to clickhouse/node-entrypoints/main/03_dist_table_create.sql
diff --git a/clickhouse/node-entrypoints/main/03_insert_geoip_csv.sql b/clickhouse/node-entrypoints/main/03_insert_geoip_csv.sql
deleted file mode 100644
index 9a1ea5c..0000000
--- a/clickhouse/node-entrypoints/main/03_insert_geoip_csv.sql
+++ /dev/null
@@ -1,3 +0,0 @@
-INSERT INTO ip_region_map
-FROM INFILE '/tmp/seedData/csv/ip_region_map.csv'
-FORMAT CSVWithNames;
\ No newline at end of file
diff --git a/clickhouse/node-entrypoints/main/04_insert_geoip_csv.sql b/clickhouse/node-entrypoints/main/04_insert_geoip_csv.sql
new file mode 100644
index 0000000..a9e5098
--- /dev/null
+++ b/clickhouse/node-entrypoints/main/04_insert_geoip_csv.sql
@@ -0,0 +1,3 @@
+INSERT INTO ip_region_map (ip_range_start, ip_range_end, country_code, country)
+FROM INFILE '/var/lib/clickhouse/user_files/csv/ip_region_cc_map.csv'
+FORMAT CSVWithNames;
diff --git a/clickhouse/node-entrypoints/main/04_kafka_table_ingest.sql b/clickhouse/node-entrypoints/main/05_kafka_table_ingest.sql
similarity index 100%
rename from clickhouse/node-entrypoints/main/04_kafka_table_ingest.sql
rename to clickhouse/node-entrypoints/main/05_kafka_table_ingest.sql
diff --git a/preprocessing/docker-compose.yml b/preprocessing/docker-compose.yml
index 98bf52c..7944257 100644
--- a/preprocessing/docker-compose.yml
+++ b/preprocessing/docker-compose.yml
@@ -56,8 +56,10 @@ services:
aliases:
- data-streamer
volumes:
- - "../preprocessing/10k_sample_2023_10_01-2023_10_31.csv:/data/csv/main.csv:ro"
- command: "sh -c 'sleep 30 && python /app/pcap_processor.py -c /data/csv/main.csv -x --stream_size 100000 -l 0.1'"
+ # - "../preprocessing/10k_sample_2023_10_01-2023_10_31.csv:/data/csv/main.csv:ro"
+ - "../preprocessing/1M_sample_2023_10_01-2023_10_31.csv:/data/csv/main.csv:ro"
+ command: "sh -c 'sleep 30 && python /app/pcap_processor.py -c /data/csv/main.csv'"
+ # command: "sh -c 'sleep 30 && python /app/pcap_processor.py -c /data/csv/main.csv -l 0.1'"
deploy:
replicas: 1
# placement:
diff --git a/preprocessing/ip2loc_prep.py b/preprocessing/ip2loc_prep.py
index ebc130a..d20a27f 100644
--- a/preprocessing/ip2loc_prep.py
+++ b/preprocessing/ip2loc_prep.py
@@ -14,7 +14,7 @@ def int_to_ipv4(num: int) -> str:
# with open("IP2LOCATION-LITE-DB3.csv", "r") as input_file, open(
with open("IP2LOCATION-LITE-DB1.csv", "r") as input_file, open(
- "geoip.csv", "w", newline=""
+ "geoip_cc.csv", "w", newline=""
) as output_file:
reader = csv.reader(input_file)
writer = csv.writer(output_file)
@@ -24,6 +24,7 @@ with open("IP2LOCATION-LITE-DB1.csv", "r") as input_file, open(
[
"ip_range_start",
"ip_range_end",
+ "country_code",
"country",
# "region",
# "city",
@@ -35,6 +36,7 @@ with open("IP2LOCATION-LITE-DB1.csv", "r") as input_file, open(
new_record = [
int_to_ipv4(int(record[0])),
int_to_ipv4(int(record[1])),
+ record[2],
record[3],
# record[4],
# record[5],
diff --git a/scripts/deploy.sh b/scripts/deploy.sh
index 3409b90..ef97e1d 100755
--- a/scripts/deploy.sh
+++ b/scripts/deploy.sh
@@ -1,10 +1,11 @@
#!/bin/bash
-while getopts "SMDT:A" flag; do
+while getopts "SMDUT:A:" flag; do
case "${flag}" in
S) sudoRequired=true ;;
M) masterNode=true ;;
D) downStack=true ;;
+ U) autoShard=true ;;
T) swarmToken=$OPTARG ;;
A) managerAddr=$OPTARG ;;
esac
@@ -27,7 +28,7 @@ if [[ $downStack ]]; then
$dockerCmd service rm registry
sleep 20
$dockerCmd volume rm $($dockerCmd volume ls --filter name=$stackName -q)
-elif ($masterNode); then
+elif [[ $masterNode ]]; then
echo "[+] swarm master"
$dockerCmd swarm init
@@ -38,17 +39,16 @@ elif ($masterNode); then
$dockerCmd build -t 127.0.0.1:5000/data-streamer:latest --push -f Dockerfile.python .
# execute
+ chmod 774 ../clickhouse/node-entrypoints/*/00_wait_for_keeper.sh
cd $scriptDir
$dockerCmd stack deploy -d \
-c ../preprocessing/docker-compose.yml \
-c ../clickhouse/docker-compose.yaml \
-c ../ui/docker-compose.yaml \
$stackName
-
- # scripts
- # pip install -r "$scriptDir/../final/config_update_scripts/requirements.txt"
- # cd $scriptDir/../preprocessing
- # python3 update_trigger.py
+elif [[ $autoShard ]]; then
+ cd $scriptDir
+ python3 $scriptDir/../clickhouse/config_update_scripts/update_trigger.py
else
echo "[+] swarm follower"
echo "[+] joining swarm with token $swarmToken"
diff --git a/ui/dashboard_1.json b/ui/dashboard_1.json
index 97e5ba7..446799d 100644
--- a/ui/dashboard_1.json
+++ b/ui/dashboard_1.json
@@ -21,6 +21,497 @@
"id": 1,
"links": [],
"panels": [
+ {
+ "datasource": {
+ "type": "grafana-clickhouse-datasource",
+ "uid": "PDEE91DDB90597936"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisBorderShow": false,
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "barWidthFactor": 0.6,
+ "drawStyle": "line",
+ "fillOpacity": 10,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "smooth",
+ "lineStyle": {
+ "fill": "solid"
+ },
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "dashed"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 0
+ },
+ "id": 10,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "sum"
+ ],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "timezone": [
+ "Asia/Tokyo"
+ ],
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "11.3.1",
+ "targets": [
+ {
+ "editorType": "sql",
+ "format": 1,
+ "meta": {
+ "builderOptions": {
+ "columns": [],
+ "database": "",
+ "limit": 1000,
+ "mode": "list",
+ "queryType": "table",
+ "table": ""
+ }
+ },
+ "pluginVersion": "4.5.1",
+ "queryType": "table",
+ "rawSql": "SELECT\n toDate(time_stamp) AS \"Day\",\n l4_protocol AS \"IP Protocol\",\n SUM(pkt_len)/1024.0/1024.0 AS \"Bandwidth (MB)\"\nFROM traffic_records_all\nGROUP BY \"Day\", l4_protocol\nORDER BY \"Day\" ASC;\n",
+ "refId": "A"
+ }
+ ],
+ "title": "Daily bandwidth trend",
+ "transformations": [
+ {
+ "id": "prepareTimeSeries",
+ "options": {
+ "format": "multi"
+ }
+ }
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "grafana-clickhouse-datasource",
+ "uid": "PDEE91DDB90597936"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisBorderShow": false,
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "barWidthFactor": 0.6,
+ "drawStyle": "line",
+ "fillOpacity": 10,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "smooth",
+ "lineStyle": {
+ "fill": "solid"
+ },
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "dashed"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "yellow",
+ "value": 60
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 12,
+ "y": 0
+ },
+ "id": 9,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "timezone": [
+ "Asia/Tokyo"
+ ],
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "11.3.1",
+ "targets": [
+ {
+ "editorType": "sql",
+ "format": 1,
+ "meta": {
+ "builderOptions": {
+ "columns": [],
+ "database": "",
+ "limit": 1000,
+ "mode": "list",
+ "queryType": "table",
+ "table": ""
+ }
+ },
+ "pluginVersion": "4.5.1",
+ "queryType": "table",
+ "rawSql": "SELECT\n toDate(time_stamp) AS \"Day\",\n l4_protocol AS \"IP Protocol\",\n COUNT(time_stamp) AS \"Packet count\"\nFROM traffic_records_all\nGROUP BY \"Day\", l4_protocol\nORDER BY \"Day\" ASC;\n",
+ "refId": "A"
+ }
+ ],
+ "title": "Daily traffic trend",
+ "transformations": [
+ {
+ "id": "prepareTimeSeries",
+ "options": {
+ "format": "multi"
+ }
+ }
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "grafana-clickhouse-datasource",
+ "uid": "PDEE91DDB90597936"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "yellow",
+ "value": 40
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 9,
+ "x": 0,
+ "y": 8
+ },
+ "id": 8,
+ "options": {
+ "displayMode": "lcd",
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
+ },
+ "maxVizHeight": 300,
+ "minVizHeight": 16,
+ "minVizWidth": 8,
+ "namePlacement": "auto",
+ "orientation": "horizontal",
+ "reduceOptions": {
+ "calcs": [
+ "lastNotNull"
+ ],
+ "fields": "/^Regional traffic bandwidth \\(MB\\)$/",
+ "values": true
+ },
+ "showUnfilled": true,
+ "sizing": "auto",
+ "valueMode": "color"
+ },
+ "pluginVersion": "11.3.1",
+ "targets": [
+ {
+ "editorType": "sql",
+ "format": 1,
+ "meta": {
+ "builderOptions": {
+ "columns": [],
+ "database": "",
+ "limit": 1000,
+ "mode": "list",
+ "queryType": "table",
+ "table": ""
+ }
+ },
+ "pluginVersion": "4.5.1",
+ "queryType": "table",
+ "rawSql": "SELECT\n SUM(pkt_len)/1024.0/1024.0 AS \"Regional traffic bandwidth (MB)\",\n dictGet('ip_region_dict', ('country_code', 'country'), tuple(src_ip)).2 AS region\nFROM traffic_records_all\nGROUP BY region\nORDER BY \"Regional traffic bandwidth (MB)\" DESC\nLIMIT 10",
+ "refId": "A"
+ }
+ ],
+ "title": "Top regions (bandwidth)",
+ "type": "bargauge"
+ },
+ {
+ "datasource": {
+ "type": "grafana-clickhouse-datasource",
+ "uid": "PDEE91DDB90597936"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 40
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 9,
+ "x": 9,
+ "y": 8
+ },
+ "id": 7,
+ "options": {
+ "displayMode": "lcd",
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
+ },
+ "maxVizHeight": 300,
+ "minVizHeight": 16,
+ "minVizWidth": 8,
+ "namePlacement": "auto",
+ "orientation": "horizontal",
+ "reduceOptions": {
+ "calcs": [
+ "lastNotNull"
+ ],
+ "fields": "/^Regional traffic$/",
+ "values": true
+ },
+ "showUnfilled": true,
+ "sizing": "auto",
+ "valueMode": "color"
+ },
+ "pluginVersion": "11.3.1",
+ "targets": [
+ {
+ "editorType": "sql",
+ "format": 1,
+ "meta": {
+ "builderOptions": {
+ "columns": [],
+ "database": "",
+ "limit": 1000,
+ "mode": "list",
+ "queryType": "table",
+ "table": ""
+ }
+ },
+ "pluginVersion": "4.5.1",
+ "queryType": "table",
+ "rawSql": "SELECT\n COUNT(src_ip)/1000.0/1000.0 AS \"Regional traffic\",\n dictGet('ip_region_dict', ('country_code', 'country'), tuple(src_ip)).2 AS region\nFROM traffic_records_all\nGROUP BY region\nORDER BY \"Regional traffic\" DESC\nLIMIT 10",
+ "refId": "A"
+ }
+ ],
+ "title": "Top regions (packet count)",
+ "type": "bargauge"
+ },
+ {
+ "datasource": {
+ "type": "grafana-clickhouse-datasource",
+ "uid": "PDEE91DDB90597936"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ }
+ },
+ "mappings": []
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 6,
+ "x": 18,
+ "y": 8
+ },
+ "id": 6,
+ "options": {
+ "displayLabels": [
+ "percent",
+ "name"
+ ],
+ "legend": {
+ "displayMode": "list",
+ "placement": "right",
+ "showLegend": true,
+ "values": [
+ "percent"
+ ]
+ },
+ "pieType": "pie",
+ "reduceOptions": {
+ "calcs": [
+ "lastNotNull"
+ ],
+ "fields": "/^Protocol bandwidth$/",
+ "values": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "11.3.1",
+ "targets": [
+ {
+ "editorType": "sql",
+ "format": 1,
+ "meta": {
+ "builderOptions": {
+ "columns": [],
+ "database": "",
+ "limit": 1000,
+ "mode": "list",
+ "queryType": "table",
+ "table": ""
+ }
+ },
+ "pluginVersion": "4.5.1",
+ "queryType": "table",
+ "rawSql": "SELECT\n l4_protocol as Protocol,\n SUM(pkt_len)/1024.0/1024.0 as \"Protocol bandwidth\"\n FROM traffic_records_all\n GROUP BY Protocol",
+ "refId": "A"
+ }
+ ],
+ "title": "Distribution of L4 protocol (bandwidth)",
+ "type": "piechart"
+ },
{
"datasource": {
"type": "grafana-clickhouse-datasource",
@@ -38,7 +529,7 @@
"axisLabel": "",
"axisPlacement": "auto",
"fillOpacity": 80,
- "gradientMode": "none",
+ "gradientMode": "hue",
"hideFrom": {
"legend": false,
"tooltip": false,
@@ -66,10 +557,10 @@
"overrides": []
},
"gridPos": {
- "h": 8,
- "w": 18,
+ "h": 7,
+ "w": 9,
"x": 0,
- "y": 0
+ "y": 15
},
"id": 5,
"options": {
@@ -111,13 +602,110 @@
},
"pluginVersion": "4.5.1",
"queryType": "table",
- "rawSql": "SELECT Port,\r\n src_bw/1024.0/1024.0 AS \"Source Port Bandwidth (MB)\",\r\n dst_bw/1024.0/1024.0 AS \"Destination Port Bandwidth (MB)\"\r\nFROM (\r\n SELECT src_port AS Port,\r\n SUM(pkt_len) AS src_bw\r\n FROM traffic_records_all\r\n GROUP BY src_port\r\n ORDER BY src_bw DESC\r\n LIMIT 40\r\n ) AS src\r\n INNER JOIN (\r\n SELECT dst_port AS Port,\r\n SUM(pkt_len) AS dst_bw\r\n FROM traffic_records_all\r\n GROUP BY dst_port\r\n ORDER BY dst_bw DESC\r\n LIMIT 40\r\n ) AS dst USING (Port)\r\nORDER BY (src_bw + dst_bw) DESC\r\nLIMIT 40;",
+ "rawSql": "SELECT Port,\r\n src_bw/1024.0/1024.0 AS \"Source port bandwidth (MB)\",\r\n dst_bw/1024.0/1024.0 AS \"Destination port bandwidth (MB)\"\r\nFROM (\r\n SELECT src_port AS Port,\r\n SUM(pkt_len) AS src_bw\r\n FROM traffic_records_all\r\n GROUP BY src_port\r\n ORDER BY src_bw DESC\r\n LIMIT 20\r\n ) AS src\r\n INNER JOIN (\r\n SELECT dst_port AS Port,\r\n SUM(pkt_len) AS dst_bw\r\n FROM traffic_records_all\r\n GROUP BY dst_port\r\n ORDER BY dst_bw DESC\r\n LIMIT 20\r\n ) AS dst USING (Port)\r\nORDER BY (src_bw + dst_bw) DESC\r\nLIMIT 20;",
"refId": "A"
}
],
"title": "Top ports (by bandwidth)",
"type": "barchart"
},
+ {
+ "datasource": {
+ "type": "grafana-clickhouse-datasource",
+ "uid": "PDEE91DDB90597936"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisBorderShow": false,
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "fillOpacity": 80,
+ "gradientMode": "hue",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "lineWidth": 1,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 9,
+ "x": 9,
+ "y": 15
+ },
+ "id": 4,
+ "options": {
+ "barRadius": 0,
+ "barWidth": 0.9,
+ "fullHighlight": false,
+ "groupWidth": 0.7,
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "orientation": "auto",
+ "showValue": "never",
+ "stacking": "normal",
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ },
+ "xField": "Port",
+ "xTickLabelRotation": 0,
+ "xTickLabelSpacing": 0
+ },
+ "pluginVersion": "11.3.1",
+ "targets": [
+ {
+ "editorType": "sql",
+ "format": 1,
+ "meta": {
+ "builderOptions": {
+ "columns": [],
+ "database": "",
+ "limit": 1000,
+ "mode": "list",
+ "queryType": "table",
+ "table": ""
+ }
+ },
+ "pluginVersion": "4.5.1",
+ "queryType": "table",
+ "rawSql": "SELECT \r\n Port, \r\n SourcePortCount AS \"Source port frequency\",\r\n DestPortCount AS \"Destination port frequency\"\r\nFROM\r\n(\r\n SELECT \r\n src_port AS Port, \r\n COUNT(*) AS SourcePortCount\r\n FROM traffic_records_all\r\n GROUP BY src_port\r\n ORDER BY SourcePortCount DESC\r\n LIMIT 20\r\n) AS src\r\nINNER JOIN\r\n(\r\n SELECT \r\n dst_port AS Port, \r\n COUNT(*) AS DestPortCount\r\n FROM traffic_records_all\r\n GROUP BY dst_port\r\n ORDER BY DestPortCount DESC\r\n LIMIT 20\r\n) AS dst\r\nUSING (Port)\r\nORDER BY (SourcePortCount + DestPortCount) DESC\r\nLIMIT 20;\r\n",
+ "refId": "A"
+ }
+ ],
+ "title": "Top ports (frequency)",
+ "type": "barchart"
+ },
{
"datasource": {
"type": "grafana-clickhouse-datasource",
@@ -140,10 +728,10 @@
"overrides": []
},
"gridPos": {
- "h": 8,
+ "h": 7,
"w": 6,
"x": 18,
- "y": 0
+ "y": 15
},
"id": 1,
"options": {
@@ -153,7 +741,7 @@
],
"legend": {
"displayMode": "list",
- "placement": "bottom",
+ "placement": "right",
"showLegend": true,
"values": [
"percent"
@@ -208,32 +796,18 @@
"fieldConfig": {
"defaults": {
"color": {
- "mode": "palette-classic"
+ "mode": "thresholds"
},
"custom": {
- "axisBorderShow": false,
- "axisCenteredZero": false,
- "axisColorMode": "text",
- "axisLabel": "",
- "axisPlacement": "auto",
- "fillOpacity": 100,
- "gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
- },
- "lineWidth": 1,
- "scaleDistribution": {
- "type": "linear"
- },
- "thresholdsStyle": {
- "mode": "off"
}
},
"mappings": [],
"thresholds": {
- "mode": "absolute",
+ "mode": "percentage",
"steps": [
{
"color": "green",
@@ -245,110 +819,87 @@
"overrides": []
},
"gridPos": {
- "h": 8,
- "w": 18,
+ "h": 16,
+ "w": 24,
"x": 0,
- "y": 8
+ "y": 22
},
- "id": 4,
+ "id": 11,
"options": {
- "barRadius": 0,
- "barWidth": 0.9,
- "fullHighlight": false,
- "groupWidth": 0.7,
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom",
- "showLegend": true
+ "basemap": {
+ "config": {},
+ "name": "Layer 0",
+ "type": "default"
},
- "orientation": "auto",
- "showValue": "never",
- "stacking": "normal",
- "tooltip": {
- "mode": "single",
- "sort": "none"
+ "controls": {
+ "mouseWheelZoom": true,
+ "showAttribution": true,
+ "showDebug": false,
+ "showMeasure": false,
+ "showScale": false,
+ "showZoom": true
},
- "xField": "Port",
- "xTickLabelRotation": 0,
- "xTickLabelSpacing": 0
- },
- "pluginVersion": "11.3.1",
- "targets": [
- {
- "editorType": "sql",
- "format": 1,
- "meta": {
- "builderOptions": {
- "columns": [],
- "database": "",
- "limit": 1000,
- "mode": "list",
- "queryType": "table",
- "table": ""
- }
- },
- "pluginVersion": "4.5.1",
- "queryType": "table",
- "rawSql": "SELECT \r\n Port, \r\n SourcePortCount AS \"Source port frequency\",\r\n DestPortCount AS \"Destination port frequency\"\r\nFROM\r\n(\r\n SELECT \r\n src_port AS Port, \r\n COUNT(*) AS SourcePortCount\r\n FROM traffic_records_all\r\n GROUP BY src_port\r\n ORDER BY SourcePortCount DESC\r\n LIMIT 40\r\n) AS src\r\nINNER JOIN\r\n(\r\n SELECT \r\n dst_port AS Port, \r\n COUNT(*) AS DestPortCount\r\n FROM traffic_records_all\r\n GROUP BY dst_port\r\n ORDER BY DestPortCount DESC\r\n LIMIT 40\r\n) AS dst\r\nUSING (Port)\r\nORDER BY (SourcePortCount + DestPortCount) DESC\r\nLIMIT 40;\r\n",
- "refId": "A"
- }
- ],
- "title": "Top ports (frequency)",
- "type": "barchart"
- },
- {
- "datasource": {
- "type": "grafana-clickhouse-datasource",
- "uid": "PDEE91DDB90597936"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- }
- },
- "mappings": []
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 6,
- "x": 18,
- "y": 8
- },
- "id": 6,
- "options": {
- "displayLabels": [
- "percent",
- "name"
+ "layers": [
+ {
+ "config": {
+ "showLegend": true,
+ "style": {
+ "color": {
+ "field": "cc",
+ "fixed": "dark-green"
+ },
+ "opacity": 0.4,
+ "rotation": {
+ "fixed": 0,
+ "max": 360,
+ "min": -360,
+ "mode": "mod"
+ },
+ "size": {
+ "field": "Source",
+ "fixed": 5,
+ "max": 25,
+ "min": 3
+ },
+ "symbol": {
+ "fixed": "img/icons/marker/circle.svg",
+ "mode": "fixed"
+ },
+ "symbolAlign": {
+ "horizontal": "center",
+ "vertical": "center"
+ },
+ "text": {
+ "fixed": "",
+ "mode": "field"
+ },
+ "textConfig": {
+ "fontSize": 8,
+ "offsetX": 0,
+ "offsetY": 0,
+ "textAlign": "center",
+ "textBaseline": "middle"
+ }
+ }
+ },
+ "location": {
+ "lookup": "cc",
+ "mode": "lookup"
+ },
+ "name": "Markers",
+ "tooltip": false,
+ "type": "markers"
+ }
],
- "legend": {
- "displayMode": "list",
- "placement": "bottom",
- "showLegend": true,
- "values": [
- "percent"
- ]
- },
- "pieType": "pie",
- "reduceOptions": {
- "calcs": [
- "lastNotNull"
- ],
- "fields": "/^Protocol bandwidth$/",
- "values": true
- },
"tooltip": {
- "mode": "single",
- "sort": "none"
+ "mode": "details"
+ },
+ "view": {
+ "allLayers": true,
+ "id": "oceania",
+ "lat": -10,
+ "lon": -140,
+ "zoom": 3
}
},
"pluginVersion": "11.3.1",
@@ -368,28 +919,29 @@
},
"pluginVersion": "4.5.1",
"queryType": "table",
- "rawSql": "SELECT\n l4_protocol as Protocol,\n SUM(pkt_len)/1024.0/1024.0 as \"Protocol bandwidth\"\n FROM traffic_records_all\n GROUP BY Protocol",
+ "rawSql": "SELECT\n COUNT(src_ip) AS \"Source\",\n dictGet('ip_region_dict', ('country_code', 'country'), tuple(src_ip)).1 AS cc\nFROM traffic_records_all\nGROUP BY cc\nORDER BY \"Source\" DESC;\n",
"refId": "A"
}
],
- "title": "Distribution of L4 protocol (bandwidth)",
- "type": "piechart"
+ "title": "Traffic map",
+ "type": "geomap"
}
],
"preload": false,
+ "refresh": "",
"schemaVersion": 40,
"tags": [],
"templating": {
"list": []
},
"time": {
- "from": "now-6h",
- "to": "now"
+ "from": "2023-10-01T05:00:00.000Z",
+ "to": "2023-10-31T05:00:03.000Z"
},
"timepicker": {},
"timezone": "browser",
"title": "Internet traffic capture analysis",
"uid": "be59fkbp3zs3kc",
- "version": 1,
+ "version": 4,
"weekStart": ""
}
\ No newline at end of file