Fixed integration issue with CSV streaming

This commit is contained in:
Akash Sivakumar
2024-11-26 21:41:17 -07:00
parent 5d20e14dbf
commit b1fc1dbc49
5 changed files with 11 additions and 10 deletions

View File

@@ -74,7 +74,7 @@ docker exec -it clickhouse-kafka-1 kafka-console-consumer --bootstrap-server kaf
Get into the ClickHouse client:
```bash
docker exec -it clickhouse-client clickhouse-client
docker exec -it <server1's container ID from docker ps> clickhouse-client
```
Check if tables are available:
```bash

View File

@@ -21,7 +21,7 @@
<keeper_server>
<tcp_port>9181</tcp_port>
<server_id>3</server_id>
<server_id>2</server_id>
<log_storage_path>/var/lib/clickhouse/coordination/log</log_storage_path>
<snapshot_storage_path>/var/lib/clickhouse/coordination/snapshots</snapshot_storage_path>
<coordination_settings>

View File

@@ -125,7 +125,7 @@ services:
condition: on-failure
pcap_streamer:
image: levenshtein/streamer_test4:latest
image: levenshtein/streamer_test7:latest
depends_on:
- kafka
networks:
@@ -133,10 +133,9 @@ services:
aliases:
- pcap_streamer
volumes:
- "/host_mnt/c/Users/akash/storage/Asu/sem3/dds/project:/data/pcap"
environment:
PCAP_FILE: /data/pcap/202310081400.pcap
command: ["sh", "-c", "sleep 30 && python /app/pcap_processor.py -f /data/pcap/202310081400.pcap -s --stream_size 1000"]
#- "/host_mnt/c/Users/akash/storage/Asu/sem3/dds/project:/data/pcap"
- "/host_mnt/c/Users/akash/storage/Asu/sem3/dds/project/project_github/real-time-traffic-analysis-clickhouse/preprocessing:/data/pcap"
command: ["sh", "-c", "sleep 60 && python /app/pcap_processor.py -c /data/pcap/sample_output.csv -s --stream_size 1000"]
deploy:
replicas: 1
restart_policy: