diff --git a/preprocessing/docker-compose.yml b/preprocessing/docker-compose.yml
index d42305b..6ed8638 100644
--- a/preprocessing/docker-compose.yml
+++ b/preprocessing/docker-compose.yml
@@ -1,5 +1,3 @@
-version: '3.8'
-
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper:latest
@@ -41,22 +39,17 @@ services:
       restart_policy:
         condition: on-failure
 
-  pcap_streamer:
-    image: levenshtein/streamer_test3:latest
+  data_streamer:
+    image: 127.0.0.1:5000/data_streamer:latest
     depends_on:
       - kafka
     networks:
       kafka_network:
         aliases:
-          - pcap_streamer
+          - data_streamer
     volumes:
-      # - "/host_mnt/c/Users/akash/storage/Asu/sem3/dds/project:/data/pcap"
-      - "./:/data/pcap"
-      - "./:/data/csv"
-    environment:
-      PCAP_FILE: /data/pcap/202310081400.pcap
-    # command: ["sh", "-c", "sleep 30 && python /app/pcap_processor.py -f /data/pcap/202310081400.pcap -s --stream_size 1000"]
-    command: ["sh", "-c", "sleep 30 && python /app/pcap_processor.py -c /data/csv/sample_output.csv -s --stream_size 1000"]
+      - "./10k_sample_2023_10_01-2023_10_31.csv:/data/csv/main.csv:ro"
+    command: "sh -c 'sleep 30 && python /app/pcap_processor.py -c /data/csv/main.csv -x --stream_size 100000'"
     deploy:
       replicas: 1
       restart_policy:
@@ -69,4 +62,4 @@ networks:
 
 volumes:
   kafka_data:
-    driver: local
\ No newline at end of file
+    driver: local
diff --git a/preprocessing/pcap_processor.py b/preprocessing/pcap_processor.py
index 0054b86..0fd40af 100644
--- a/preprocessing/pcap_processor.py
+++ b/preprocessing/pcap_processor.py
@@ -208,7 +208,7 @@ if __name__ == "__main__":
             producer.client.send(KAFKA_TOPIC, row_to_dict(row))
             dbg_print(row_to_dict(row))
             print("streamed packet", idx)
-            if idx > sample_size:
+            if sample and idx > sample_size:
                 break
     print(f"total streamed: {idx}")
 
diff --git a/scripts/deploy.ps1 b/scripts/deploy.ps1
new file mode 100644
index 0000000..e8554c1
--- /dev/null
+++ b/scripts/deploy.ps1
@@ -0,0 +1,34 @@
+param (
+    [switch] $MasterNode,
+    [string] $SwarmToken,
+    [string] $ManagerAddr
+)
+
+$script_dir = $PSScriptRoot
+# Write-Output $script_dir # ===> \Project\scripts
+
+if ($MasterNode) {
+    Write-Output "Initializing Docker Swarm..."
+
+    docker stack rm test_datastreamer_automated
+    docker service rm registry
+
+    # registry
+    Set-Location $script_dir/../preprocessing
+
+    docker service create --name registry -p 5000:5000 registry:2
+    docker build -t 127.0.0.1:5000/data_streamer:latest --no-cache --push -f Dockerfile.python .
+
+    docker stack deploy -d -c docker-compose.yml test_datastreamer_automated
+
+    Set-Location $script_dir
+
+    # data streaming
+
+    # pip install -r "$script_dir/../final/config_update_scripts/requirements.txt"
+}
+else {
+    Write-Output "swarm follower"
+    Write-Output "joining swarm with token $SwarmToken"
+    docker swarm join --token $SwarmToken $ManagerAddr
+}