docker-compose.yml fix

AnirudhPI committed 2024-11-20 18:21:53 -07:00
parent 1df759f6a6
commit 4c04f4f0ef
2 changed files with 25 additions and 19 deletions

docker-compose.yml

@@ -4,7 +4,9 @@ services:
   zookeeper:
     image: confluentinc/cp-zookeeper:latest
     networks:
-      - kafka_network
+      kafka_network:
+        aliases:
+          - zookeeper
     deploy:
       replicas: 1
       restart_policy:
@@ -13,11 +15,6 @@ services:
       ZOOKEEPER_CLIENT_PORT: 2181
     ports:
       - "2181:2181"
-    healthcheck:
-      test: ["CMD", "nc", "-z", "localhost", "2181"]
-      interval: 10s
-      timeout: 5s
-      retries: 5

   kafka:
     image: confluentinc/cp-kafka:latest
@@ -26,34 +23,37 @@ services:
     environment:
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
       KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT://localhost:9092
-      KAFKA_LISTENER_SECURITY_PROTOCOL: PLAINTEXT
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT
       KAFKA_BROKER_ID: 1
+      KAFKA_MESSAGE_MAX_BYTES: 200000000
+      KAFKA_REPLICA_FETCH_MAX_BYTES: 200000000
     networks:
-      - kafka_network
+      kafka_network:
+        aliases:
+          - kafka
     ports:
       - "9092:9092"
+    volumes:
+      - kafka_data:/var/lib/kafka/data
     deploy:
       replicas: 1
       restart_policy:
         condition: on-failure
-    healthcheck:
-      test: ["CMD", "nc", "-z", "localhost", "9092"]
-      interval: 10s
-      timeout: 5s
-      retries: 5

   pcap_streamer:
     image: levenshtein/streamer_test3:latest
     depends_on:
       - kafka
     networks:
-      - kafka_network
+      kafka_network:
+        aliases:
+          - pcap_streamer
     volumes:
       - "/host_mnt/c/Users/akash/storage/Asu/sem3/dds/project:/data/pcap"
     environment:
       PCAP_FILE: /data/pcap/202310081400.pcap
-    command: ["sh", "-c", "sleep 10 && python /app/pcap_processor.py -f /data/pcap/202310081400.pcap -s --stream_size 1000"]
+    command: ["sh", "-c", "sleep 30 && python /app/pcap_processor.py -f /data/pcap/202310081400.pcap -s --stream_size 1000"]
     deploy:
       replicas: 1
       restart_policy:
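Note on the command change: bumping the startup delay from sleep 10 to sleep 30 only papers over ordering, since depends_on waits for the kafka container to start, not for the broker to accept connections. A retry loop is sturdier than any fixed sleep. Below is a minimal sketch using kafka-python, which this project already uses; the helper name connect_with_retry and its defaults are illustrative, not part of this repo.

import time

from kafka import KafkaProducer
from kafka.errors import NoBrokersAvailable


def connect_with_retry(bootstrap='kafka:9092', attempts=12, delay=5):
    # Poll the broker until it accepts connections instead of
    # sleeping for a fixed interval and hoping it is ready.
    for _ in range(attempts):
        try:
            return KafkaProducer(bootstrap_servers=[bootstrap])
        except NoBrokersAvailable:
            time.sleep(delay)
    raise RuntimeError('no Kafka broker at %s after %d attempts'
                       % (bootstrap, attempts))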
@@ -62,3 +62,8 @@ services:
 networks:
   kafka_network:
     driver: overlay
+    attachable: true
+
+volumes:
+  kafka_data:
+    driver: local
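With the advertised listener now fixed to PLAINTEXT://kafka:9092, the broker tells every client to reconnect at kafka:9092, so clients outside the overlay network cannot use the published localhost:9092 port for real traffic. Because kafka_network is now attachable, connectivity can be verified from a one-off container joined to it. A minimal sketch, assuming kafka-python is installed in that container:

from kafka import KafkaConsumer

# Run inside a container attached to kafka_network, e.g.:
#   docker run --rm --network kafka_network <image-with-kafka-python> python check.py
consumer = KafkaConsumer(bootstrap_servers=['kafka:9092'], api_version=(0, 11, 5))
print(consumer.topics())  # prints the topic set once the broker answers
consumer.close()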

Kafka client module (filename not captured)

@@ -17,7 +17,8 @@ class KafkaClient:
         self.topic_name = topic_name
         if mode == 'producer':
             self.client = KafkaProducer(
-                bootstrap_servers=['localhost:9092'],
+                bootstrap_servers=['kafka:9092'],
+                max_request_size = 200000000,
                 api_version=(0,11,5),
                 value_serializer=lambda x: json.dumps(x).encode('utf-8'))
         elif mode == 'consumer' and topic_name is not None:
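Raising max_request_size to 200 MB on the producer only works because the broker now accepts messages that large (the KAFKA_MESSAGE_MAX_BYTES and KAFKA_REPLICA_FETCH_MAX_BYTES additions in docker-compose.yml). A quick, illustrative way to exercise the new limit; the topic name pcap_stream comes from this file, the payload is fake:

import json

from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=['kafka:9092'],
    max_request_size=200000000,
    api_version=(0, 11, 5),
    value_serializer=lambda x: json.dumps(x).encode('utf-8'))

# Roughly 5 MB of JSON, well above Kafka's default 1 MB cap; this send
# succeeds only because both the producer and broker limits were raised.
producer.send('pcap_stream', {'payload': 'x' * (5 * 1024 * 1024)})
producer.flush()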
@@ -31,7 +32,7 @@ class KafkaClient:
 # Kafka Configuration
 KAFKA_TOPIC = 'pcap_stream'
-KAFKA_SERVER = 'localhost:9092' # Adjust to your Kafka server
+KAFKA_SERVER = 'kafka:9092' # Adjust to your Kafka server
 #KAFKA_SERVER = 'kafka_service:9092'
 # Initialize Kafka Producer
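One caveat the diff does not cover is the consumer side: kafka-python defaults max_partition_fetch_bytes to 1 MB, and its docs note this must be at least as large as the biggest message the broker allows, or the consumer may be unable to fetch such messages. A minimal sketch of matching consumer settings; the values mirror the 200 MB broker/producer limits above and the topic name comes from this file:

from kafka import KafkaConsumer

# Both fetch limits should cover the largest message the broker accepts,
# per the kafka-python documentation for these parameters.
consumer = KafkaConsumer(
    'pcap_stream',
    bootstrap_servers=['kafka:9092'],
    api_version=(0, 11, 5),
    fetch_max_bytes=200000000,
    max_partition_fetch_bytes=200000000)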