Configured Docker setup and Docker Compose for all services

parent 4b880ddf
version: '3'
services:
  zookeeper:
    image: wurstmeister/zookeeper
    container_name: zookeeper
    restart: always
    ports:
      - 2181:2181
  kafka:
    image: wurstmeister/kafka
    container_name: kafka
    restart: always
    ports:
      - 9092:9092
    depends_on:
      - zookeeper
    links:
      - zookeeper:zookeeper
    environment:
      KAFKA_ADVERTISED_HOST_NAME: kafka
      KAFKA_ADVERTISED_PORT: 9092
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
  spark:
    image: bitnami/spark:2
    environment:
      - SPARK_MODE=master
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    depends_on:
      - mongodb-docker
  mongodb-docker:
    image: mongo:latest
    container_name: mongodb-docker
    restart: always
    ports:
      - 27017:27017
    volumes:
      - ./data/db:/data/db
    hostname: mongodb-docker
  omd-stream-processor:
    image: omd-stream-processor
    container_name: omd-stream-processor
    restart: always
    ports:
      - 8084:8084
    depends_on:
      - spark
      - mongodb-docker
  omd-dashboard:
    image: omd-dashboard
    container_name: omd-dashboard
    restart: always
    ports:
      - 8082:8082
    depends_on:
      - mongodb-docker
  omd-checkout-service:
    image: omd-checkout-service
    container_name: omd-checkout-service
    restart: always
    ports:
      - 8081:8081
    depends_on:
      - kafka
    links:
      - kafka:kafka
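Bringing the stack up requires the three omd-* images to exist locally, since the compose file only references them by name and does not pull them from a registry. A typical sequence, assuming each service module contains the Dockerfile shown below (paths are illustrative, not taken from the repository layout):

mvn clean package
docker build -t omd-checkout-service omd-checkout-service/
docker build -t omd-dashboard omd-dashboard/
docker build -t omd-stream-processor omd-stream-processor/
docker-compose up -d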
FROM openjdk:8
ADD target/omd-checkout-service.jar omd-checkout-service.jar
EXPOSE 8081
ENTRYPOINT ["java", "-jar", "omd-checkout-service.jar"]
\ No newline at end of file
@@ -86,6 +86,9 @@
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
+<configuration>
+<finalName>omd-checkout-service</finalName>
+</configuration>
</plugin>
</plugins>
</build>
......
@@ -71,7 +71,7 @@ app.paymentMethod.paymentDetail.RequestAmount=1000.00
#Kafka Properties
-bootstrap-server = localhost:9092
+bootstrap-server = kafka:9092
topic-name = TOPIC_OMD_ORDER_DATA
# Time intervals
......
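The bootstrap-server and topic-name keys above are custom property names rather than Spring's spring.kafka.* keys, so the checkout service presumably binds them itself. A minimal sketch of how that wiring could look; the class name, bean name, and serializer choices are illustrative and not taken from the repository:

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class OmdKafkaProducerConfig {

    // Resolves to kafka:9092 inside the compose network, localhost:9092 when run outside Docker
    @Value("${bootstrap-server}")
    private String bootstrapServer;

    @Bean
    public KafkaProducer<String, String> omdOrderProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(props);
    }
}

Orders published through such a producer would go to the topic-name topic (TOPIC_OMD_ORDER_DATA), the same topic the stream processor subscribes to further down in this commit.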
FROM openjdk:8
ADD target/omd-dashboard.jar omd-dashboard.jar
EXPOSE 8082
ENTRYPOINT ["java", "-jar", "omd-dashboard.jar"]
\ No newline at end of file
@@ -28,6 +28,11 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-mongodb</artifactId>
</dependency>
+<dependency>
+<groupId>org.mongodb</groupId>
+<artifactId>mongo-java-driver</artifactId>
+<version>2.12.3</version>
+</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
@@ -69,6 +74,9 @@
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
+<configuration>
+<finalName>omd-dashboard</finalName>
+</configuration>
</plugin>
</plugins>
</build>
......
@@ -4,6 +4,6 @@ mongodb-script-1=inserts.jnosql
bezkoder.app.jwtSecret= bezKoderSecretKey
bezkoder.app.jwtExpirationMs= 86400000
server.port=8082
-spring.data.mongodb.host=localhost
+spring.data.mongodb.host=mongodb-docker
spring.data.mongodb.port=27017
spring.data.mongodb.database=Orders
\ No newline at end of file
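With spring.data.mongodb.host now pointing at the mongodb-docker container, the dashboard needs no explicit connection code; Spring Boot builds its MongoClient from these three properties. A rough sketch of a read-side repository over the Orders database, assuming Spring Data MongoDB is used for reads; the Order fields are hypothetical and only illustrate the mapping (the two types would live in separate files):

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.MongoRepository;

@Document(collection = "orders")
public class Order {
    @Id
    private String id;
    private String status;       // hypothetical field
    private double totalAmount;  // hypothetical field
}

public interface OrderRepository extends MongoRepository<Order, String> {
}

The orders collection matches the Orders.orders output URI that the stream processor writes to later in this commit, so the dashboard reads whatever the Spark job persists.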
FROM openjdk:8
ADD target/omd-notifications.jar omd-notifications.jar
EXPOSE 8089
ENTRYPOINT ["java", "-jar", "omd-notifications.jar"]
\ No newline at end of file
package com.nisum.omd;

import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;

@SpringBootTest
class OmdNotificationsApplicationTests {

    @Test
    void contextLoads() {
    }
}
FROM openjdk:8
ADD target/omd-stream-processor.jar omd-stream-processor.jar
EXPOSE 8084
ENTRYPOINT ["java", "-jar", "omd-stream-processor.jar"]
\ No newline at end of file
@@ -53,6 +53,15 @@
<artifactId>mongo-spark-connector_2.11</artifactId>
<version>2.2.1</version>
</dependency>
+<dependency>
+<groupId>org.springframework.boot</groupId>
+<artifactId>spring-boot-starter-data-mongodb</artifactId>
+</dependency>
+<dependency>
+<groupId>org.mongodb</groupId>
+<artifactId>mongo-java-driver</artifactId>
+<version>2.12.3</version>
+</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
@@ -71,6 +80,9 @@
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
+<configuration>
+<finalName>omd-stream-processor</finalName>
+</configuration>
</plugin>
</plugins>
</build>
......
@@ -3,12 +3,13 @@ package com.nisum.omd;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import com.nisum.omd.spark.service.OmdSparkStreamService;
-@SpringBootApplication
+@SpringBootApplication(exclude={MongoAutoConfiguration.class})
public class OmdStreamProcessorApplication {
public static void main(String[] args) {
......
@@ -5,13 +5,13 @@ public class OmdKafkaUtility {
public static final String APPLICATION_NAME = "Streaming Order DStream";
//public static final String HADOOP_HOME_DIR_VALUE = "C:/winutils";
public static final String HADOOP_HOME_DIR_VALUE = "/usr/rdhane/server/spark-3.0.0-preview2-bin-hadoop2.7/bin";
-public static final String RUN_LOCAL_WITH_AVAILABLE_CORES = "local[*]";
+public static final String RUN_LOCAL_WITH_AVAILABLE_CORES = "local";
public static final int BATCH_DURATION_INTERVAL_MS = 50000;
-public static final String KAFKA_BROKERS = "localhost:9092";
+public static final String KAFKA_BROKERS = "kafka:9092";
public static final String KAFKA_OFFSET_RESET_TYPE = "latest";
public static final String KAFKA_GROUP = "omdgroup";
public static final String KAFKA_TOPIC = "TOPIC_OMD_ORDER_DATA";
-public static final String MONGODB_OUTPUT_URI = "mongodb://localhost/Orders.orders";
+public static final String MONGODB_OUTPUT_URI = "mongodb://mongodb-docker:27017/Orders.orders";
private OmdKafkaUtility() {
}
}
@@ -40,6 +40,7 @@ public class OmdSparkStreamService {
.setMaster(OmdKafkaUtility.RUN_LOCAL_WITH_AVAILABLE_CORES)
.setAppName(OmdKafkaUtility.APPLICATION_NAME)
.set("spark.mongodb.output.uri", OmdKafkaUtility.MONGODB_OUTPUT_URI);
+conf.set("spark.testing.memory", "2147480000");
JavaStreamingContext streamingContext
= new JavaStreamingContext(conf, new Duration(OmdKafkaUtility.BATCH_DURATION_INTERVAL_MS));
......
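Taken together, the OmdKafkaUtility constants and the configuration above follow the standard spark-streaming-kafka-0-10 direct-stream pattern. The sketch below is an approximation of that flow, not the repository's actual OmdSparkStreamService code; the class name is invented and the per-batch write is left as a placeholder:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

public class OmdStreamSketch {
    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf()
                .setMaster(OmdKafkaUtility.RUN_LOCAL_WITH_AVAILABLE_CORES)
                .setAppName(OmdKafkaUtility.APPLICATION_NAME)
                .set("spark.mongodb.output.uri", OmdKafkaUtility.MONGODB_OUTPUT_URI)
                // Commonly used to satisfy Spark's minimum-memory check when the container JVM heap is small (value in bytes, about 2 GB)
                .set("spark.testing.memory", "2147480000");

        JavaStreamingContext ssc =
                new JavaStreamingContext(conf, new Duration(OmdKafkaUtility.BATCH_DURATION_INTERVAL_MS));

        // Consumer settings built from the constants defined in OmdKafkaUtility
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, OmdKafkaUtility.KAFKA_BROKERS);
        kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, OmdKafkaUtility.KAFKA_GROUP);
        kafkaParams.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, OmdKafkaUtility.KAFKA_OFFSET_RESET_TYPE);
        kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        JavaInputDStream<ConsumerRecord<String, String>> stream = KafkaUtils.createDirectStream(
                ssc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.<String, String>Subscribe(
                        Collections.singletonList(OmdKafkaUtility.KAFKA_TOPIC), kafkaParams));

        // Each batch of order events would be transformed and written to Orders.orders here
        stream.foreachRDD(rdd -> { /* persist via the Mongo Spark connector */ });

        ssc.start();
        ssc.awaitTermination();
    }
}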
package com.nisum.omd;

import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;

@SpringBootTest
class OmdStreamProcessorApplicationTests {

    @Test
    void contextLoads() {
    }
}