Moved hardcoded Kafka and MongoDB configuration values to constants

parent b4ccd2e9
@@ -9,6 +9,7 @@ import java.util.Map;
 import java.util.concurrent.TimeoutException;
 import com.nisum.entity.UserCardInfo;
+import com.nisum.utils.UserConstants;
 import org.apache.spark.sql.*;
 import org.apache.spark.sql.streaming.OutputMode;
 import org.apache.spark.sql.streaming.StreamingQuery;
@@ -16,18 +17,20 @@ import org.apache.spark.sql.streaming.StreamingQueryException;
 import org.apache.spark.sql.types.StructType;
 import org.bson.Document;
+import static com.nisum.utils.UserConstants.*;
 public class SparkConsumer {
     static String TOPIC = "UserInfoTopic";
     public static void main(String[] args) throws TimeoutException, StreamingQueryException {
         SparkSession spark = SparkSession.builder()
                 .master("local[*]")
                 .appName("MongoSparkConnectorIntro")
-                .config("spark.mongodb.output.uri", "mongodb://127.0.0.1/UserDB.users_info")
+                .config("spark.mongodb.output.uri", USERDB_HOST)
                 .getOrCreate();
         Map<String, String> kafkaConfigMap = new HashMap<>();
-        kafkaConfigMap.put("kafka.bootstrap.servers", "localhost:9092");
+        kafkaConfigMap.put("kafka.bootstrap.servers", KAFKA_HOST);
         kafkaConfigMap.put("subscribe", TOPIC);
         kafkaConfigMap.put("startingOffsets", "earliest");
         kafkaConfigMap.put("failOnDataLoss", "false");
@@ -61,9 +64,9 @@ public class SparkConsumer {
             @Override
             public boolean open(long partitionId, long epochId) {
-                mongoClient = MongoClients.create("mongodb://localhost:27017");
-                database = mongoClient.getDatabase("UserDB");
-                collection = database.getCollection("user_card_details");
+                mongoClient = MongoClients.create(URI);
+                database = mongoClient.getDatabase(CARD_DETAILS_DB);
+                collection = database.getCollection(USER_CARD_DETAILS);
                 return true;
             }
......
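Only the open(...) callback of the MongoDB sink appears in the hunk above; the enclosing type is Spark's ForeachWriter<UserCardInfo>, whose process and close callbacks are elided from the diff. A sketch of how the complete writer might look with the new constants; the process body and the UserCardInfo accessors are assumptions, not the repository's code:

// Sketch only: plausible shape of the ForeachWriter the open(...) hunk belongs to.
new ForeachWriter<UserCardInfo>() {
    private MongoClient mongoClient;
    private MongoDatabase database;
    private MongoCollection<Document> collection;

    @Override
    public boolean open(long partitionId, long epochId) {
        // Same as the committed change: connection details come from UserConstants.
        mongoClient = MongoClients.create(URI);
        database = mongoClient.getDatabase(CARD_DETAILS_DB);
        collection = database.getCollection(USER_CARD_DETAILS);
        return true;
    }

    @Override
    public void process(UserCardInfo value) {
        // getUserId()/getCardNumber() are hypothetical accessors on UserCardInfo.
        collection.insertOne(new Document("userId", value.getUserId())
                .append("cardNumber", value.getCardNumber()));
    }

    @Override
    public void close(Throwable errorOrNull) {
        mongoClient.close();
    }
};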
@@ -4,28 +4,29 @@ import com.mongodb.spark.config.ReadConfig;
 import java.util.concurrent.TimeoutException;
+import com.nisum.utils.UserConstants;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.functions;
 import org.apache.spark.sql.streaming.StreamingQueryException;
 import org.apache.spark.storage.StorageLevel;
+import static com.nisum.utils.UserConstants.*;
 public class SparkProducer {
     static String TOPIC = "UserInfoTopic";
     public static void main(String[] args) throws TimeoutException, StreamingQueryException {
         SparkSession spark = SparkSession.builder()
                 .master("local")
                 .appName("MongoSparkConnectorIntro")
-                .config("spark.mongodb.input.uri", "mongodb://127.0.0.1/UserDB.users_info")
+                .config("spark.mongodb.input.uri", USERDB_HOST)
                 .getOrCreate();
         ReadConfig readConfigUserInfo = ReadConfig.create(spark)
-                .withOption("uri", "mongodb://localhost:27017")
-                .withOption("database", "UserDB")
-                .withOption("collection", "users_info");
+                .withOption("uri", URI)
+                .withOption("database", USER_DB)
+                .withOption("collection", USERS_INFO);
         Dataset<Row> datasetUserInfo = spark.read()
                 .format("mongo")
@@ -34,9 +35,9 @@ public class SparkProducer {
         datasetUserInfo.createOrReplaceTempView("user");
         ReadConfig readConfigUserCardInfo = ReadConfig.create(spark)
-                .withOption("uri", "mongodb://localhost:27017")
-                .withOption("database", "UserDB")
-                .withOption("collection", "users_cards_info");
+                .withOption("uri", URI)
+                .withOption("database", USER_DB)
+                .withOption("collection", USERS_CARDS_INFO);
         Dataset<Row> datasetUserCardInfo = spark.read()
                 .format("mongo")
@@ -61,7 +62,7 @@ public class SparkProducer {
         userCardInfoJson
                 .write()
                 .format("kafka")
-                .option("kafka.bootstrap.servers", "localhost:9092")
+                .option("kafka.bootstrap.servers", KAFKA_HOST)
                 .option("topic", TOPIC).save();
         spark.close();
......
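The hunk above writes a dataset named userCardInfoJson to Kafka. The Kafka sink requires a string "value" column, so such a dataset is normally produced by serialising the joined rows to JSON first. A sketch of that step, where joinedUserCardInfo is a hypothetical name for the user/card join, and the persist call is only suggested by the StorageLevel import above:

// Sketch only: building a Kafka-ready dataset with a single string "value" column.
Dataset<Row> userCardInfoJson = joinedUserCardInfo
        .select(functions.to_json(functions.struct(functions.col("*"))).alias("value"))
        .persist(StorageLevel.MEMORY_ONLY());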
+package com.nisum.utils;
+
+public class UserConstants {
+    public static final String KAFKA_HOST = "localhost:9092";
+    public static final String TOPIC = "UserInfoTopic";
+    public static final String USERDB_HOST = "mongodb://127.0.0.1/UserDB.users_info";
+    public static final String URI = "mongodb://localhost:27017";
+    public static final String CARD_DETAILS_DB = "CardDetailsDB";
+    public static final String USER_CARD_DETAILS = "user_card_details";
+    public static final String USER_DB = "UserDB";
+    public static final String USERS_INFO = "users_info";
+    public static final String USERS_CARDS_INFO = "users_cards_info";
+}
\ No newline at end of file
com.nisum.topicName = UserInfoTopic
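The property above duplicates the topic name held in UserConstants.TOPIC, which suggests the topic is also meant to be configurable. If it is loaded at runtime, a typical (assumed) lookup looks like the sketch below; the file name application.properties is a guess, not confirmed by this diff.

// Sketch only: reading com.nisum.topicName with plain java.util.Properties
// (requires java.util.Properties, java.io.InputStream, java.io.IOException).
Properties props = new Properties();
try (InputStream in = SparkProducer.class.getResourceAsStream("/application.properties")) {
    props.load(in);
} catch (IOException e) {
    throw new RuntimeException("Could not load application.properties", e);
}
String topic = props.getProperty("com.nisum.topicName", TOPIC); // fall back to the constant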