Removed unused imports

parent d7d156da
......@@ -9,7 +9,6 @@ import java.util.Map;
import java.util.concurrent.TimeoutException;
import com.nisum.entity.UserCardInfo;
import com.nisum.utils.UserConstants;
import org.apache.spark.sql.*;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
......@@ -57,7 +56,7 @@ public class SparkConsumer {
StreamingQuery query = ds.writeStream().outputMode(OutputMode.Append())
.option("checkpointLocation", "checkpointlocaion/streamingjob")
.foreach(new ForeachWriter<UserCardInfo>() {
.foreach(new ForeachWriter<>() {
private MongoClient mongoClient;
private MongoDatabase database;
private MongoCollection<Document> collection;
......
......@@ -4,7 +4,6 @@ import com.mongodb.spark.config.ReadConfig;
import java.util.concurrent.TimeoutException;
import com.nisum.utils.UserConstants;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
......@@ -15,7 +14,7 @@ import static com.nisum.utils.UserConstants.*;
public class SparkProducer {
public static void main(String[] args) throws TimeoutException, StreamingQueryException {
public static void main(String[] args) {
SparkSession spark = SparkSession.builder()
.master("local")
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment