Commit 28886e94 authored by Rdava's avatar Rdava

kafka and spark usage

parents
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="test" value="true"/>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>kafkaandspark</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.8
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning
org.eclipse.jdt.core.compiler.release=disabled
org.eclipse.jdt.core.compiler.source=1.8
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>basicjavaspark</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.javaspark.basic</groupId>
<artifactId>kafkaandspark</artifactId>
<version>0.0.1-SNAPSHOT</version>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.3.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
<version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.9.2</artifactId>
<version>0.8.2.2</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
package com.streams.kafka.consumer;
import java.util.Properties;
import java.util.Arrays;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.ConsumerRecord;
/**
 * Consumes string records from the local Kafka topic named by {@code topicName}
 * and prints each record's offset, key and value to stdout. Runs until the
 * process is killed.
 */
public class KafkaConsumerToReadFile {
    public static void main(String[] args) throws Exception {
        // Quiet the very chatty Apache client logging.
        Logger.getLogger("org.apache").setLevel(Level.WARN);
        String topicName = "test";
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "test");
        // Offsets are committed automatically every second.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Read from the beginning of the topic when this group has no committed offset.
        props.put("auto.offset.reset", "earliest");
        // try-with-resources releases the consumer's network resources if the loop ever exits.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Fix: subscribe using the declared topic variable instead of a duplicated "test" literal,
            // so changing topicName in one place is enough.
            consumer.subscribe(Arrays.asList(topicName));
            System.out.println("Subscribed to topic " + topicName);
            while (true) {
                // Block for up to 100 ms waiting for new records.
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s\n",
                            record.offset(), record.key(), record.value());
                }
            }
        }
    }
}
\ No newline at end of file
package com.streams.kafka.producer;
import java.util.Properties;
import java.util.Arrays;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.ConsumerRecord;
/**
 * Joins the consumer group {@code test-consumer-group}, subscribes to the
 * {@code test11} topic on a local broker and prints every record's offset,
 * key and value. Runs until the process is killed.
 */
public class ConsumerGroup {
    public static void main(String[] args) throws Exception {
        // Quiet the very chatty Apache client logging.
        Logger.getLogger("org.apache").setLevel(Level.WARN);
        String topic = "test11";
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "test-consumer-group");
        // Offsets are committed automatically every second.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        // try-with-resources closes the consumer cleanly if the loop ever exits
        // (the original leaked it). Diamond operator replaces the explicit type args.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Arrays.asList(topic));
            System.out.println("Subscribed to topic " + topic);
            // Removed: unused counter `i` and dead commented-out args parsing.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s\n",
                            record.offset(), record.key(), record.value());
                }
            }
        }
    }
}
\ No newline at end of file
package com.streams.kafka.producer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
/**
 * Reads {@code ./src/main/resources/data/testinput.txt} line by line and
 * publishes each line to the {@code test} topic, keyed by its 1-based line
 * number.
 */
public class KafkaProducerToReadFile {
    public static void main(String[] args) {
        // Quiet the very chatty Apache client logging.
        Logger.getLogger("org.apache").setLevel(Level.WARN);
        Properties kafkaParams = new Properties();
        kafkaParams.put("bootstrap.servers", "localhost:9092");
        kafkaParams.put("group.id", "test-consumer-group");
        kafkaParams.put("key.serializer", StringSerializer.class);
        kafkaParams.put("value.serializer", StringSerializer.class);
        int lineCount = 0;
        // try-with-resources closes both the producer (the original never closed it,
        // so buffered messages could be lost on exit) and the reader.
        try (Producer<String, String> producer = new KafkaProducer<>(kafkaParams);
                BufferedReader reader = new BufferedReader(
                        new FileReader(new File("./src/main/resources/data/testinput.txt")))) {
            String line;
            while ((line = reader.readLine()) != null) {
                lineCount++;
                producer.send(new ProducerRecord<String, String>("test", lineCount + "", line));
                System.out.println(":" + line);
                // Bug fix: the original called reader.readLine() a second time inside
                // the loop body, silently skipping every other line of the file.
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/*package com.basicjavaspark.sample;
//import util.properties packages
import java.util.Properties;
//import simple producer packages
import org.apache.kafka.clients.producer.Producer;
//import KafkaProducer packages
import org.apache.kafka.clients.producer.KafkaProducer;
//import ProducerRecord packages
import org.apache.kafka.clients.producer.ProducerRecord;
//Create java class named 'SimpleProducer'
public class KafkaProducerToReadFile {
public static void main(String[] args) throws Exception{
//Assign topicName to string variable
String topicName = "test";
// create instance for properties to access producer configs
Properties props = new Properties();
//Assign localhost id
props.put("bootstrap.servers", "localhost:9092");
//Set acknowledgements for producer requests.
props.put("acks", "all");
//If the request fails, the producer can automatically retry,
props.put("retries", 0);
//Specify buffer size in config
props.put("batch.size", 16384);
//Reduce the no of requests less than 0
props.put("linger.ms", 1);
//The buffer.memory controls the total amount of memory available to the producer for buffering.
props.put("buffer.memory", 33554432);
props.put("key.serializer",
"org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer",
"org.apache.kafka.common.serialization.StringSerializer");
Producer<String, String> producer = new KafkaProducer
<String, String>(props);
for(int i = 0; i < 10; i++)
producer.send(new ProducerRecord<String, String>(topicName,
Integer.toString(i), Integer.toString(i)));
System.out.println("Message sent successfully");
producer.close();
}
}*/
package com.streams.kafka.producer;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
/**
 * Minimal Spark demo: parallelizes a small list of integers on a local
 * cluster, sums them with {@code reduce}, and prints the square root of
 * each element.
 */
public class TextRead {
    public static void main(String[] args) {
        // Quiet the very chatty Apache logging (the original set this twice).
        Logger.getLogger("org.apache").setLevel(Level.WARN);
        // Diamond operator fixes the raw-type ArrayList (unchecked warning).
        List<Integer> inputData = new ArrayList<>();
        inputData.add(35);
        inputData.add(25);
        inputData.add(22);
        inputData.add(55);
        // local[*] = run Spark in-process using all available cores.
        SparkConf conf = new SparkConf().setAppName("basicjavaspark").setMaster("local[*]");
        // try-with-resources closes the context, so @SuppressWarnings("resource") is no longer needed.
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<Integer> javaRdd = sc.parallelize(inputData);
            // Triggers a Spark job; result itself is only printed in the commented line below.
            Integer result = javaRdd.reduce((value1, value2) -> value1 + value2);
            JavaRDD<Double> sqrtRDD = javaRdd.map(value -> Math.sqrt(value));
            sqrtRDD.foreach(value -> System.out.println(value));
            //System.out.println(result);
            // javaRdd.foreach(System.out::println);
        }
    }
}
package com.streams.sparkstreaming;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
/**
 * Spark Streaming + Kafka integration demo: creates a direct stream over the
 * {@code mytest1} topic on a local broker, logs per-batch RDD statistics, and
 * prints every consumed record. Runs until terminated.
 */
public class KafkaStreamSparkTest {
    public static void main(String[] args) throws InterruptedException {
        Logger logger = Logger.getLogger(KafkaStreamSparkTest.class);
        // Quiet the very chatty Apache logging.
        Logger.getLogger("org.apache").setLevel(Level.WARN);
        // local[*] = in-process Spark using all cores; 2-second micro-batches.
        SparkConf conf = new SparkConf().setAppName("kafkaspark").setMaster("local[*]");
        JavaStreamingContext ssc = new JavaStreamingContext(conf, new Duration(2000));
        Set<String> topics = Collections.singleton("mytest1");
        // Diamond operator fixes the raw-type HashMap (unchecked warning).
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, "use_a_separate_group_id_for_each_stream");
        // Start from the newest offsets; offsets are managed manually (auto-commit off),
        // as recommended for the direct stream API.
        kafkaParams.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        kafkaParams.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        JavaInputDStream<ConsumerRecord<String, String>> directKafkaStream = KafkaUtils.createDirectStream(ssc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams));
        directKafkaStream.foreachRDD(rdd -> {
            logger.info(
                    "--- New RDD with " + rdd.partitions().size() + " partitions and " + rdd.count() + " records");
            rdd.foreach(record -> System.out.println(record));
        });
        ssc.start();
        ssc.awaitTermination();
    }
}
{
"recordData": {
"fieldNames": ["EMAIL_ADDRESS_"],
"records": [{
"fieldValues": ["hvayyala@nisum.com"]
}]
},
"mergeRule": {
"insertOnNoMatch": true,
"updateOnMatch": "REPLACE_ALL",
"matchColumnName1": "EMAIL_ADDRESS_",
"matchOperator": "NONE",
"optinValue": "I",
"optoutValue": "O",
"htmlValue": "H",
"textValue": "T",
"defaultPermissionStatus": "OPTIN"
},
"triggerData": [{
"optionalData": [{
"name": "TREATMENT_CODE",
"value": "TEST_604_profile_activate"
}, {
"name": "CELL_ID",
"value": "OET-COM6041AMCOM010819"
}, {
"name": "RESERVATION_NUMBER"
}, {
"name": "INTERNET_ORDER_NUMBER"
}, {
"name": "OES_TRANSACTION_ID",
"value": "174465252"
}, {
"name": "BRAND_TYPE",
"value": "MCOM"
}, {
"name": "MAIL_TYPE",
"value": "604"
}, {
"name": "MAIL_SUB_TYPE",
"value": "1A"
}, {
"name": "CLIENT_ID",
"value": "COM"
}, {
"name": "IS_RULE_ENABLED",
"value": "false"
}, {
"name": "website",
"value": "{\"promoUrl\":\" https://www.mcom-199.tbe.zeus.fds.com/account/createaccount?oca=15_VR%2B7fn8VXASSAhhcuf2bG9QMF36YkEaGmTImh7ocCVfQiq8Aq0deeDa3lP%2BVEvIA &cm_mmc=OpsEmail-_-TEST_604_profile_activate-_-activate_now-_-MB&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}&m_sc=em&m_sb=MCOM&m_tp=1A&m_cn=604_profile_activate&m_pi=MB&m_l=activate_now&m_s=OET-COM6041AMCOM010819&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}\",\"promoReportAbuseUrl\":\"https://www.mcom-199.tbe.zeus.fds.com/account/report-abuse?oca=15_VR%2B7fn8VXASSAhhcuf2bG9QMF36YkEaGmTImh7ocCVfQiq8Aq0deeDa3lP%2BVEvIA&cm_mmc=OpsEmail-_-TEST_604_profile_activate-_-opt_out-_-MB&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}&m_sc=em&m_sb=MCOM&m_tp=1A&m_cn=604_profile_activate&m_pi=MB&m_l=opt_out&m_s=OET-COM6041AMCOM010819&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}\",\"promoUserFirstName\":\"Hemasree Vayyala\",\"promoOneTimePwdExpTime\":\"4\"}"
}]
}]
}
\ No newline at end of file
{"deviceid":"1000","humidity":"43","temperature":"85","dt":"2014-01-10"}
{"deviceid":"1000","humidity":"32","temperature":"27","dt":"2014-01-11"}
{"deviceid":"1003","humidity":"64","temperature":"36","dt":"2014-01-12"}
{"deviceid":"1006","humidity":"7","temperature":"66","dt":"2014-01-13"}
{"deviceid":"1002","humidity":"86","temperature":"43","dt":"2014-01-14"}
{"deviceid":"1002","humidity":"34","temperature":"12","dt":"2014-01-14"}
{"deviceid":"1080","humidity":"56","temperature":"9","dt":"2014-01-14"}
{"deviceid":"1005","humidity":"1","temperature":"23","dt":"2014-01-10"}
{"deviceid":"1006","humidity":"65","temperature":"12","dt":"2014-01-10"}
{"deviceid":"1009","humidity":"4","temperature":"3","dt":"2014-01-15"}
{"deviceid":"1007","humidity":"77","temperature":"4","dt":"2014-01-16"}
{"deviceid":"1008","humidity":"91","temperature":"7","dt":"2014-01-17"}
{"deviceid":"1006","humidity":"45","temperature":"2","dt":"2014-01-17"}
{"deviceid":"1002","humidity":"32","temperature":"1","dt":"2014-01-18"}
{"deviceid":"1000","humidity":"14","temperature":"1","dt":"2014-01-19"}
{"deviceid":"1000","humidity":"61","temperature":"98","dt":"2014-01-20"}
{"deviceid":"1004","humidity":"53","temperature":"112","dt":"2014-01-20"}
{"deviceid":"1004","humidity":"76","temperature":"42","dt":"2014-01-20"}
{"deviceid":"1010","humidity":"64","temperature":"76","dt":"2014-01-20"}
{"deviceid":"1011","humidity":"13","temperature":"1","dt":"2014-01-31"}
\ No newline at end of file
Manifest-Version: 1.0
Built-By: rdava
Build-Jdk: 1.8.0_231
Created-By: Maven Integration for Eclipse
#Generated by Maven Integration for Eclipse
#Mon Mar 02 18:24:31 IST 2020
version=0.0.1-SNAPSHOT
groupId=com.javaspark.basic
m2e.projectName=kafkaandspark
m2e.projectLocation=/Users/rdava/Documents/javaworkspace/dbs/kafkaandspark
artifactId=kafkaandspark
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.javaspark.basic</groupId>
<artifactId>kafkaandspark</artifactId>
<version>0.0.1-SNAPSHOT</version>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.3.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
<version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.9.2</artifactId>
<version>0.8.2.2</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
{
"recordData": {
"fieldNames": ["EMAIL_ADDRESS_"],
"records": [{
"fieldValues": ["hvayyala@nisum.com"]
}]
},
"mergeRule": {
"insertOnNoMatch": true,
"updateOnMatch": "REPLACE_ALL",
"matchColumnName1": "EMAIL_ADDRESS_",
"matchOperator": "NONE",
"optinValue": "I",
"optoutValue": "O",
"htmlValue": "H",
"textValue": "T",
"defaultPermissionStatus": "OPTIN"
},
"triggerData": [{
"optionalData": [{
"name": "TREATMENT_CODE",
"value": "TEST_604_profile_activate"
}, {
"name": "CELL_ID",
"value": "OET-COM6041AMCOM010819"
}, {
"name": "RESERVATION_NUMBER"
}, {
"name": "INTERNET_ORDER_NUMBER"
}, {
"name": "OES_TRANSACTION_ID",
"value": "174465252"
}, {
"name": "BRAND_TYPE",
"value": "MCOM"
}, {
"name": "MAIL_TYPE",
"value": "604"
}, {
"name": "MAIL_SUB_TYPE",
"value": "1A"
}, {
"name": "CLIENT_ID",
"value": "COM"
}, {
"name": "IS_RULE_ENABLED",
"value": "false"
}, {
"name": "website",
"value": "{\"promoUrl\":\" https://www.mcom-199.tbe.zeus.fds.com/account/createaccount?oca=15_VR%2B7fn8VXASSAhhcuf2bG9QMF36YkEaGmTImh7ocCVfQiq8Aq0deeDa3lP%2BVEvIA &cm_mmc=OpsEmail-_-TEST_604_profile_activate-_-activate_now-_-MB&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}&m_sc=em&m_sb=MCOM&m_tp=1A&m_cn=604_profile_activate&m_pi=MB&m_l=activate_now&m_s=OET-COM6041AMCOM010819&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}\",\"promoReportAbuseUrl\":\"https://www.mcom-199.tbe.zeus.fds.com/account/report-abuse?oca=15_VR%2B7fn8VXASSAhhcuf2bG9QMF36YkEaGmTImh7ocCVfQiq8Aq0deeDa3lP%2BVEvIA&cm_mmc=OpsEmail-_-TEST_604_profile_activate-_-opt_out-_-MB&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}&m_sc=em&m_sb=MCOM&m_tp=1A&m_cn=604_profile_activate&m_pi=MB&m_l=opt_out&m_s=OET-COM6041AMCOM010819&cm_lm_mo64=${CONTACTS_LIST_TEST.EMAIL_ADDRESS_?base64}\",\"promoUserFirstName\":\"Hemasree Vayyala\",\"promoOneTimePwdExpTime\":\"4\"}"
}]
}]
}
\ No newline at end of file
{"deviceid":"1000","humidity":"43","temperature":"85","dt":"2014-01-10"}
{"deviceid":"1000","humidity":"32","temperature":"27","dt":"2014-01-11"}
{"deviceid":"1003","humidity":"64","temperature":"36","dt":"2014-01-12"}
{"deviceid":"1006","humidity":"7","temperature":"66","dt":"2014-01-13"}
{"deviceid":"1002","humidity":"86","temperature":"43","dt":"2014-01-14"}
{"deviceid":"1002","humidity":"34","temperature":"12","dt":"2014-01-14"}
{"deviceid":"1080","humidity":"56","temperature":"9","dt":"2014-01-14"}
{"deviceid":"1005","humidity":"1","temperature":"23","dt":"2014-01-10"}
{"deviceid":"1006","humidity":"65","temperature":"12","dt":"2014-01-10"}
{"deviceid":"1009","humidity":"4","temperature":"3","dt":"2014-01-15"}
{"deviceid":"1007","humidity":"77","temperature":"4","dt":"2014-01-16"}
{"deviceid":"1008","humidity":"91","temperature":"7","dt":"2014-01-17"}
{"deviceid":"1006","humidity":"45","temperature":"2","dt":"2014-01-17"}
{"deviceid":"1002","humidity":"32","temperature":"1","dt":"2014-01-18"}
{"deviceid":"1000","humidity":"14","temperature":"1","dt":"2014-01-19"}
{"deviceid":"1000","humidity":"61","temperature":"98","dt":"2014-01-20"}
{"deviceid":"1004","humidity":"53","temperature":"112","dt":"2014-01-20"}
{"deviceid":"1004","humidity":"76","temperature":"42","dt":"2014-01-20"}
{"deviceid":"1010","humidity":"64","temperature":"76","dt":"2014-01-20"}
{"deviceid":"1011","humidity":"13","temperature":"1","dt":"2014-01-31"}
\ No newline at end of file
#Generated by Maven
#Mon Sep 30 17:27:58 IST 2019
version=0.0.1-SNAPSHOT
groupId=com.javaspark.basic
artifactId=basicjavaspark
E:\dbs\basicjavaspark\src\main\java\com\basicjavaspark\sample\TextRead.java
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment