Commit 1caa6a16 authored by Swathi Karre

Initial commit

HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**
!**/src/test/**
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
### VS Code ###
.vscode/
/*
* Copyright 2007-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;
public class MavenWrapperDownloader {
private static final String WRAPPER_VERSION = "0.5.6";
/**
* Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
*/
private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+ WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
/**
* Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
* use instead of the default one.
*/
private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
".mvn/wrapper/maven-wrapper.properties";
/**
* Path where the maven-wrapper.jar will be saved to.
*/
private static final String MAVEN_WRAPPER_JAR_PATH =
".mvn/wrapper/maven-wrapper.jar";
/**
* Name of the property which should be used to override the default download url for the wrapper.
*/
private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
public static void main(String[] args) {
System.out.println("- Downloader started");
File baseDirectory = new File(args[0]);
System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
// If the maven-wrapper.properties exists, read it and check if it contains a custom
// wrapperUrl parameter.
File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
String url = DEFAULT_DOWNLOAD_URL;
if(mavenWrapperPropertyFile.exists()) {
FileInputStream mavenWrapperPropertyFileInputStream = null;
try {
mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
Properties mavenWrapperProperties = new Properties();
mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
} catch (IOException e) {
System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
} finally {
try {
if(mavenWrapperPropertyFileInputStream != null) {
mavenWrapperPropertyFileInputStream.close();
}
} catch (IOException e) {
// Ignore ...
}
}
}
System.out.println("- Downloading from: " + url);
File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
if(!outputFile.getParentFile().exists()) {
if(!outputFile.getParentFile().mkdirs()) {
System.out.println(
"- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
}
}
System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
try {
downloadFileFromURL(url, outputFile);
System.out.println("Done");
System.exit(0);
} catch (Throwable e) {
System.out.println("- Error downloading");
e.printStackTrace();
System.exit(1);
}
}
private static void downloadFileFromURL(String urlString, File destination) throws Exception {
if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
String username = System.getenv("MVNW_USERNAME");
char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
Authenticator.setDefault(new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password);
}
});
}
URL website = new URL(urlString);
ReadableByteChannel rbc;
rbc = Channels.newChannel(website.openStream());
FileOutputStream fos = new FileOutputStream(destination);
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
fos.close();
rbc.close();
}
}
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Maven Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@REM
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
@REM ----------------------------------------------------------------------------
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
@echo off
@REM set title of command window
title %0
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
@REM Execute a user defined script before this one
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
:skipRcPre
@setlocal
set ERROR_CODE=0
@REM To isolate internal variables from possible post scripts, we use another setlocal
@setlocal
@REM ==== START VALIDATION ====
if not "%JAVA_HOME%" == "" goto OkJHome
echo.
echo Error: JAVA_HOME not found in your environment. >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
:OkJHome
if exist "%JAVA_HOME%\bin\java.exe" goto init
echo.
echo Error: JAVA_HOME is set to an invalid directory. >&2
echo JAVA_HOME = "%JAVA_HOME%" >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
@REM ==== END VALIDATION ====
:init
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir
:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir
:baseDirNotFound
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"
:endDetectBaseDir
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
@setlocal EnableExtensions EnableDelayedExpansion
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
:endReadAdditionalConfig
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
if "%MVNW_VERBOSE%" == "true" (
echo Found %WRAPPER_JAR%
)
) else (
if not "%MVNW_REPOURL%" == "" (
SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
)
if "%MVNW_VERBOSE%" == "true" (
echo Couldn't find %WRAPPER_JAR%, downloading it ...
echo Downloading from: %DOWNLOAD_URL%
)
powershell -Command "&{"^
"$webclient = new-object System.Net.WebClient;"^
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
"}"^
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
"}"
if "%MVNW_VERBOSE%" == "true" (
echo Finished downloading %WRAPPER_JAR%
)
)
@REM End of extension
@REM Provide a "standardized" way to retrieve the CLI args that will
@REM work with both Windows and non-Windows executions.
set MAVEN_CMD_LINE_ARGS=%*
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end
:error
set ERROR_CODE=1
:end
@endlocal & set ERROR_CODE=%ERROR_CODE%
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
@REM check for post script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
:skipRcPost
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
if "%MAVEN_BATCH_PAUSE%" == "on" pause
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
exit /B %ERROR_CODE%
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.11.RELEASE</version>
<relativePath/>
</parent>
<groupId>com.example</groupId>
<artifactId>springdatatransformer</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>springdatatransformer</name>
<description>Demo project for Spring Boot Data Transformer</description>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
<confluent.version>5.4.0</confluent.version>
<kafka.version>5.4.0-ccs</kafka.version>
<mockito-core.version>1.10.19</mockito-core.version>
<powermock-module-junit4.version>2.0.4</powermock-module-junit4.version>
<powermock-api-mockito.version>1.7.4</powermock-api-mockito.version>
<main.class>com.example.SpringDataTransformerApplication</main.class>
</properties>
<repositories>
<repository>
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
</dependency>
<dependency>
<groupId>io.projectreactor.kafka</groupId>
<artifactId>reactor-kafka</artifactId>
<version>1.1.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.projectreactor</groupId>
<artifactId>reactor-spring</artifactId>
<version>1.0.1.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>${confluent.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.12</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito-core.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock-module-junit4.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.powermock</groupId>
<artifactId>powermock-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.powermock</groupId>
<artifactId>powermock-reflect</artifactId>
</exclusion>
<exclusion>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito</artifactId>
<version>${powermock-api-mockito.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<mainClass>${main.class}</mainClass>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>3.0.0-M3</version>
<configuration>
<rules>
<dependencyConvergence/>
</rules>
</configuration>
</plugin>
</plugins>
</build>
</project>
package com.example.Controller;
import com.example.KafkaProducer.*;
import com.example.service.*;
import org.springframework.beans.factory.annotation.*;
import org.springframework.http.*;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.*;
import javax.validation.*;
import java.util.concurrent.*;
@RestController
@RequestMapping("/kafkaTopic")
public class CopientKafkaController {
@Autowired
ReactorKafkaProducer reactorKafkaProducer;
@PostMapping("/sendData")
public void sendDataToKafka() {
System.out.println("Send Data to Kakfa Topic: " );
try {
reactorKafkaProducer.sendMessages();
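// NOTE: close() shuts down the shared KafkaSender, so later requests to this endpoint
// will fail; the send above is also asynchronous and may still be in flight here.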
reactorKafkaProducer.close();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
package com.example.KafkaProducer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.producer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.*;
import org.springframework.stereotype.*;
import reactor.core.publisher.Flux;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;
@Service
public class ReactorKafkaProducer {
private static final Logger log = LoggerFactory.getLogger(ReactorKafkaProducer.class);
private final KafkaSender<String, String> sender;
@Value("${spring.kafka.producer.topic-name}")
private final String kafkaTopic;
private final SimpleDateFormat dateFormat;
public ReactorKafkaProducer(@Value("${spring.kafka.producer.topic-name}") String kafkaTopic, KafkaSender<String, String> sender) {
this.sender = sender;
this.kafkaTopic = kafkaTopic;
dateFormat = new SimpleDateFormat("HH:mm:ss:SSS z dd MMM yyyy");
}
public void sendMessages() throws InterruptedException {
sender.send(Flux.just("John","jenny", "Janu")
.map(i -> SenderRecord.create(new ProducerRecord<>(kafkaTopic, "names", i), i)))
.doOnError(e -> log.error("Send failed", e))
.subscribe(r -> {
RecordMetadata metadata = r.recordMetadata();
System.out.printf("Message %s sent successfully, topic-partition=%s - %d offset=%d timestamp=%s\n",
r.correlationMetadata(),
metadata.topic(),
metadata.partition(),
metadata.offset(),
dateFormat.format(new Date(metadata.timestamp())));
});
}
public void close() {
sender.close();
}
}
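For manually verifying what sendMessages() publishes, the same reactor-kafka dependency also provides a receiver API. The standalone sketch below is not part of this commit; the group id is made up, while the topic name and broker address are the ones application.yml (further down) supplies.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;

public class ReactorKafkaConsumerSketch {
    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "reactor-kafka-poc-verifier"); // assumed group id
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        ReceiverOptions<String, String> options = ReceiverOptions.<String, String>create(props)
                .subscription(Collections.singleton("copient-kafka-poc"));
        // receive() emits one ReceiverRecord per message; take(3) matches the three names sent above
        KafkaReceiver.create(options)
                .receive()
                .take(3)
                .doOnNext(record -> {
                    System.out.printf("key=%s value=%s offset=%d%n",
                            record.key(), record.value(), record.receiverOffset().offset());
                    record.receiverOffset().acknowledge();
                })
                .blockLast();
    }
}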
package com.example;
import com.example.KafkaProducer.*;
import com.example.WebClient.*;
import com.example.config.*;
import com.example.service.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.*;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.*;
import org.springframework.boot.autoconfigure.jdbc.*;
import org.springframework.boot.autoconfigure.web.servlet.*;
import org.springframework.context.*;
import org.springframework.context.annotation.*;
import org.springframework.context.support.*;
import org.springframework.core.io.*;
import org.springframework.util.*;
import org.springframework.web.reactive.config.*;
import reactor.core.publisher.*;
import java.io.*;
import java.nio.charset.*;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.concurrent.*;
@SpringBootApplication(exclude = { DataSourceAutoConfiguration.class, WebMvcAutoConfiguration.class })
@EnableWebFlux
@ComponentScan("com.example")
public class SpringDataTransformerApplication {
private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataTransformerApplication.class);
//static WebclientService webclientService=new WebclientService();
public static void main(String[] args) throws IOException, InterruptedException {
//AbstractApplicationContext appContext = new AnnotationConfigApplicationContext(KafkaConfig.class);
ApplicationContext context = SpringApplication.run(SpringDataTransformerApplication.class, args);
/*CsvReader dataReader = context.getBean(CsvReader.class);
LOGGER.info("Transformation Started: {}", LocalDateTime.now());
Instant start = Instant.now();
String json = dataReader.convert(args[0], args[1]);
LOGGER.info(json.isEmpty() ? "Data Not Found For Transformation" : "CSV Data transformed to JSON Successfully");
Instant finish = Instant.now();
LOGGER.info("Transformation Ended: {}", LocalDateTime.now());
long timeElapsed = Duration.between(start, finish).getSeconds();
LOGGER.info("Time Taken for Transformation: " + timeElapsed + " seconds");
*/
WebclientService webclientService = context.getBean(WebclientService.class);
webclientService.sendData();
/*ReactorKafkaProducer reactorKafkaProducer = context.getBean(ReactorKafkaProducer.class);
CountDownLatch latch = new CountDownLatch(count);
reactorKafkaProducer.sendMessages(count, latch);
latch.await(10, TimeUnit.SECONDS);
reactorKafkaProducer.close();*/
}
}
package com.example.WebClient;
import com.example.*;
import org.slf4j.*;
import org.springframework.http.*;
import org.springframework.stereotype.*;
import org.springframework.util.*;
import org.springframework.web.reactive.function.*;
import org.springframework.web.reactive.function.client.*;
import reactor.core.publisher.*;
import reactor.core.scheduler.*;
import java.util.concurrent.*;
@Component
public class KafkaWebClient {
private static final Logger LOGGER = LoggerFactory.getLogger(KafkaWebClient.class);
WebClient webClient = WebClient.builder()
.baseUrl("http://localhost:8080/")
.defaultHeader(HttpHeaders.CONTENT_TYPE, "application/json")
.build();
public void sendKafkaTopicWebClient() {
LOGGER.info("sendKafkaTopicWebClient ::");
webClient.post()
.uri("kafkaTopic/sendData")
.retrieve()
.bodyToMono(Void.class)
//.flatMapMany(clientResponse -> clientResponse.bodyToMono(Integer.class))
/*.retrieve()
.bodyToFlux(String.class)
*/ .log()
.subscribe();
}
}
package com.example.config;
import com.example.WebClient.*;
import org.apache.kafka.clients.producer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.*;
import reactor.kafka.sender.*;
import java.text.*;
import java.util.*;
@Configuration
public class KafkaConfig {
private static final Logger LOGGER = LoggerFactory.getLogger(KafkaConfig.class);
private final String clientId;
private final String bootStrapServers;
private final String keySerializer;
private final String valueSerializer;
private final String ackSetting;
public KafkaConfig(@Value("${spring.kafka.producer.client-id}") String clientId,
@Value("${spring.kafka.producer.bootstrap-servers}") String bootStrapServers,
@Value("${spring.kafka.producer.key-serializer}") String keySerializer,
@Value("${spring.kafka.producer.value-serializer}") String valueSerializer,
@Value("${spring.kafka.producer.ack-setting}") String ackSetting) {
this.clientId = clientId;
this.bootStrapServers = bootStrapServers;
this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer;
this.ackSetting = ackSetting;
}
@Bean
public KafkaProducer<String, String> getKafkaProducer() {
LOGGER.info("Initializing Kafka Producer");
Properties properties = new Properties();
properties.put("client.id", clientId);
properties.put("bootstrap.servers", bootStrapServers);
properties.put("acks", ackSetting);
properties.put("key.serializer", keySerializer);
properties.put("value.serializer", valueSerializer);
return new KafkaProducer<>(properties);
}
@Bean
public KafkaSender<String, String> getKafkaSender() {
LOGGER.info("Initializing Kafka Sender");
Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServers);
props.put(ProducerConfig.CLIENT_ID_CONFIG, clientId);
props.put(ProducerConfig.ACKS_CONFIG, ackSetting);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializer);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer);
SenderOptions<Integer, String> senderOptions = SenderOptions.create(props);
return KafkaSender.create(senderOptions);
}
}
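For quick experiments outside the Spring context, the same SenderOptions can be assembled by hand. A minimal sketch, assuming the values that application.yml (further down) feeds into the constructor above:

Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(ProducerConfig.CLIENT_ID_CONFIG, "reactorkafkapoc");
props.put(ProducerConfig.ACKS_CONFIG, "all");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
KafkaSender<String, String> standaloneSender = KafkaSender.create(SenderOptions.<String, String>create(props));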
package com.example.service;
import org.apache.kafka.clients.producer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import reactor.core.publisher.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
@Service
public class CopientKafkaService {
private static final Logger LOGGER = LoggerFactory.getLogger(CopientKafkaService.class);
private final KafkaProducer<String, String> kafkaProducer;
private final String kafkaTopic;
public CopientKafkaService(KafkaProducer<String, String> kafkaProducer, @Value("${spring.kafka.producer.topic-name}") String kafkaTopic) {
this.kafkaProducer = kafkaProducer;
this.kafkaTopic = kafkaTopic;
}
public void sendDataToKafkaTopic(String jsonData) throws ExecutionException, InterruptedException {
LOGGER.info("Sending data to Kafka Topic...");
ProducerRecord<String, String> data = new ProducerRecord<>(kafkaTopic, "copientCsvData", jsonData);
Future<RecordMetadata> future = kafkaProducer.send(data);
RecordMetadata metadata = future.get();
LOGGER.info("Getting Response Metadata == {}", metadata.toString());
kafkaProducer.flush();
}
}
package com.example.service;
import com.example.util.JsonMappingUtils;
import com.example.util.JsonUtils;
import org.springframework.stereotype.Component;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.*;
@Component
public class CsvReader {
private final CopientKafkaService kafkaService;
public CsvReader(CopientKafkaService kafkaService) {
this.kafkaService = kafkaService;
}
public String convert(String dataFilePath, String mappingFilePath) throws IOException {
StringBuffer json = new StringBuffer();
InputStream inputStream = new FileInputStream(dataFilePath);
String header;
JsonUtils.addListPrefixToJson(json);
try (Scanner fileScanner = new Scanner(inputStream, StandardCharsets.UTF_8.name())) {
header = fileScanner.hasNextLine() ? fileScanner.nextLine() : "";
if (!header.isEmpty()) {
JsonMappingUtils.getInstance(mappingFilePath, header);
while (fileScanner.hasNextLine()) {
String dataLine = fileScanner.nextLine();
// crude guard: skip lines with fewer characters than the header has columns
if (dataLine.length() > header.split(JsonUtils.COMMA).length) {
String output = apply(dataLine);
// kafkaService.sendDataToKafkaTopic(output);
json.append(output).append(",");
}
}
// remove the separator left behind by the last row, so the array stays valid JSON
if (json.charAt(json.length() - 1) == ',') {
json.deleteCharAt(json.length() - 1);
}
}
} catch (Exception e) {
e.printStackTrace();
}
// Stream<String> csvDataList = Files.lines(Paths.get(dataFilePath), StandardCharsets.UTF_8);
// JsonMappingUtils.getInstance(mappingFilePath, header);
// JsonUtils.addListPrefixToJson(json);
// Optional<String> finalOutput = csvDataList
// .skip(1)
// .filter(s -> !s.isEmpty())
// .parallel()
// .map(new CsvDataReader())
// .reduce((jsonObj1, jsonObj2) -> jsonObj1 + "," + jsonObj2);
// finalOutput.ifPresent(json::append);
JsonUtils.addListSuffixToJson(json);
return json.toString();
}
private String apply(String csvRowData) throws Exception {
StringBuilder finalOutput = new StringBuilder();
Optional<Map<String, Integer>> jsonVarToCsvDataIndexOptional = Optional.of(JsonMappingUtils.getDefaultInstance().getJsonVarToCsvDataIndex());
jsonVarToCsvDataIndexOptional.ifPresent(jsonVarToCsvDataIndex -> beginTransformToJson(csvRowData, jsonVarToCsvDataIndex, finalOutput));
return finalOutput.toString();
}
private void beginTransformToJson(String csvDataLine, Map<String, Integer> jsonVarToCsvDataIndex, StringBuilder finalOutput) {
String[] splitData = csvDataLine.split(",");
if (jsonVarToCsvDataIndex.keySet().size() != splitData.length) {
splitData = fixDoubleQuoteCsvFormatIssue(splitData);
}
List<String> correctedData = removeDoubleQuoteFromCsvData(splitData);
if (jsonVarToCsvDataIndex.size() - splitData.length > 0) {
fixCsvMissingData(correctedData, jsonVarToCsvDataIndex.size() - splitData.length);
}
StringBuffer jsonBody = new StringBuffer();
jsonVarToCsvDataIndex.forEach((key, value) -> JsonUtils.jsonVariable(jsonBody, key.trim(), correctedData.get(value).trim()));
jsonBody.deleteCharAt(jsonBody.length() - 1);
finalOutput.append(JsonUtils.addObjectBodyToJson(jsonBody));
finalOutput.deleteCharAt(finalOutput.length() - 1);
}
private String[] fixDoubleQuoteCsvFormatIssue(String[] splitData) {
List<String> corrected = new ArrayList<>();
for (int i = 0; i < splitData.length; i++) {
StringBuilder columnData = new StringBuilder(splitData[i]);
if (splitData[i].startsWith("\"")) {
// re-join the pieces of a quoted field that itself contained commas
while (!columnData.toString().endsWith("\"") && i + 1 < splitData.length) {
columnData.append(",").append(splitData[++i]);
}
}
corrected.add(columnData.toString());
}
return corrected.toArray(new String[0]);
}
private List<String> removeDoubleQuoteFromCsvData(String[] splitData) {
List<String> correctedDataList = new ArrayList<>();
for (String columnData : splitData) {
correctedDataList.add(columnData.replaceAll("\"", ""));
}
return correctedDataList;
}
private void fixCsvMissingData(List<String> csvData, int missingColumnDataCount) {
while (missingColumnDataCount >= 0) {
csvData.add("");
missingColumnDataCount--;
}
}
}
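A hypothetical standalone use of CsvReader (the file paths below are illustrative, not part of this commit); with the mapping file and sample data shipped in this repo, the output is a JSON array of flat objects:

CsvReader reader = context.getBean(CsvReader.class); // context as obtained in the main class above
String json = reader.convert("contacts.csv", "mapping.txt"); // illustrative paths: data file, header-to-json mapping
// => [{"department_name":"Austin Fire","first_name":"Christine", ...},{...}]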
package com.example.service;
import com.example.WebClient.*;
import com.example.config.*;
import org.slf4j.*;
import org.springframework.beans.factory.annotation.*;
import org.springframework.core.io.*;
import org.springframework.stereotype.*;
import org.springframework.util.*;
import reactor.core.publisher.*;
import java.io.*;
import java.nio.charset.*;
@Service
public class WebclientService {
private static final Logger LOGGER = LoggerFactory.getLogger(WebclientService.class);
@Autowired
KafkaWebClient kafkaWebClient;
public void sendData(){
/* String jsonData="";
try {
Resource resource = new ClassPathResource("jsonData");
InputStream inputStream = resource.getInputStream();
byte[] bdata = FileCopyUtils.copyToByteArray(inputStream);
jsonData = new String(bdata, StandardCharsets.UTF_8);
LOGGER.info(jsonData);
} catch (IOException e) {
LOGGER.error("IOException", e);
}*/
kafkaWebClient.sendKafkaTopicWebClient();
}
}
package com.example.util;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class JsonMappingUtils {
private Map<String, Integer> jsonVarToCsvDataIndex = new HashMap<>();
private static JsonMappingUtils jsonMappingUtils = null;
private JsonMappingUtils() {
}
private JsonMappingUtils(String mappingFilePath, String csvHeaderInfo) {
populateJsonVarVsIndexMap(mappingFilePath, csvHeaderInfo);
}
public static JsonMappingUtils getInstance(String mappingFilePath, String csvHeaderInfo) {
if (jsonMappingUtils == null) {
synchronized (JsonMappingUtils.class) {
if (jsonMappingUtils == null) {
jsonMappingUtils = new JsonMappingUtils(mappingFilePath, csvHeaderInfo);
}
}
}
return jsonMappingUtils;
}
public static JsonMappingUtils getDefaultInstance() throws Exception {
if (jsonMappingUtils == null || jsonMappingUtils.jsonVarToCsvDataIndex.isEmpty()) {
throw new Exception("Mapping content unavailable, initialize the Mapping Util with the parameterized getInstance method first");
}
return jsonMappingUtils;
}
private void populateJsonVarVsIndexMap(String mappingFilePath, String csvHeaderInfo) {
List<String> csvHeaderNames = Arrays.asList(csvHeaderInfo.split(","));
Map<String, String> headerNameVsJsonVarMap = new HashMap<>();
try {
List<String> list = Files.readAllLines(Paths.get(mappingFilePath), StandardCharsets.UTF_8);
list.forEach(s -> {
String[] headerVsVar = s.split(":");
headerNameVsJsonVarMap.put(headerVsVar[0], headerVsVar[1]);
});
} catch (IOException e) {
e.printStackTrace();
}
headerNameVsJsonVarMap.forEach((key, value) -> {
int headerIndex = csvHeaderNames.indexOf(key);
if (headerIndex >= 0) { // ignore mapping entries whose header is not present in the CSV
jsonVarToCsvDataIndex.put(value.trim(), headerIndex);
}
});
}
public Map<String, Integer> getJsonVarToCsvDataIndex() {
return new HashMap<>(jsonVarToCsvDataIndex);
}
}
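The intended call order, as CsvReader.convert uses it: initialize the singleton once with the mapping file and CSV header, then read the index map via getDefaultInstance(). A sketch with illustrative arguments (getDefaultInstance declares throws Exception, so call it from a method that handles that):

JsonMappingUtils.getInstance("mapping.txt", "Department,First Name,Last Name");
Map<String, Integer> index = JsonMappingUtils.getDefaultInstance().getJsonVarToCsvDataIndex();
// with the mapping file in this repo, index.get("first_name") == 1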
package com.example.util;
public class JsonUtils {
private JsonUtils() {
// static utility class; not meant to be instantiated
}
public static final String PREFIX = "{";
public static final String SUFFIX = "}";
public static final String SEPARATOR = ":";
public static final String DOUBLE_QUOTES = "\"";
public static final String COMMA = ",";
public static final String ARRAY_PREFIX = "[";
public static final String ARRAY_SUFFIX = "]";
public static void jsonVariable(StringBuffer jsonVar, String key, String value) {
jsonVar.append(DOUBLE_QUOTES).append(key).append(DOUBLE_QUOTES).append(SEPARATOR).append(DOUBLE_QUOTES).append(value).append(DOUBLE_QUOTES).append(COMMA);
}
public static String addObjectBodyToJson(StringBuffer jsonBody) {
jsonBody.insert(0, PREFIX).append(SUFFIX).append(COMMA);
return jsonBody.toString();
}
public static void addListPrefixToJson(StringBuffer jsonObject) {
jsonObject.insert(0, ARRAY_PREFIX);
}
public static void addListSuffixToJson(StringBuffer jsonObject) {
jsonObject.append(ARRAY_SUFFIX);
}
}
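A short worked example of how these helpers compose, mirroring what CsvReader.beginTransformToJson does per row:

StringBuffer body = new StringBuffer();
JsonUtils.jsonVariable(body, "first_name", "Christine"); // body is now "first_name":"Christine",
body.deleteCharAt(body.length() - 1); // drop the trailing comma
String obj = JsonUtils.addObjectBodyToJson(body); // obj is now {"first_name":"Christine"},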
spring:
application:
name: springdatatransformer
kafka:
producer:
bootstrap-servers: localhost:9092
client-id: reactorkafkapoc
ack-setting: all
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer
topic-name: copient-kafka-poc
# spark:
# app-name: copientName
# master-uri: local
# spark-home: D:/spark-2.4.4-bin-hadoop2.7
Department:department_name
First Name: first_name
Last Name: last_name
Primary Contact: primary_contact
E-Mail Address: email_address
Phone Number: phone_number
Role: role
{
"name":"John",
"age":25
}
package com.example;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SpringDataTransformerApplicationTests {
@Test
public void context() {
}
}
package com.example.service;
import com.example.util.JsonMappingUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
import static org.mockito.Matchers.anyString;
import static org.powermock.api.mockito.PowerMockito.*;
@RunWith(PowerMockRunner.class)
@PrepareForTest({JsonMappingUtils.class, CsvReader.class})
public class CsvReaderTest {
@InjectMocks
CsvReader csvReader;
@Mock
CopientKafkaService kafkaService;
@Before
public void setUp() throws Exception {
mockStatic(JsonMappingUtils.class);
FileInputStream inputStream = mock(FileInputStream.class);
whenNew(FileInputStream.class).withAnyArguments().thenReturn(inputStream);
Scanner scanner = new Scanner(createCsvDataStub());
whenNew(Scanner.class).withAnyArguments().thenReturn(scanner);
JsonMappingUtils mappingStub = mock(JsonMappingUtils.class);
when(JsonMappingUtils.getDefaultInstance()).thenReturn(mappingStub);
when(mappingStub.getJsonVarToCsvDataIndex()).thenReturn(createMappingCsvKeyStub());
// doNothing().when(kafkaService).sendDataToKafkaTopic(anyString());
}
private String createCsvDataStub() {
return "Department,First Name,Last Name,Primary Contact,E-Mail Address,Phone Number,Role\n" +
"Austin Fire,Christine,Thies,Yes,Christine.Thies@ausps.org,512-974-4119,Liaison\n" +
"Austin Fire,Gus,Delgado,,gus.delgado@austintexas.gov,512-974-2359,Backup Liaison";
}
private Map<String, Integer> createMappingCsvKeyStub() {
Map<String, Integer> csvKeys = new HashMap<>();
csvKeys.put("department_name", 0);
csvKeys.put("first_name", 1);
csvKeys.put("last_name", 2);
csvKeys.put("primary_contact", 3);
csvKeys.put("email_address", 4);
csvKeys.put("phone_number", 5);
csvKeys.put("role", 6);
return csvKeys;
}
@Test
public void convert() throws IOException {
String jsonTest = csvReader.convert("", "");
System.out.println(jsonTest);
}
}