Ascend / warehouse-management

Commit b253dadb
Authored May 10, 2021 by Philippe Fonzin
Parent: b40629b8

reactive kafka integration

Showing 11 changed files with 157 additions and 142 deletions (+157 −142)
pom.xml: +13 −0
src/main/java/com/ascendfinalproject/warehouse/config/KafkaConfig.java: +69 −0 (new file)
src/main/java/com/ascendfinalproject/warehouse/controllers/KafkaController.java: +0 −27 (deleted)
src/main/java/com/ascendfinalproject/warehouse/controllers/WarehouseController.java: +10 −0
src/main/java/com/ascendfinalproject/warehouse/kafkaservice/Receiver.java: +25 −0 (new file)
src/main/java/com/ascendfinalproject/warehouse/kafkaservice/Sender.java: +35 −0 (new file)
src/main/java/com/ascendfinalproject/warehouse/services/Consumer.java: +0 −21 (deleted)
src/main/java/com/ascendfinalproject/warehouse/services/KafkaConsumerConfig.java: +0 −23 (deleted)
src/main/java/com/ascendfinalproject/warehouse/services/KafkaProducerConfig.java: +0 −26 (deleted)
src/main/java/com/ascendfinalproject/warehouse/services/Producer.java: +0 −31 (deleted)
src/main/resources/application.properties: +5 −14
pom.xml

@@ -74,6 +74,19 @@
...
            <artifactId>httpclient</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>io.projectreactor</groupId>
            <artifactId>reactor-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>io.projectreactor.kafka</groupId>
            <artifactId>reactor-kafka</artifactId>
        </dependency>
    </dependencies>

    <build>
...
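This hunk adds two dependencies: reactor-test (test scope) and reactor-kafka, the reactive Kafka client used by the new config and services below. As a quick, hypothetical illustration of what reactor-test provides (this class does not exist in the repository, and it assumes JUnit 5 is on the test classpath), StepVerifier lets a test subscribe to a publisher and assert what it emits:

import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

// Hypothetical example, not part of this commit: StepVerifier subscribes to a
// publisher and asserts each emitted element plus the terminal signal.
class OrderStatusFluxTest {

    @Test
    void emitsBothStatuses() {
        Flux<String> statuses = Flux.just("fulfilled", "cancelled");

        StepVerifier.create(statuses)
                .expectNext("fulfilled")   // first element
                .expectNext("cancelled")   // second element
                .verifyComplete();         // stream completes without an error
    }
}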
src/main/java/com/ascendfinalproject/warehouse/config/KafkaConfig.java (new file)

package com.ascendfinalproject.warehouse.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaConfig {

    @Value("${kafka.producer.bootstrap-servers}")
    private String bootstrapServers;

    @Value("${kafka.producer.acks}")
    private String acks;

    @Value("${kafka.consumer.group-id}")
    private String groupId;

    @Bean
    public Map<String, Object> consumerFactory() {
        Map<String, Object> receiverConfigProps = new HashMap<>();
        receiverConfigProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        receiverConfigProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        receiverConfigProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        receiverConfigProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return receiverConfigProps;
    }

    @Bean
    public KafkaReceiver<String, String> kafkaEventReceiver(@Value("${kafka.topic.input}") String topic) {
        // creates specified config options for kafkaReceiver using consumerFactory
        ReceiverOptions<String, String> receiverOptions = ReceiverOptions.create(consumerFactory());
        receiverOptions.maxCommitAttempts(5);
        return KafkaReceiver.create(receiverOptions
                .addAssignListener(Collection::iterator)
                .subscription(Collections.singleton(topic)));
    }

    @Bean
    public Map<String, Object> producerFactory() {
        Map<String, Object> senderConfigProps = new HashMap<>();
        senderConfigProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        senderConfigProps.put(ProducerConfig.ACKS_CONFIG, acks);
        senderConfigProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        senderConfigProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return senderConfigProps;
    }

    @Bean
    public KafkaSender<String, String> kafkaEventProducer() {
        // creates specified config options for kafkaSender using producerFactory
        SenderOptions<String, String> senderOptions = SenderOptions.create(producerFactory());
        return KafkaSender.create(senderOptions);
    }
}
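The two beans above are injected by the Receiver and Sender services added later in this commit. For readers new to reactor-kafka, here is a standalone sketch of the same receive pattern outside Spring; the broker address, group id, and topic name are assumptions borrowed from the properties added at the end of this commit, and the class is illustrative rather than project code. One hedge worth noting: depending on the reactor-kafka version in use, ReceiverOptions may be immutable (each setter returns a new instance), in which case a bare statement like receiverOptions.maxCommitAttempts(5) above would have no effect and the chained style below is the safer form.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;

// Illustrative sketch, not part of this commit: the same receive pattern without Spring.
// Assumes a broker at localhost:9092 and a topic named "test_topic".
public class StandaloneReceiverSketch {

    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "WAREHOUSE_MANAGEMENT");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        // Chain every option call and hand the final instance to create(), so the
        // settings apply whether or not the options object is immutable.
        ReceiverOptions<String, String> options = ReceiverOptions
                .<String, String>create(props)
                .maxCommitAttempts(5)
                .subscription(Collections.singleton("test_topic"));

        KafkaReceiver.create(options)
                .receive()
                .doOnNext(record -> System.out.printf("Received: %s%n", record.value()))
                .blockLast(); // keep the main thread alive so the demo keeps consuming
    }
}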
src/main/java/com/ascendfinalproject/warehouse/controllers/KafkaController.java (deleted)

package com.ascendfinalproject.warehouse.controllers;

import com.ascendfinalproject.warehouse.services.Producer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping(value = "/kafka")
public class KafkaController {

    private final Producer producer;

    @Autowired
    KafkaController(Producer producer) {
        this.producer = producer;
    }

    @PostMapping(value = "/fulfilled")
    public void sendMessageToKafkaTopic(@RequestParam("message") String message) {
        this.producer.orderFulfilled(message);
    }
}
src/main/java/com/ascendfinalproject/warehouse/controllers/WarehouseController.java

package com.ascendfinalproject.warehouse.controllers;

import com.ascendfinalproject.warehouse.kafkaservice.Sender;
import com.ascendfinalproject.warehouse.models.OrderResponse;
import com.ascendfinalproject.warehouse.models.WarehouseOrder;
import com.ascendfinalproject.warehouse.services.WarehouseOrderService;
...

@@ -16,6 +17,15 @@ public class WarehouseController {

    @Autowired
    WarehouseOrderService orderService;

    @Autowired
    Sender sender;

    @PostMapping("/fulfilled")
    public void getOrderStatusFromWarehouse() {
        sender.sendOrderStatus("fulfilled");
    }

    @CrossOrigin
    @GetMapping(value = "/orders")
    public Flux<WarehouseOrder> getOrders() {
...
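The new POST /fulfilled handler replaces the deleted KafkaController endpoint and publishes through the reactive Sender. A sketch of exercising it with Spring's WebTestClient follows; it assumes the application is running locally on port 8080, that spring-boot-starter-test (which supplies WebTestClient) is on the test classpath, and that WarehouseController has no class-level @RequestMapping prefix, which this diff does not show.

import org.springframework.test.web.reactive.server.WebTestClient;

// Illustrative only: drives the new endpoint against a locally running instance.
public class FulfilledEndpointSketch {

    public static void main(String[] args) {
        WebTestClient client = WebTestClient.bindToServer()
                .baseUrl("http://localhost:8080") // assumed host and port
                .build();

        client.post()
                .uri("/fulfilled")                // adjust if the controller has a path prefix
                .exchange()
                .expectStatus().is2xxSuccessful(); // the handler returns void, so only the status is checked
    }
}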
src/main/java/com/ascendfinalproject/warehouse/kafkaservice/Receiver.java (new file)

package com.ascendfinalproject.warehouse.kafkaservice;

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.event.ApplicationStartedEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Service;
import reactor.kafka.receiver.KafkaReceiver;

@Service
@Slf4j
public class Receiver {

    @Autowired
    private KafkaReceiver<String, String> kafkaReceiver;

    @EventListener(ApplicationStartedEvent.class)
    public void consumeNewOrder() {
        kafkaReceiver.receive()
                .doOnNext(record -> log.info(String.format("Receive message: %s ", record.value())))
                .doOnError(throwable -> System.out.println(throwable.getMessage()))
                .subscribe();
    }
}
src/main/java/com/ascendfinalproject/warehouse/kafkaservice/Sender.java (new file)

package com.ascendfinalproject.warehouse.kafkaservice;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderRecord;
import reactor.kafka.sender.SenderResult;

@Service
@Slf4j
public class Sender {

    @Autowired
    private KafkaSender<String, String> kafkaEventProducer;

    private static final String TOPIC = "order";

    public void sendOrderStatus(String status) {
        log.info(String.format("Sender message: %s ", status));
        ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, status);
        Flux<SenderResult<String>> working = kafkaEventProducer
                .send(Mono.just(SenderRecord.create(record, status)))
                .doOnError(throwable -> System.out.println(throwable))
                .doOnNext(uuidSenderResult -> {
                    if (null != uuidSenderResult.exception()) {
                        System.out.println("it works!");
                    }
                });
        working.doOnError(throwable -> log.error("error")).subscribe();
    }
}
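Note that the doOnNext above prints "it works!" only when SenderResult.exception() is non-null, i.e. on the failure path. For reference, a common way to branch on a SenderResult is sketched below; the sender parameter, topic name, correlation value, and log messages are assumptions for illustration, not code from this commit.

import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reactor.core.publisher.Mono;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderRecord;

// Illustrative sketch, not part of this commit: branching on success vs. failure of a send.
public class SenderResultSketch {

    private static final Logger log = LoggerFactory.getLogger(SenderResultSketch.class);

    static void sendAndReport(KafkaSender<String, String> sender) {
        sender.send(Mono.just(SenderRecord.create(new ProducerRecord<>("order", "fulfilled"), "correlation-1")))
                .doOnNext(result -> {
                    if (result.exception() != null) {
                        // failure path: the send was rejected or timed out
                        log.error("send failed", result.exception());
                    } else {
                        // success path: recordMetadata() carries the assigned partition and offset
                        log.info("sent to partition {} offset {}",
                                result.recordMetadata().partition(),
                                result.recordMetadata().offset());
                    }
                })
                .subscribe();
    }
}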
src/main/java/com/ascendfinalproject/warehouse/services/Consumer.java (deleted)

package com.ascendfinalproject.warehouse.services;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;

import java.io.IOException;

@Service
public class Consumer {

    private final Logger logger = LoggerFactory.getLogger(Consumer.class);

    // this is placeholder
    @KafkaListener(topics = "fulfilled", groupId = "WAREHOUSE_MANAGEMENT")
    public void consume(String message) throws IOException {
        logger.info(String.format("#### -> Consumed message -> %s", message));
    }
}
src/main/java/com/ascendfinalproject/warehouse/services/KafkaConsumerConfig.java (deleted)

package com.ascendfinalproject.warehouse.services;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Configuration;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaConsumerConfig {

    private Map<String, Object> consumerConfig() {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.GROUP_ID_CONFIG, "WAREHOUSE_MANAGEMENT");
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return config;
    }
}
src/main/java/com/ascendfinalproject/warehouse/services/KafkaProducerConfig.java (deleted)

package com.ascendfinalproject.warehouse.services;

import com.fasterxml.jackson.databind.JsonSerializer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaProducerConfig {

    private Map<String, Object> producerConfig() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        return config;
    }
}
src/main/java/com/ascendfinalproject/warehouse/services/Producer.java (deleted)

package com.ascendfinalproject.warehouse.services;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

@Service
public class Producer {

    private static final Logger logger = LoggerFactory.getLogger(Producer.class);
    private static final String FULFILLED = "fulfilled";
    private static final String CANCELLED = "cancelled";

    @Autowired
    // publish messages to the topic
    private KafkaTemplate<String, String> kafkaTemplate;

    public void orderFulfilled(String message) {
        logger.info(String.format("#### -> this order is fulfilled -> %s", message));
        this.kafkaTemplate.send(FULFILLED, message);
    }

    public void orderCancelled(String message) {
        logger.info(String.format("#### -> this order is cancelled -> %s", message));
        this.kafkaTemplate.send(CANCELLED, message);
    }
}
src/main/resources/application.properties

spring.data.mongodb.uri=mongodb+srv://warehouse1:ascendWarehouseProject@warehouse-cluster.xopll.mongodb.net/myFirstDatabase?retryWrites=true&w=majority
spring.data.mongodb.database=test

#server:
#  port: 9000
#spring:
#  kafka:
#    consumer:
#      bootstrap-servers: localhost:9092
#      group-id: WAREHOUSE_MANAGEMENT
#      auto-offset-reset: earliest
#      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
#      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
#    producer:
#      bootstrap-servers: localhost:9092
#      key-serializer: org.apache.kafka.common.serialization.StringSerializer
#      value-serializer: org.apache.kafka.common.serialization.StringSerializer
\ No newline at end of file
kafka.producer.bootstrap-servers: localhost:9092
kafka.producer.acks: all
kafka.consumer.group-id: WAREHOUSE_MANAGEMENT
kafka.topic.input: test_topic
\ No newline at end of file