CodeconTransformStream.java
package codecon;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;

import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;

import java.util.List;
import java.util.Map;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;

import codecon.avro.Event;
import codecon.avro.TransformedEvent;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
public class CodeconTransformStream {

    private final Properties props;
    private final String inputTopic;
    private final Integer inputTopicPartitions;
    private final Short inputTopicReplicationFactor;
    private final String outputTopic;
    private final Integer outputTopicPartitions;
    private final Short outputTopicReplicationFactor;

    public CodeconTransformStream(Properties props) {
        this.props = props;
        this.inputTopic = props.getProperty("input.topic.name");
        this.inputTopicPartitions = Integer.parseInt(props.getProperty("input.topic.partitions"));
        this.inputTopicReplicationFactor = Short.parseShort(props.getProperty("input.topic.replication.factor"));
        this.outputTopic = props.getProperty("output.topic.name");
        this.outputTopicPartitions = Integer.parseInt(props.getProperty("output.topic.partitions"));
        this.outputTopicReplicationFactor = Short.parseShort(props.getProperty("output.topic.replication.factor"));
    }
    public void createTopics() {
        AdminClient client = AdminClient.create(props);
        // TODO Implement
        client.close();
    }
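
    // Illustrative sketch only, not called anywhere and not the exercise solution: one way the
    // TODO in createTopics() could be filled in, assuming the input/output topic settings loaded
    // from the properties file describe the topics that need to exist.
    private void createTopicsSketch() {
        try (AdminClient client = AdminClient.create(props)) {
            List<NewTopic> topics = Arrays.asList(
                    new NewTopic(inputTopic, inputTopicPartitions, inputTopicReplicationFactor),
                    new NewTopic(outputTopic, outputTopicPartitions, outputTopicReplicationFactor));
            // Fire-and-forget creation; a production version would wait on the result and
            // tolerate TopicExistsException for topics that are already there.
            client.createTopics(topics);
        }
    }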
    public Topology buildTopology() {
        StreamsBuilder builder = new StreamsBuilder();
        // TODO Implement
        return builder.build();
    }
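
    // Illustrative sketch only, not called anywhere and not the exercise solution: a possible
    // shape for the TODO in buildTopology(), assuming the job reads Event records from the input
    // topic, maps each one to a TransformedEvent, and writes the result to the output topic.
    // The "schema.registry.url" property key is an assumption about the configuration file.
    private Topology buildTopologySketch() {
        StreamsBuilder builder = new StreamsBuilder();

        // Value serde for the output records, configured against Schema Registry.
        SpecificAvroSerde<TransformedEvent> transformedEventSerde = new SpecificAvroSerde<>();
        transformedEventSerde.configure(
                Map.of(SCHEMA_REGISTRY_URL_CONFIG, props.getProperty("schema.registry.url")),
                false);

        // Input uses the default serdes configured in main() (String keys, Avro values).
        KStream<String, Event> events = builder.stream(inputTopic);
        events
                .map((key, event) -> {
                    // The actual field mapping depends on the Avro schemas, which this skeleton
                    // does not show; an empty TransformedEvent stands in for it here.
                    TransformedEvent transformed = new TransformedEvent();
                    return KeyValue.pair(key, transformed);
                })
                .to(outputTopic, Produced.with(Serdes.String(), transformedEventSerde));

        return builder.build();
    }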
    private void run() {
        createTopics();

        KafkaStreams streams = new KafkaStreams(buildTopology(), props);
        CountDownLatch latch = new CountDownLatch(1);

        // Attach shutdown handler to catch Control-C.
        Runtime.getRuntime().addShutdownHook(new Thread("shutdown-hook") {
            @Override
            public void run() {
                streams.close();
                latch.countDown();
            }
        });

        try {
            streams.cleanUp();
            streams.start();
            latch.await();
        } catch (Throwable e) {
            e.printStackTrace();
            System.exit(1);
        }
        System.exit(0);
    }
    public static void main(String[] args) throws Exception {
        if (args.length < 1) {
            throw new IllegalArgumentException("This program takes the path to an environment configuration file as argument.");
        }

        Properties props = Helpers.loadProperties(args[0]);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
        props.put("default.deserialization.exception.handler", LogAndContinueExceptionHandler.class);

        new CodeconTransformStream(props).run();
    }
}