SparkKafkaSink.scala (forked from spirom/spark-streaming-with-kafka)
package util

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

/**
 * For publishing to Kafka from every partition of an RDD -- see
 * http://allegro.tech/2015/08/spark-kafka-integration.html
 *
 * @param createProducer factory for a KafkaProducer; invoked lazily, so the
 *                       producer is created on the executor rather than the driver
 */
class SparkKafkaSink(createProducer: () => KafkaProducer[String, String]) extends Serializable {

  lazy val producer = createProducer()

  /**
   * Records assigned to partitions using the configured partitioner.
   *
   * @param topic Kafka topic to publish to
   * @param key   record key
   * @param value record value
   */
  def send(topic: String, key: String, value: String): Unit = {
    producer.send(new ProducerRecord(topic, key, value))
  }

  /**
   * Records assigned to partitions explicitly, ignoring the configured partitioner.
   *
   * @param topic     Kafka topic to publish to
   * @param partition topic partition to publish to
   * @param key       record key
   * @param value     record value
   */
  def send(topic: String, partition: Int, key: String, value: String): Unit = {
    producer.send(new ProducerRecord(topic, partition, key, value))
  }
}

object SparkKafkaSink {
  def apply(config: Properties): SparkKafkaSink = {
    val f = () => {
      val producer = new KafkaProducer[String, String](config)
      sys.addShutdownHook {
        producer.close()
      }
      producer
    }
    new SparkKafkaSink(f)
  }
}
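
A minimal usage sketch, not part of this file: the sink is typically broadcast once and then used inside foreachPartition, so each executor creates a single lazily initialized producer. The object name PublishExample, the broker address, and the topic name below are hypothetical placeholders, and the surrounding Spark setup is assumed rather than taken from this repository.

package util

import java.util.Properties

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

object PublishExample {
  def publish(sc: SparkContext, rdd: RDD[(String, String)]): Unit = {
    val props = new Properties()
    props.put("bootstrap.servers", "localhost:9092") // hypothetical broker address
    props.put("key.serializer",
      "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer",
      "org.apache.kafka.common.serialization.StringSerializer")

    // Broadcast the sink so executors receive only the serializable factory;
    // the KafkaProducer itself is created lazily, once per executor.
    val sink = sc.broadcast(SparkKafkaSink(props))

    rdd.foreachPartition { records =>
      records.foreach { case (key, value) =>
        sink.value.send("example-topic", key, value) // hypothetical topic name
      }
    }
  }
}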