// A little test utility that writes some moderately interesting JSON data
// to a specified Kafka cluster with the specified topic.
//
// Example usage: kafkawrite -host kafkahost:9092 -topic mytopic
package main

import (
	"encoding/json"
	"flag"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"time"

	"github.com/Shopify/sarama"
)

// readData fetches the issues list of the elastic/beats GitHub repo and
// returns the raw response body.
func readData() []byte {
	req, err := http.NewRequest(
		"GET", "https://api.github.com/repos/elastic/beats/issues", nil)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		log.Fatalf("Couldn't read GitHub: response code %v", resp.StatusCode)
	}
	bodyBytes, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	return bodyBytes
}
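
// The GitHub issues endpoint returns a JSON array of issue objects with many
// fields; only a handful are decoded below. A trimmed example element:
//
//	{"id": 1, "title": "...", "state": "open", "body": "..."}
//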
// Issue represents a GitHub issue.
type Issue struct {
	//URL url.URL `json:"url"`
	ID    int    `json:"id"`
	Title string `json:"title"`
	State string `json:"state"`
	Body  string `json:"body"`
}

// Issues is an array of GitHub issues.
type Issues []Issue

// sendData interprets the given bytes as a JSON representation of GitHub
// issues (Issues) and sends (a few test fields of) the individual issues to
// the target Kafka cluster / topic as Kafka events containing JSON.
func sendData(bytes []byte, host string, topic string) {
	var issues Issues
	err := json.Unmarshal(bytes, &issues)
	if err != nil {
		fmt.Println(err)
		return
	}
	sent := 0
	for _, issue := range issues {
		blob, err := json.Marshal(issue)
		if err != nil {
			log.Print(err)
			continue
		}
		err = writeToKafkaTopic(host, topic, string(blob), nil, time.Second*15)
		if err != nil {
			log.Print(err)
			continue
		}
		sent++
	}
	fmt.Printf("%v / %v messages sent\n", sent, len(issues))
}
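
// saramaConfig builds a producer configuration: it requires acks from all
// in-sync replicas, enables the Successes channel (which SyncProducer
// requires), and partitions messages by key hash.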
func saramaConfig() *sarama.Config {
	config := sarama.NewConfig()
	config.Producer.RequiredAcks = sarama.WaitForAll
	config.Producer.Return.Successes = true
	config.Producer.Partitioner = sarama.NewHashPartitioner
	config.Version = sarama.V1_0_0_0
	return config
}
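
// writeToKafkaTopic connects a new SyncProducer to the given host, sends one
// message (with optional record headers) to the topic, and closes the
// producer again. The timeout bounds how long the broker may wait for the
// required acks.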
func writeToKafkaTopic(
	host string, topic string, message string,
	headers []sarama.RecordHeader, timeout time.Duration,
) error {
	config := saramaConfig()
	// Bound how long the broker may wait for the required acks.
	config.Producer.Timeout = timeout
	hosts := []string{host}
	producer, err := sarama.NewSyncProducer(hosts, config)
	if err != nil {
		return err
	}
	defer producer.Close()
	msg := &sarama.ProducerMessage{
		Topic:   topic,
		Value:   sarama.StringEncoder(message),
		Headers: headers,
	}
	// The returned partition and offset are ignored.
	_, _, err = producer.SendMessage(msg)
	return err
}
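
// writeToKafkaTopic dials a fresh producer for every message, which keeps it
// self-contained but is costly per call. A minimal sketch of an alternative
// that reuses one producer across messages (not called from main; the name
// writeAllToKafkaTopic is illustrative, not part of the original utility):
func writeAllToKafkaTopic(host string, topic string, messages []string) error {
	// One connection and one config for the whole batch.
	producer, err := sarama.NewSyncProducer([]string{host}, saramaConfig())
	if err != nil {
		return err
	}
	defer producer.Close()
	for _, message := range messages {
		msg := &sarama.ProducerMessage{
			Topic: topic,
			Value: sarama.StringEncoder(message),
		}
		// As above, the returned partition and offset are ignored.
		if _, _, err := producer.SendMessage(msg); err != nil {
			return err
		}
	}
	return nil
}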

func main() {
	host := flag.String("host", "", "kafka host")
	topic := flag.String("topic", "", "kafka topic")
	flag.Parse()
	if *host == "" || *topic == "" {
		log.Fatal("Host and topic must be provided.\n" +
			"Usage: kafkawrite -host <host> -topic <topic>")
	}
	data := readData()
	sendData(data, *host, *topic)
}