index.go
package main

import (
	"bufio"
	"context"
	"encoding/json"
	"fmt"
	"log"
	"os"
	"strings"
	"sync"
	"time"

	"github.com/ohmpatel1997/logs-extraction-elasticsearch/common"
	elastic "github.com/olivere/elastic"
)

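// FILENAME is the log file that ParseFile reads and indexes.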
const (
	FILENAME = "logs.log"
)

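// Log is one parsed log line: the timestamp at which it was written and the
// free-form message that follows it.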
type Log struct {
	CreatedOn time.Time `json:"created_on"`
	Message   string    `json:"message"`
}

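// ParseFile opens FILENAME, builds an Elasticsearch client via
// common.GetClient, and hands both to ParseAndIndexFile.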
func ParseFile() {
	file, err := os.Open(FILENAME)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()

	esClient, err := common.GetClient()
	if err != nil {
		log.Fatal(err)
	}

	c := context.Background()
	ParseAndIndexFile(c, file, esClient)
}

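// ParseAndIndexFile scans the file line by line, collects lines into chunks of
// linesChunkLen, and indexes each chunk in its own goroutine. Slices for both
// the raw lines and the parsed Log entries are recycled through sync.Pool so
// their large backing arrays are reused across chunks. Each input line is
// expected to look like "2006-01-02T15:04:05.0000Z,message".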
func ParseAndIndexFile(c context.Context, file *os.File, client *elastic.Client) {
	start := time.Now()
	scanner := bufio.NewScanner(file)

	linesChunkLen := 500 * 1024 // number of lines handed to each indexing goroutine

	linesPool := sync.Pool{New: func() interface{} {
		lines := make([]string, 0, linesChunkLen)
		return lines
	}}
	lines := linesPool.Get().([]string)[:0]

	logsPool := sync.Pool{New: func() interface{} {
		entries := make([]Log, 0, linesChunkLen)
		return entries
	}}

	wg := sync.WaitGroup{}
	if !scanner.Scan() {
		// Empty file (or an immediate read error): nothing to index.
		if err := scanner.Err(); err != nil {
			log.Fatal(err)
		}
		return
	}
	for {
		lines = append(lines, scanner.Text())
		willScan := scanner.Scan()
		if len(lines) == linesChunkLen || !willScan {
			linesToProcess := lines
			wg.Add(1) // one goroutine per chunk of linesChunkLen lines
			go func() {
				defer wg.Done()
				entries := logsPool.Get().([]Log)[:0]
				defer linesPool.Put(linesToProcess) // return the line slice to the pool
				defer logsPool.Put(entries)         // return the entry slice to the pool
				for _, text := range linesToProcess {
					logSlice := strings.SplitN(text, ",", 2)
					if len(logSlice) < 2 {
						fmt.Printf("\n could not split log into time and message: %v", text)
						continue
					}
					logCreationTime := logSlice[0]
					entry := Log{Message: logSlice[1]}
					var err error
					if entry.CreatedOn, err = time.Parse("2006-01-02T15:04:05.0000Z", logCreationTime); err != nil {
						fmt.Printf("\n could not parse the time %s for log: %v", logCreationTime, text)
						continue // skip the malformed line rather than dropping the whole chunk
					}
					entries = append(entries, entry)
				}
				if _, err := ParseAndIndexBulk(c, client, entries); err != nil {
					fmt.Printf("\n could not index the entries: %s", err.Error())
				}
			}()
			lines = linesPool.Get().([]string)[:0] // fresh slice for the next chunk
		}
		if !willScan {
			break
		}
	}
	wg.Wait()
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("\n time: %v\n", time.Since(start))
}

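// ParseAndIndexBulk marshals every Log entry to JSON, adds each one as an
// index request on a single bulk call against the "logs_write" index, routes
// the batch through the "dailyindex" ingest pipeline, and executes it.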
func ParseAndIndexBulk(c context.Context, client *elastic.Client, entries []Log) (res *elastic.BulkResponse, err error) {
	bulk := client.Bulk()
	for _, entry := range entries {
		jsonData, err := json.Marshal(entry)
		if err != nil {
			return nil, err
		}
		req := elastic.NewBulkIndexRequest().
			OpType("index").
			Index("logs_write").
			Type("_doc").
			Doc(string(jsonData))
		bulk = bulk.Add(req)
	}

	bulk.Pipeline("dailyindex")
	bulk.Pretty(true)
	bulk.Human(true)

	bulkResp, err := bulk.Do(c)
	if err != nil {
		return nil, err
	}
	return bulkResp, nil
}
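
// A minimal caller might look like the following (a sketch, not part of the
// original file; assumes no other file in package main defines an entry point):
//
//	func main() {
//		ParseFile()
//	}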