restructure project
This commit is contained in:
pkg/sender/metrics.go — 41 lines, new file
@@ -0,0 +1,41 @@
|
||||
package sender
|
||||
|
||||
import (
|
||||
"git.k-space.ee/k-space/logmower-shipper/pkg/globals"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/prometheus/client_golang/prometheus/promauto"
|
||||
)
|
||||
|
||||
var (
	// promShipperQueued reports how many log records are currently
	// queued (per source file) waiting to be batched and shipped.
	promShipperQueued = promauto.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: globals.PrometheusPrefix,
		// Subsystem: "shipper",
		Name: "shipper_record", // "queued",
		Help: "Log records in queue to be batched and sent to database",
	}, []string{"filename"})

	// promShipperDbSent counts records successfully committed to the
	// database (per source file).
	promShipperDbSent = promauto.NewCounterVec(prometheus.CounterOpts{
		Namespace: globals.PrometheusPrefix,
		// Subsystem: "shipper",
		Name: "record", // "sent",
		Help: "Log records successfully committed to database",
	}, []string{"filename"})

	// promShipperBatchSizeResult observes the number of records in
	// each batch handed to the database.
	promShipperBatchSizeResult = promauto.NewHistogram(prometheus.HistogramOpts{
		Namespace: globals.PrometheusPrefix,
		// Subsystem: "shipper",
		Name: "bulk_submission_message", // "items_in_batch"
		Help: "Batch size for database submissions",
		Buckets: []float64{1, 5, 10, 50, 100, 500, 1000, 5000, 10000},
	})

	// promShipperDbSendError counts failed batch submissions (per
	// source file).
	promShipperDbSendError = promauto.NewCounterVec(prometheus.CounterOpts{
		Namespace: globals.PrometheusPrefix,
		// Subsystem: "shipper",
		Name: "insertion_error", // "errors",
		Help: "Errors while submitting to database", // TODO:
	}, []string{"filename"})

	// promShipperSynced is 1 while all batches received so far have
	// been committed, 0 while a batch is in flight.
	// NOTE(review): this is the only metric here with Subsystem set;
	// the others have it commented out — confirm the naming scheme is
	// intentional before relying on the exposed metric names.
	promShipperSynced = promauto.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: globals.PrometheusPrefix,
		Subsystem: "shipper",
		Name:      "batches_synced",
		Help:      "All batches available have been committed database (0 or 1)",
	}, []string{"filename"})
)
pkg/sender/sender.go — 73 lines, new file
@@ -0,0 +1,73 @@
|
||||
package sender
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"git.k-space.ee/k-space/logmower-shipper/pkg/globals"
|
||||
m "git.k-space.ee/k-space/logmower-shipper/pkg/mongo"
|
||||
"github.com/jtagcat/util"
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
)
|
||||
|
||||
const (
	// MaxBatchItems caps how many records are accumulated into a
	// single database submission.
	MaxBatchItems = 10000
	// MaxBatchTime caps how long records are accumulated before a
	// (possibly smaller) batch is flushed.
	MaxBatchTime = 5 * time.Second
)
|
||||
// Queue is a receive-only stream of log records awaiting shipment to
// the database.
type Queue <-chan m.Record
func (queue Queue) Sender(db *mongo.Collection, metricsFilename string) {
|
||||
batched := make(chan []m.Record)
|
||||
|
||||
// metrics for batcher and queue
|
||||
go func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
go func() {
|
||||
for {
|
||||
promShipperQueued.WithLabelValues(metricsFilename).Set(float64(
|
||||
len(queue)))
|
||||
|
||||
timer := time.NewTimer(time.Second)
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-timer.C:
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
util.Batch(MaxBatchItems, MaxBatchTime, queue, batched)
|
||||
// returns when sendQueue is closed
|
||||
}()
|
||||
|
||||
for {
|
||||
promShipperSynced.WithLabelValues(metricsFilename).Set(1)
|
||||
|
||||
batch, ok := <-batched
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
promShipperBatchSizeResult.Observe(float64(len(batch)))
|
||||
|
||||
promShipperSynced.WithLabelValues(metricsFilename).Set(0)
|
||||
|
||||
var batchBson []interface{} // mongo does not like typing
|
||||
for _, b := range batch {
|
||||
batchBson = append(batchBson, b.ToBson())
|
||||
}
|
||||
|
||||
result, err := db.InsertMany(globals.MongoTimeout(context.Background()), batchBson, nil)
|
||||
if err != nil {
|
||||
promShipperDbSendError.WithLabelValues(metricsFilename).Add(1)
|
||||
log.Printf("failure in batch submit to database: %e", err) // TODO: add some selective retry here or something, better error handling
|
||||
continue
|
||||
}
|
||||
|
||||
promShipperDbSent.WithLabelValues(metricsFilename).Add(float64(
|
||||
len(result.InsertedIDs)))
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user