scraping: hoist labels variable to save garbage
`lset` escapes to the heap because it is passed through the text-parser interface, so we can reduce garbage by hoisting it out of the loop: one allocation then serves every series in a scrape, instead of one allocation per series.

Signed-off-by: Bryan Boreham <bjboreham@gmail.com>
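For context, a minimal, self-contained sketch of the pattern (illustrative names only — `Parser`, `before`, `after`, `fakeParser` are stand-ins, not the Prometheus code): when a variable's address is passed through an interface method, escape analysis typically moves it to the heap, so declaring it inside the loop costs an allocation per iteration, while declaring it once outside the loop costs a single allocation that is reused.

```go
package main

import "fmt"

// Parser stands in for the text-parser interface mentioned in the commit
// message; taking *[]string by pointer through an interface is what makes
// the argument escape to the heap.
type Parser interface {
	Metric(dst *[]string)
}

type fakeParser struct{}

func (fakeParser) Metric(dst *[]string) {
	// Reuse dst's backing array when it has capacity, as label buffers do.
	*dst = append((*dst)[:0], "__name__", "example_metric")
}

// before declares lset inside the loop: each iteration gets its own
// variable, each one typically escapes, so garbage grows with the
// number of series parsed.
func before(p Parser, n int) {
	for i := 0; i < n; i++ {
		var lset []string
		p.Metric(&lset)
		_ = lset
	}
}

// after hoists lset out of the loop: it still escapes, but only once,
// and its backing storage is reused across iterations.
func after(p Parser, n int) {
	var lset []string
	for i := 0; i < n; i++ {
		p.Metric(&lset)
		_ = lset
	}
}

func main() {
	before(fakeParser{}, 3)
	after(fakeParser{}, 3)
	fmt.Println("both variants produce the same labels; only allocations differ")
}
```

Escape decisions can be checked with `go build -gcflags=-m`, which prints the compiler's escape-analysis results.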
parent b43358fd43
commit 7c934ae18c
@@ -1525,6 +1525,7 @@ func (sl *scrapeLoop) append(app storage.Appender, b []byte, contentType string,
 		appErrs         = appendErrors{}
 		sampleLimitErr  error
 		bucketLimitErr  error
+		lset            labels.Labels     // escapes to heap so hoisted out of loop
 		e               exemplar.Exemplar // escapes to heap so hoisted out of loop
 		meta            metadata.Metadata
 		metadataChanged bool
@@ -1622,7 +1623,6 @@ loop:
 		ce, ok := sl.cache.get(met)
 		var (
 			ref  storage.SeriesRef
-			lset labels.Labels
 			hash uint64
 		)
 
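One way to observe the effect of the change in isolation (a hypothetical benchmark against the `before`/`after` sketch above, not part of this commit) is to compare allocations with the standard testing package and `go test -bench=. -benchmem`:

```go
// Assumes before, after, and fakeParser from the sketch above live in the
// same package; this goes in a *_test.go file.
package main

import "testing"

// With -benchmem (or b.ReportAllocs), allocs/op drops once the variable is
// hoisted out of the loop, since the hoisted version allocates only once.
func BenchmarkBefore(b *testing.B) {
	b.ReportAllocs()
	before(fakeParser{}, b.N)
}

func BenchmarkAfter(b *testing.B) {
	b.ReportAllocs()
	after(fakeParser{}, b.N)
}
```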