Reduce memory utilization in downstream projects creating multiple Alertmanager instances (#3092)

* Reduce memory utilization in downstream projects creating multiple Alertmanager instances

* Wrap the error instead of citing it

Signed-off-by: Marco Pracucci <marco@pracucci.com>
Marco Pracucci 2022-10-06 09:01:51 +02:00 committed by GitHub
parent 2c04c5aaa7
commit 78b5a27d40
1 changed file with 30 additions and 2 deletions


@@ -101,9 +101,9 @@ func NewAPI(
 	}
 
 	// Load embedded swagger file.
-	swaggerSpec, err := loads.Analyzed(restapi.SwaggerJSON, "")
+	swaggerSpec, err := getSwaggerSpec()
 	if err != nil {
-		return nil, fmt.Errorf("failed to load embedded swagger file: %v", err.Error())
+		return nil, err
 	}
 
 	// Create new service API.
@@ -672,3 +672,31 @@ func parseFilter(filter []string) ([]*labels.Matcher, error) {
 		}
 	}
 	return matchers, nil
 }
+
+var (
+	swaggerSpecCacheMx sync.Mutex
+	swaggerSpecCache   *loads.Document
+)
+
+// getSwaggerSpec loads and caches the swagger spec. If a cached version already exists,
+// it returns the cached one. The reason we cache it is that some downstream projects
+// (e.g. Grafana Mimir) create many Alertmanager instances in the same process, so they
+// would incur a significant memory penalty if the swagger spec was reloaded each time.
+func getSwaggerSpec() (*loads.Document, error) {
+	swaggerSpecCacheMx.Lock()
+	defer swaggerSpecCacheMx.Unlock()
+
+	// Check if a cached version exists.
+	if swaggerSpecCache != nil {
+		return swaggerSpecCache, nil
+	}
+
+	// Load embedded swagger file.
+	swaggerSpec, err := loads.Analyzed(restapi.SwaggerJSON, "")
+	if err != nil {
+		return nil, fmt.Errorf("failed to load embedded swagger file: %w", err)
+	}
+
+	swaggerSpecCache = swaggerSpec
+	return swaggerSpec, nil
+}
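
For readers unfamiliar with the pattern, the sketch below is a minimal, self-contained illustration of the mutex-guarded lazy cache the diff introduces: an expensive artifact is built at most once per process and shared by every later caller. Every identifier in it (buildSpec, getSpec, specCache, specCacheMx) is hypothetical and only stands in for the real Alertmanager types; it is not part of the change above.

package main

import (
	"fmt"
	"sync"
)

var (
	specCacheMx sync.Mutex
	specCache   *string
)

// buildSpec stands in for an expensive, deterministic construction
// (hypothetical; in the real code this would be loads.Analyzed).
func buildSpec() (*string, error) {
	s := "parsed swagger document"
	return &s, nil
}

// getSpec returns the cached artifact, building it on first use. The mutex
// keeps the lazy initialization safe when many instances start concurrently.
func getSpec() (*string, error) {
	specCacheMx.Lock()
	defer specCacheMx.Unlock()

	if specCache != nil {
		return specCache, nil
	}

	spec, err := buildSpec()
	if err != nil {
		return nil, fmt.Errorf("failed to build spec: %w", err)
	}

	specCache = spec
	return spec, nil
}

func main() {
	a, _ := getSpec()
	b, _ := getSpec()
	// Both callers receive the same pointer, so the construction cost
	// (and the memory behind it) is paid only once per process.
	fmt.Println(a == b) // true
}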