mirror of
https://github.com/prometheus-community/postgres_exporter
synced 2025-01-11 08:49:31 +00:00
Refactor repository layout and convert build system to Mage.
This commit implements a massive refactor of the repository, and moves the build system over to use Mage (magefile.org) which should allow seamless building across multiple platforms.
This commit is contained in:
parent
3e6cf08dc5
commit
989489096e
22
.gitignore
vendored
22
.gitignore
vendored
@ -1,15 +1,15 @@
|
||||
.build
|
||||
postgres_exporter
|
||||
postgres_exporter_integration_test
|
||||
/.build
|
||||
/postgres_exporter
|
||||
/postgres_exporter_integration_test
|
||||
*.tar.gz
|
||||
*.test
|
||||
*-stamp
|
||||
.idea
|
||||
/.idea
|
||||
*.iml
|
||||
cover.out
|
||||
cover.*.out
|
||||
.coverage
|
||||
bin
|
||||
release
|
||||
*.prom
|
||||
.metrics.*.*.prom
|
||||
/cover.out
|
||||
/cover.*.out
|
||||
/.coverage
|
||||
/bin
|
||||
/release
|
||||
/*.prom
|
||||
/.metrics.*.*.prom
|
||||
|
38
.travis.yml
38
.travis.yml
@ -3,37 +3,29 @@ services:
|
||||
- docker
|
||||
language: go
|
||||
go:
|
||||
- '1.9'
|
||||
# Make sure we have p2 and the postgres client.
|
||||
- '1.10'
|
||||
before_install:
|
||||
- go get -v github.com/mattn/goveralls
|
||||
- sudo wget -O /usr/local/bin/p2 https://github.com/wrouesnel/p2cli/releases/download/r4/p2 &&
|
||||
sudo chmod +x /usr/local/bin/p2
|
||||
- sudo wget -O /usr/local/bin/docker-compose https://github.com/docker/compose/releases/download/1.9.0-rc4/docker-compose-Linux-x86_64 &&
|
||||
sudo chmod +x /usr/local/bin/docker-compose
|
||||
- sudo wget -O /usr/local/bin/p2 https://github.com/wrouesnel/p2cli/releases/download/r4/p2
|
||||
&& sudo chmod +x /usr/local/bin/p2
|
||||
- sudo wget -O /usr/local/bin/docker-compose https://github.com/docker/compose/releases/download/1.9.0-rc4/docker-compose-Linux-x86_64
|
||||
&& sudo chmod +x /usr/local/bin/docker-compose
|
||||
- sudo apt-get update && sudo apt-get install postgresql-client-common
|
||||
|
||||
script:
|
||||
- make all
|
||||
- make docker
|
||||
- make test-integration
|
||||
- make cover.out
|
||||
- make release
|
||||
- $HOME/gopath/bin/goveralls -coverprofile=cover.out -service=travis-ci
|
||||
- go run mage.go -v all
|
||||
- "$HOME/gopath/bin/goveralls -coverprofile=cover.out -service=travis-ci"
|
||||
- go run mage.go docker
|
||||
after_success:
|
||||
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
|
||||
# Push a tagged build if a tag is found.
|
||||
- if [ ! -z "$TRAVIS_TAG" ]; then
|
||||
docker tag wrouesnel/postgres_exporter:latest wrouesnel/postgres_exporter:$TRAVIS_TAG ;
|
||||
docker push wrouesnel/postgres_exporter:$TRAVIS_TAG ;
|
||||
fi
|
||||
# Push a latest version
|
||||
- if [ "$TRAVIS_BRANCH" == "master" ]; then docker push wrouesnel/postgres_exporter ; fi
|
||||
- if [ ! -z "$TRAVIS_TAG" ]; then docker tag wrouesnel/postgres_exporter:latest wrouesnel/postgres_exporter:$TRAVIS_TAG
|
||||
; docker push wrouesnel/postgres_exporter:$TRAVIS_TAG ; fi
|
||||
- if [ "$TRAVIS_BRANCH" == "master" ]; then docker push wrouesnel/postgres_exporter
|
||||
; fi
|
||||
env:
|
||||
global:
|
||||
- secure: RfoWQj5tEB/t3XL2tqJW7u7Qscpz1QBOfF9lMFpB4kAUMTtZU0zBbXfMo1JheGoJQQxD/7NLRHhbUWPT2489o3KKpRTQ7RHn3k8n5U7opH01bWX0+l/EPVmhlsKjSDSLGgmxz80j3I6C8ZV3qDUijSx7r90QUNHGbZtV7g+KtoUTpRV0zir/heK6qq9LHWNHbNsJyHK8qHmd6g1UzWIBaZPJ6a/n/rO2jq4uS1JR0VlIJPRF11HOLH8IjFQvVYpN7YbEslxyNsfQJUSP/7CghSLLVWPSATEjMm8a5GJVLc564+nYghm484psEtiMXkZ3n6ie7AT8aJrKfexWrwh2aCc+cK4PiyXrf4euZehZNYogmFCqWzd1LJKcN2uIkpBSuZQDm3e6c4qkkWGpx+RdFWtAMG8IgZLDbcuryxFNzMwHc2CJ009s9Zsa+g7D57csyR5LCZ8YtNGI3g8FmhwpCKvYkfKa9aijUEWyJMyT4Vhd/w7btMTuwYHgUQ85k4ov4Xjz5SNpAGgemig5G5w7PJj4NhGvIBz9weL154x/BFVjHOZZ6Y/bWgJIPoW1KM15x5K8QylWYEBUHtwiyVyXOxHqt6MOX1vYo1L37jMK88IErrfh/VmlxEhtN9wOghk8IudMfFwQtjIwiWlJf218wxMIzUjoyb5/25tU9f2OJrg=
|
||||
- secure: WP96T7yshE03XsPVc9ICbwZXZ6nVsQDCQ9NGKnIEQa4T1Swu5uLVzxjGeowHPykKbKphQmT8inMniBxB48OLc3VVqNxVI+00ppLPEf7n79w2wVbwFOEa6TiOdws+0wOphkeSYc0L+o2aSfoMKJHF+rVW9tmM2tszVjofYHhdWjqloc2pqsfOnqbR7icfpmzMWKyezIE20YOIBsiKZJTKXiZ1SaG9ExkNwuZ7L+HRF1yeI0OdAM4VfEzBK1Gwicy2BtrbyHnl4zgcSoIBmuzo+pNuvqyGmBn3C221M6ki7NoDJDfW5brcvDmiMODWGnka7iW0nt5aUbVtURM8BhWZR0uINo30aYjr4j39UBq8y+mqYV0dp/dMEmy2fa1mogr+DuHUNVSg59Au45AZeom8N6FT03nlg+RcG/tV1skvP/mn9n9CKsyfvC4Rf3jp4+LTiJ9JIch74MecRYVwlpKM+i8s6uDftt3mvxeIYdK+NEMcfwKMv8KTwuxRo/3KRhif7z2cOE+oMbT5POWO19bfboRPCs4xiMTcqpx8dJVs41SacY52PPgjFSnyVrKvzAyjn6mePjLhpoPZueHZuJYPNa9QC8JcASMlCI7lf2Eq+2Dmp2JxmndkRs/cIfHgmO4gtiNM7Vb/rlML1D/8LYPWU/Rdp82/yDffC0ugMNovXt0=
|
||||
- secure: RRQH4Tr94OblZoqls50BIoyK1OvK9fALs4pAq1Uk5dksY1NWnomheQzOaHzbVfMfXc4zXAzppZIqxUDGn8GiSLbtJL6pnxsxYNGoCGdS8lMjjKWXbCAs8TBJobi3krOOjqgbhOWTpyluTEShnBcg7CjrRQUa/ChS3uE5kl21/4eIl9Be6Q08Eqm3p1yvMAyAgIL6Y6oPAAtBa6zIsi2MSNlryz3RKHJO7AheilppYx3E8B03A+a/oqvTTcw6w/RwBYxB8MYfSLC0jSssZz5pUSX/byUaklGFhQLnKAzJyhrMOvRyMVcO4PHaLgVi1eAKQz6eLQh7uEiIqKh19cuvTbZHDgu8zMpLDTxOW9U95e4kbjOZ5pWZ7E5QTrb24RZIt42JGbmme7PWOvy3zNbWHwfwiOF1qwYwISUcj2KFCpes8mDGt6iv46LfdlU0uoZdZu3MAiTiW0u2SD5hIeFq4XYesPtkS/TKFoAbB5Tu1qbxdmYu5NqmfvmxsmeNEm4inFJ5ap3fRRCVo668Z6qRMuQ1URcEfOz8iEftP9CnwSOXRuiuMo+W9GgckRuDZcPyQMCftq8+PhB+SjK57zrAd4Kxqf6kVHV16tcWqmFjfJJUFqmL+gpjT/VMEVDY2FOnbOARjkeLTjVC4dADBjxfJ6wmlLrfHdUm4GinbaHq0iA=
|
||||
- DOCKER_USER=wrouesnel
|
||||
- DOCKER_EMAIL=w.rouesnel@gmail.com
|
||||
- secure: f0H5HKL/5f/ZZVGZ7puegWZ6eig0TmruihuSEJCx1+Y6yDZn7l8lH+eETP9KAzH27c3CG1F9ytu/3gnnTOafXnDLlCve3fL5sKF3+pNQRwi3IojsODjdfPW+KEbG+1RD7IgkCn+DSRmvvpLr4zGOmZFEM1ZtLL878u4Hsrv/X5pDbKJgG/cXDRJfsu/EcpviO4WM8zOakBY8QihXhGpZiRtpRDCXWjW49PdCkW9hsfzFaU1yjvih9EJ0cfcH+9CFCRkezwAPlCETbOv288uHXc6bCuEEX1bgJ0ZzEXYAyoO00+12ePbQZEGNikSVT55nfC+jZLLTavQkFi862Hcx/lmJpA/7aeNYOrDcomwWMRRc4Ava2+cod7acVvo45SHRq+Jj9ofDhj9s0T/aZwV+2doc9GwDN9J6aEs9Nham2G955K1H0fmMW9lv0ThSVEZ3XbzCHyR4nPAwJQXrzauqbbihCim/g/YC5gbVs7O/4GkN2Z9LK30IJr1/NtJdIa6fMk3Zdhp6LGbXCvVFRbE0rMiTLbB8O3ll2smCu3aFYv7J9IfvI0ol0ww7kULpyf/vqxkK0NJXsKgoK/Uo1lM9gNpJBHsMt9nWnDvLj2DKZNTqkxzJeG8O98ADrQWEGFhpcsSsbW9pAMsrp6D4LQikN8KoFvh9F8h9lBsYpafzlOA=
|
||||
deploy:
|
||||
skip_cleanup: true
|
||||
provider: releases
|
||||
|
22
README.md
22
README.md
@ -17,16 +17,19 @@ docker run --net=host -e DATA_SOURCE_NAME="postgresql://postgres:password@localh
|
||||
```
|
||||
|
||||
## Building and running
|
||||
|
||||
The build system is based on [Mage](https://magefile.org)
|
||||
|
||||
The default make file behavior is to build the binary:
|
||||
```
|
||||
go get github.com/wrouesnel/postgres_exporter
|
||||
cd ${GOPATH-$HOME/go}/src/github.com/wrouesnel/postgres_exporter
|
||||
make
|
||||
export DATA_SOURCE_NAME="postgresql://login:password@hostname:port/dbname"
|
||||
./postgres_exporter <flags>
|
||||
$ go get github.com/wrouesnel/postgres_exporter
|
||||
$ cd ${GOPATH-$HOME/go}/src/github.com/wrouesnel/postgres_exporter
|
||||
$ go run mage.go
|
||||
$ export DATA_SOURCE_NAME="postgresql://login:password@hostname:port/dbname"
|
||||
$ ./postgres_exporter <flags>
|
||||
```
|
||||
|
||||
To build the dockerfile, run `make docker`.
|
||||
To build the dockerfile, run `go run mage.go docker`.
|
||||
|
||||
This will build the docker image as `wrouesnel/postgres_exporter:latest`. This
|
||||
is a minimal docker image containing *just* postgres_exporter. By default no SSL
|
||||
@ -130,9 +133,6 @@ GRANT SELECT ON postgres_exporter.pg_stat_replication TO postgres_exporter;
|
||||
> ```
|
||||
|
||||
# Hacking
|
||||
|
||||
* The build system is currently only supported for Linux-like platforms. It
|
||||
depends on GNU Make.
|
||||
* To build a copy for your current architecture run `make binary` or just `make`
|
||||
* To build a copy for your current architecture run `go run mage.go binary` or just `go run mage.go`
|
||||
This will create a symlink to the just built binary in the root directory.
|
||||
* To build release tar balls run `make release`.
|
||||
* To build release tar balls run `go run mage.go release`.
|
||||
|
@ -1,4 +1,4 @@
|
||||
FROM postgres:9.6
|
||||
FROM postgres:10
|
||||
MAINTAINER Daniel Dent (https://www.danieldent.com)
|
||||
ENV PG_MAX_WAL_SENDERS 8
|
||||
ENV PG_WAL_KEEP_SEGMENTS 8
|
@ -12,11 +12,17 @@ DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
METRICS_DIR=$(pwd)
|
||||
|
||||
# Read the absolute path to the exporter
|
||||
postgres_exporter=$(readlink -f $1)
|
||||
test_binary=$(readlink -f $2)
|
||||
postgres_exporter="$1"
|
||||
test_binary="$2"
|
||||
export POSTGRES_PASSWORD=postgres
|
||||
exporter_port=9187
|
||||
|
||||
echo "Exporter Binary: $postgres_exporter" 1>&2
|
||||
echo "Test Binary: $test_binary" 1>&2
|
||||
|
||||
[ -z "$postgres_exporter" ] && echo "Missing exporter binary" && exit 1
|
||||
[ -z "$test_binary" ] && echo "Missing test binary" && exit 1
|
||||
|
||||
cd $DIR
|
||||
|
||||
VERSIONS=( \
|
11
mage.go
Normal file
11
mage.go
Normal file
@ -0,0 +1,11 @@
|
||||
// +build ignore

// Bootstrap entry point: `go run mage.go <target>` runs Mage without
// requiring a globally installed mage binary.
package main

import (
	"os"

	"github.com/magefile/mage/mage"
)

func main() {
	// Delegate to the Mage runtime and propagate its exit status.
	exitCode := mage.Main()
	os.Exit(exitCode)
}
|
736
magefile.go
Normal file
736
magefile.go
Normal file
@ -0,0 +1,736 @@
|
||||
// +build mage
|
||||
// Self-contained go-project magefile.
|
||||
|
||||
// nolint: deadcode
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/magefile/mage/mg"
|
||||
"github.com/magefile/mage/sh"
|
||||
"github.com/magefile/mage/target"
|
||||
|
||||
"errors"
|
||||
"math/bits"
|
||||
"strconv"
|
||||
|
||||
"github.com/mholt/archiver"
|
||||
)
|
||||
|
||||
var curDir = func() string {
|
||||
name, _ := os.Getwd()
|
||||
return name
|
||||
}()
|
||||
|
||||
const constCoverageDir = ".coverage"
|
||||
const constToolDir = "tools"
|
||||
const constBinDir = "bin"
|
||||
const constReleaseDir = "release"
|
||||
const constCmdDir = "cmd"
|
||||
const constCoverFile = "cover.out"
|
||||
const constAssets = "assets"
|
||||
const constAssetsGenerated = "assets/generated"
|
||||
|
||||
var coverageDir = mustStr(filepath.Abs(path.Join(curDir, constCoverageDir)))
|
||||
var toolDir = mustStr(filepath.Abs(path.Join(curDir, constToolDir)))
|
||||
var binDir = mustStr(filepath.Abs(path.Join(curDir, constBinDir)))
|
||||
var releaseDir = mustStr(filepath.Abs(path.Join(curDir, constReleaseDir)))
|
||||
var cmdDir = mustStr(filepath.Abs(path.Join(curDir, constCmdDir)))
|
||||
var assetsGenerated = mustStr(filepath.Abs(path.Join(curDir, constAssetsGenerated)))
|
||||
|
||||
// Calculate file paths
|
||||
var toolsGoPath = toolDir
|
||||
var toolsSrcDir = mustStr(filepath.Abs(path.Join(toolDir, "src")))
|
||||
var toolsBinDir = mustStr(filepath.Abs(path.Join(toolDir, "bin")))
|
||||
var toolsVendorDir = mustStr(filepath.Abs(path.Join(toolDir, "vendor")))
|
||||
|
||||
var outputDirs = []string{binDir, releaseDir, toolsGoPath, toolsBinDir,
|
||||
toolsVendorDir, assetsGenerated, coverageDir}
|
||||
|
||||
var toolsEnv = map[string]string{"GOPATH": toolsGoPath}
|
||||
|
||||
var containerName = func() string {
|
||||
if name := os.Getenv("CONTAINER_NAME"); name != "" {
|
||||
return name
|
||||
}
|
||||
return "wrouesnel/postgres_exporter:latest"
|
||||
}()
|
||||
|
||||
type Platform struct {
|
||||
OS string
|
||||
Arch string
|
||||
BinSuffix string
|
||||
}
|
||||
|
||||
func (p *Platform) String() string {
|
||||
return fmt.Sprintf("%s-%s", p.OS, p.Arch)
|
||||
}
|
||||
|
||||
func (p *Platform) PlatformDir() string {
|
||||
platformDir := path.Join(binDir, fmt.Sprintf("%s_%s_%s", productName, versionShort, p.String()))
|
||||
return platformDir
|
||||
}
|
||||
|
||||
func (p *Platform) PlatformBin(cmd string) string {
|
||||
platformBin := fmt.Sprintf("%s%s", cmd, p.BinSuffix)
|
||||
return path.Join(p.PlatformDir(), platformBin)
|
||||
}
|
||||
|
||||
func (p *Platform) ArchiveDir() string {
|
||||
return fmt.Sprintf("%s_%s_%s", productName, versionShort, p.String())
|
||||
}
|
||||
|
||||
func (p *Platform) ReleaseBase() string {
|
||||
return path.Join(releaseDir, fmt.Sprintf("%s_%s_%s", productName, versionShort, p.String()))
|
||||
}
|
||||
|
||||
// Supported platforms
|
||||
var platforms []Platform = []Platform{
|
||||
{"linux", "amd64", ""},
|
||||
{"linux", "386", ""},
|
||||
{"darwin", "amd64", ""},
|
||||
{"darwin", "386", ""},
|
||||
{"windows", "amd64", ".exe"},
|
||||
{"windows", "386", ".exe"},
|
||||
}
|
||||
|
||||
// productName can be overridden by environ product name
|
||||
var productName = func() string {
|
||||
if name := os.Getenv("PRODUCT_NAME"); name != "" {
|
||||
return name
|
||||
}
|
||||
name, _ := os.Getwd()
|
||||
return path.Base(name)
|
||||
}()
|
||||
|
||||
// Source files
|
||||
var goSrc []string
|
||||
var goDirs []string
|
||||
var goPkgs []string
|
||||
var goCmds []string
|
||||
|
||||
var version = func() string {
|
||||
if v := os.Getenv("VERSION"); v != "" {
|
||||
return v
|
||||
}
|
||||
out, _ := sh.Output("git", "describe", "--dirty")
|
||||
|
||||
if out == "" {
|
||||
return "v0.0.0"
|
||||
}
|
||||
|
||||
return out
|
||||
}()
|
||||
|
||||
var versionShort = func() string {
|
||||
if v := os.Getenv("VERSION_SHORT"); v != "" {
|
||||
return v
|
||||
}
|
||||
out, _ := sh.Output("git", "describe", "--abbrev=0")
|
||||
|
||||
if out == "" {
|
||||
return "v0.0.0"
|
||||
}
|
||||
|
||||
return out
|
||||
}()
|
||||
|
||||
var concurrency = func() int {
|
||||
if v := os.Getenv("CONCURRENCY"); v != "" {
|
||||
pv, err := strconv.ParseUint(v, 10, bits.UintSize)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return int(pv)
|
||||
}
|
||||
return runtime.NumCPU()
|
||||
}()
|
||||
|
||||
var linterDeadline = func() time.Duration {
|
||||
if v := os.Getenv("LINTER_DEADLINE"); v != "" {
|
||||
d, _ := time.ParseDuration(v)
|
||||
if d != 0 {
|
||||
return d
|
||||
}
|
||||
}
|
||||
return time.Second * 60
|
||||
}()
|
||||
|
||||
func Log(args ...interface{}) {
|
||||
if mg.Verbose() {
|
||||
fmt.Println(args...)
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
// Set environment
|
||||
os.Setenv("PATH", fmt.Sprintf("%s:%s", toolsBinDir, os.Getenv("PATH")))
|
||||
Log("Build PATH: ", os.Getenv("PATH"))
|
||||
Log("Concurrency:", concurrency)
|
||||
goSrc = func() []string {
|
||||
results := new([]string)
|
||||
filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
|
||||
// Look for files
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
// Exclusions
|
||||
if matched, _ := filepath.Match("*/vendor/*", path); matched {
|
||||
return nil
|
||||
} else if matched, _ := filepath.Match(fmt.Sprintf("%s/*", toolDir), path); matched {
|
||||
return nil
|
||||
} else if matched, _ := filepath.Match(fmt.Sprintf("%s/*", binDir), path); matched {
|
||||
return nil
|
||||
} else if matched, _ := filepath.Match(fmt.Sprintf("%s/*", releaseDir), path); matched {
|
||||
return nil
|
||||
}
|
||||
|
||||
if matched, _ := filepath.Match("*.go", path); !matched {
|
||||
return nil
|
||||
}
|
||||
|
||||
*results = append(*results, path)
|
||||
return nil
|
||||
})
|
||||
return *results
|
||||
}()
|
||||
goDirs = func() []string {
|
||||
resultMap := make(map[string]struct{})
|
||||
for _, path := range goSrc {
|
||||
absDir, err := filepath.Abs(filepath.Dir(path))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
resultMap[absDir] = struct{}{}
|
||||
}
|
||||
results := []string{}
|
||||
for k := range resultMap {
|
||||
results = append(results, k)
|
||||
}
|
||||
return results
|
||||
}()
|
||||
goPkgs = func() []string {
|
||||
results := []string{}
|
||||
out, err := sh.Output("go", "list", "./...")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
for _, line := range strings.Split(out, "\n") {
|
||||
if !strings.Contains(line, "/vendor/") {
|
||||
results = append(results, line)
|
||||
}
|
||||
}
|
||||
return results
|
||||
}()
|
||||
goCmds = func() []string {
|
||||
results := []string{}
|
||||
|
||||
finfos, err := ioutil.ReadDir(cmdDir)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
for _, finfo := range finfos {
|
||||
results = append(results, finfo.Name())
|
||||
}
|
||||
return results
|
||||
}()
|
||||
|
||||
// Ensure output dirs exist
|
||||
for _, dir := range outputDirs {
|
||||
os.MkdirAll(dir, os.FileMode(0777))
|
||||
}
|
||||
}
|
||||
|
||||
// mustStr unwraps a (string, error) pair, panicking on error. Used for
// path computations that must succeed for the build to make sense.
func mustStr(r string, err error) string {
	if err == nil {
		return r
	}
	panic(err)
}
|
||||
|
||||
// getCoreTools returns the import paths of the statically-known build
// tools vendored under tools/.
func getCoreTools() []string {
	return []string{
		"github.com/kardianos/govendor",
		"github.com/wadey/gocovmerge",
		"github.com/mattn/goveralls",
		"github.com/tmthrgd/go-bindata/go-bindata",
		"github.com/GoASTScanner/gas/cmd/gas", // workaround for Ast scanner
		"github.com/alecthomas/gometalinter",
	}
}
|
||||
|
||||
func getMetalinters() []string {
|
||||
// Gometalinter should now be on the command line
|
||||
dynamicTools := []string{}
|
||||
|
||||
goMetalinterHelp, _ := sh.Output("gometalinter", "--help")
|
||||
linterRx := regexp.MustCompile(`\s+\w+:\s*\((.+)\)`)
|
||||
for _, l := range strings.Split(goMetalinterHelp, "\n") {
|
||||
linter := linterRx.FindStringSubmatch(l)
|
||||
if len(linter) > 1 {
|
||||
dynamicTools = append(dynamicTools, linter[1])
|
||||
}
|
||||
}
|
||||
return dynamicTools
|
||||
}
|
||||
|
||||
func ensureVendorSrcLink() error {
|
||||
Log("Symlink vendor to tools dir")
|
||||
if err := sh.Rm(toolsSrcDir); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := os.Symlink(toolsVendorDir, toolsSrcDir); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// concurrencyLimitedBuild executes a certain number of commands limited by concurrency
|
||||
func concurrencyLimitedBuild(buildCmds ...interface{}) error {
|
||||
resultsCh := make(chan error, len(buildCmds))
|
||||
concurrencyControl := make(chan struct{}, concurrency)
|
||||
for _, buildCmd := range buildCmds {
|
||||
go func(buildCmd interface{}) {
|
||||
concurrencyControl <- struct{}{}
|
||||
resultsCh <- buildCmd.(func() error)()
|
||||
<-concurrencyControl
|
||||
|
||||
}(buildCmd)
|
||||
}
|
||||
// Doesn't work at the moment
|
||||
// mg.Deps(buildCmds...)
|
||||
results := []error{}
|
||||
var resultErr error = nil
|
||||
for len(results) < len(buildCmds) {
|
||||
err := <-resultsCh
|
||||
results = append(results, err)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
resultErr = errors.New("parallel build failed")
|
||||
}
|
||||
fmt.Printf("Finished %v of %v\n", len(results), len(buildCmds))
|
||||
}
|
||||
|
||||
return resultErr
|
||||
}
|
||||
|
||||
// Tools builds build tools of the project and is depended on by all other build targets.
|
||||
func Tools() (err error) {
|
||||
// Catch panics and convert to errors
|
||||
defer func() {
|
||||
if perr := recover(); perr != nil {
|
||||
err = perr.(error)
|
||||
}
|
||||
}()
|
||||
|
||||
if err := ensureVendorSrcLink(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
toolBuild := func(toolType string, tools ...string) error {
|
||||
toolTargets := []interface{}{}
|
||||
for _, toolImport := range tools {
|
||||
toolParts := strings.Split(toolImport, "/")
|
||||
toolBin := path.Join(toolsBinDir, toolParts[len(toolParts)-1])
|
||||
Log("Check for changes:", toolBin, toolsVendorDir)
|
||||
changed, terr := target.Dir(toolBin, toolsVendorDir)
|
||||
if terr != nil {
|
||||
if !os.IsNotExist(terr) {
|
||||
panic(terr)
|
||||
}
|
||||
changed = true
|
||||
}
|
||||
if changed {
|
||||
localToolImport := toolImport
|
||||
f := func() error { return sh.RunWith(toolsEnv, "go", "install", "-v", localToolImport) }
|
||||
toolTargets = append(toolTargets, f)
|
||||
}
|
||||
}
|
||||
|
||||
Log("Build", toolType, "tools")
|
||||
if berr := concurrencyLimitedBuild(toolTargets...); berr != nil {
|
||||
return berr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if berr := toolBuild("static", getCoreTools()...); berr != nil {
|
||||
return berr
|
||||
}
|
||||
|
||||
if berr := toolBuild("static", getMetalinters()...); berr != nil {
|
||||
return berr
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateTools automatically updates tool dependencies to the latest version.
|
||||
func UpdateTools() error {
|
||||
if err := ensureVendorSrcLink(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Ensure govendor is up to date without doing anything
|
||||
govendorPkg := "github.com/kardianos/govendor"
|
||||
govendorParts := strings.Split(govendorPkg, "/")
|
||||
govendorBin := path.Join(toolsBinDir, govendorParts[len(govendorParts)-1])
|
||||
|
||||
sh.RunWith(toolsEnv, "go", "get", "-v", "-u", govendorPkg)
|
||||
|
||||
if changed, cerr := target.Dir(govendorBin, toolsSrcDir); changed || os.IsNotExist(cerr) {
|
||||
if err := sh.RunWith(toolsEnv, "go", "install", "-v", govendorPkg); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if cerr != nil {
|
||||
panic(cerr)
|
||||
}
|
||||
|
||||
// Set current directory so govendor has the right path
|
||||
previousPwd, wderr := os.Getwd()
|
||||
if wderr != nil {
|
||||
return wderr
|
||||
}
|
||||
if err := os.Chdir(toolDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// govendor fetch core tools
|
||||
for _, toolImport := range append(getCoreTools(), getMetalinters()...) {
|
||||
sh.RunV("govendor", "fetch", "-v", toolImport)
|
||||
}
|
||||
|
||||
// change back to original working directory
|
||||
if err := os.Chdir(previousPwd); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Assets builds binary assets to be bundled into the binary.
|
||||
func Assets() error {
|
||||
mg.Deps(Tools)
|
||||
|
||||
if err := os.MkdirAll("assets/generated", os.FileMode(0777)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return sh.RunV("go-bindata", "-pkg=assets", "-o", "assets/bindata.go", "-ignore=bindata.go",
|
||||
"-ignore=.*.map$", "-prefix=assets/generated", "assets/generated/...")
|
||||
}
|
||||
|
||||
// Lint runs gometalinter for code quality. CI will run this before accepting PRs.
|
||||
func Lint() error {
|
||||
mg.Deps(Tools)
|
||||
args := []string{"-j", fmt.Sprintf("%v", concurrency), fmt.Sprintf("--deadline=%s",
|
||||
linterDeadline.String()), "--enable-all", "--line-length=120",
|
||||
"--disable=gocyclo", "--disable=testify", "--disable=test", "--exclude=assets/bindata.go"}
|
||||
return sh.RunV("gometalinter", append(args, goDirs...)...)
|
||||
}
|
||||
|
||||
// Style checks formatting of the file. CI will run this before acceptiing PRs.
|
||||
func Style() error {
|
||||
mg.Deps(Tools)
|
||||
args := []string{"--disable-all", "--enable=gofmt", "--enable=goimports"}
|
||||
return sh.RunV("gometalinter", append(args, goSrc...)...)
|
||||
}
|
||||
|
||||
// Fmt automatically formats all source code files
|
||||
func Fmt() error {
|
||||
mg.Deps(Tools)
|
||||
fmtErr := sh.RunV("gofmt", append([]string{"-s", "-w"}, goSrc...)...)
|
||||
if fmtErr != nil {
|
||||
return fmtErr
|
||||
}
|
||||
impErr := sh.RunV("goimports", append([]string{"-w"}, goSrc...)...)
|
||||
if impErr != nil {
|
||||
return fmtErr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func listCoverageFiles() ([]string, error) {
|
||||
result := []string{}
|
||||
finfos, derr := ioutil.ReadDir(coverageDir)
|
||||
if derr != nil {
|
||||
return result, derr
|
||||
}
|
||||
for _, finfo := range finfos {
|
||||
result = append(result, path.Join(coverageDir, finfo.Name()))
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Test run test suite
|
||||
func Test() error {
|
||||
mg.Deps(Tools)
|
||||
|
||||
// Ensure coverage directory exists
|
||||
if err := os.MkdirAll(coverageDir, os.FileMode(0777)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Clean up coverage directory
|
||||
coverFiles, derr := listCoverageFiles()
|
||||
if derr != nil {
|
||||
return derr
|
||||
}
|
||||
for _, coverFile := range coverFiles {
|
||||
if err := sh.Rm(coverFile); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Run tests
|
||||
coverProfiles := []string{}
|
||||
for _, pkg := range goPkgs {
|
||||
coverProfile := path.Join(coverageDir, fmt.Sprintf("%s%s", strings.Replace(pkg, "/", "-", -1), ".out"))
|
||||
testErr := sh.Run("go", "test", "-v", "-covermode", "count", fmt.Sprintf("-coverprofile=%s", coverProfile),
|
||||
pkg)
|
||||
if testErr != nil {
|
||||
return testErr
|
||||
}
|
||||
coverProfiles = append(coverProfiles, coverProfile)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Build the intgration test binary
|
||||
func IntegrationTestBinary() error {
|
||||
changed, err := target.Path("postgres_exporter_integration_test", goSrc...)
|
||||
if (changed && (err == nil)) || os.IsNotExist(err) {
|
||||
return sh.RunWith(map[string]string{"CGO_ENABLED": "0"}, "go", "test", "./cmd/postgres_exporter",
|
||||
"-c", "-tags", "integration",
|
||||
"-a", "-ldflags", "-extldflags '-static'", "-X", fmt.Sprintf("main.Version=%s", version),
|
||||
"-o", "postgres_exporter_integration_test", "-cover", "-covermode", "count")
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// TestIntegration runs integration tests
|
||||
func TestIntegration() error {
|
||||
mg.Deps(Binary, IntegrationTestBinary)
|
||||
|
||||
exporterPath := mustStr(filepath.Abs("postgres_exporter"))
|
||||
testBinaryPath := mustStr(filepath.Abs("postgres_exporter_integration_test"))
|
||||
testScriptPath := mustStr(filepath.Abs("postgres_exporter_integration_test_script"))
|
||||
|
||||
integrationCoverageProfile := path.Join(coverageDir, "cover.integration.out")
|
||||
|
||||
return sh.RunV("cmd/postgres_exporter/tests/test-smoke", exporterPath,
|
||||
fmt.Sprintf("%s %s %s", testScriptPath, testBinaryPath, integrationCoverageProfile))
|
||||
}
|
||||
|
||||
// Coverage sums up the coverage profiles in .coverage. It does not clean up after itself or before.
|
||||
func Coverage() error {
|
||||
// Clean up coverage directory
|
||||
coverFiles, derr := listCoverageFiles()
|
||||
if derr != nil {
|
||||
return derr
|
||||
}
|
||||
|
||||
mergedCoverage, err := sh.Output("gocovmerge", coverFiles...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return ioutil.WriteFile(constCoverFile, []byte(mergedCoverage), os.FileMode(0777))
|
||||
}
|
||||
|
||||
// All runs a full suite suitable for CI
|
||||
func All() error {
|
||||
mg.SerialDeps(Style, Lint, Test, TestIntegration, Coverage, Release)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Release builds release archives under the release/ directory
|
||||
func Release() error {
|
||||
mg.Deps(ReleaseBin)
|
||||
|
||||
for _, platform := range platforms {
|
||||
owd, wderr := os.Getwd()
|
||||
if wderr != nil {
|
||||
return wderr
|
||||
}
|
||||
os.Chdir(binDir)
|
||||
|
||||
if platform.OS == "windows" {
|
||||
// build a zip binary as well
|
||||
err := archiver.Zip.Make(fmt.Sprintf("%s.zip", platform.ReleaseBase()), []string{platform.ArchiveDir()})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// build tar gz
|
||||
err := archiver.TarGz.Make(fmt.Sprintf("%s.tar.gz", platform.ReleaseBase()), []string{platform.ArchiveDir()})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
os.Chdir(owd)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func makeBuilder(cmd string, platform Platform) func() error {
|
||||
f := func() error {
|
||||
// Depend on assets
|
||||
mg.Deps(Assets)
|
||||
|
||||
cmdSrc := fmt.Sprintf("./%s/%s", mustStr(filepath.Rel(curDir, cmdDir)), cmd)
|
||||
|
||||
Log("Make platform binary directory:", platform.PlatformDir())
|
||||
if err := os.MkdirAll(platform.PlatformDir(), os.FileMode(0777)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
Log("Checking for changes:", platform.PlatformBin(cmd))
|
||||
if changed, err := target.Path(platform.PlatformBin(cmd), goSrc...); !changed {
|
||||
if err != nil {
|
||||
if !os.IsNotExist(err) {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("Building", platform.PlatformBin(cmd))
|
||||
return sh.RunWith(map[string]string{"CGO_ENABLED": "0", "GOOS": platform.OS, "GOARCH": platform.Arch},
|
||||
"go", "build", "-a", "-ldflags", fmt.Sprintf("-extldflags '-static' -X version.Version=%s", version),
|
||||
"-o", platform.PlatformBin(cmd), cmdSrc)
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
func getCurrentPlatform() *Platform {
|
||||
var curPlatform *Platform
|
||||
for _, p := range platforms {
|
||||
if p.OS == runtime.GOOS && p.Arch == runtime.GOARCH {
|
||||
storedP := p
|
||||
curPlatform = &storedP
|
||||
}
|
||||
}
|
||||
Log("Determined current platform:", curPlatform)
|
||||
return curPlatform
|
||||
}
|
||||
|
||||
// Binary build a binary for the current platform
|
||||
func Binary() error {
|
||||
curPlatform := getCurrentPlatform()
|
||||
if curPlatform == nil {
|
||||
return errors.New("current platform is not supported")
|
||||
}
|
||||
|
||||
for _, cmd := range goCmds {
|
||||
err := makeBuilder(cmd, *curPlatform)()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Make a root symlink to the build
|
||||
cmdPath := path.Join(curDir, cmd)
|
||||
os.Remove(cmdPath)
|
||||
if err := os.Symlink(curPlatform.PlatformBin(cmd), cmdPath); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReleaseBin builds cross-platform release binaries under the bin/ directory
|
||||
func ReleaseBin() error {
|
||||
buildCmds := []interface{}{}
|
||||
|
||||
for _, cmd := range goCmds {
|
||||
for _, platform := range platforms {
|
||||
buildCmds = append(buildCmds, makeBuilder(cmd, platform))
|
||||
}
|
||||
}
|
||||
|
||||
resultsCh := make(chan error, len(buildCmds))
|
||||
concurrencyControl := make(chan struct{}, concurrency)
|
||||
for _, buildCmd := range buildCmds {
|
||||
go func(buildCmd interface{}) {
|
||||
concurrencyControl <- struct{}{}
|
||||
resultsCh <- buildCmd.(func() error)()
|
||||
<-concurrencyControl
|
||||
|
||||
}(buildCmd)
|
||||
}
|
||||
// Doesn't work at the moment
|
||||
// mg.Deps(buildCmds...)
|
||||
results := []error{}
|
||||
var resultErr error = nil
|
||||
for len(results) < len(buildCmds) {
|
||||
err := <-resultsCh
|
||||
results = append(results, err)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
resultErr = errors.New("parallel build failed")
|
||||
}
|
||||
fmt.Printf("Finished %v of %v\n", len(results), len(buildCmds))
|
||||
}
|
||||
|
||||
return resultErr
|
||||
}
|
||||
|
||||
// Docker builds the docker image
|
||||
func Docker() error {
|
||||
mg.Deps(Binary)
|
||||
p := getCurrentPlatform()
|
||||
if p == nil {
|
||||
return errors.New("current platform is not supported")
|
||||
}
|
||||
|
||||
return sh.RunV("docker", "build",
|
||||
fmt.Sprintf("--build-arg=binary=%s",
|
||||
mustStr(filepath.Rel(curDir, p.PlatformBin("postgres_exporter")))),
|
||||
"-t", containerName, ".")
|
||||
}
|
||||
|
||||
// Clean deletes build output and cleans up the working directory
|
||||
func Clean() error {
|
||||
for _, name := range goCmds {
|
||||
if err := sh.Rm(path.Join(binDir, name)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
for _, name := range outputDirs {
|
||||
if err := sh.Rm(name); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Debug prints the value of internal state variables
|
||||
func Debug() error {
|
||||
fmt.Println("Source Files:", goSrc)
|
||||
fmt.Println("Packages:", goPkgs)
|
||||
fmt.Println("Directories:", goDirs)
|
||||
fmt.Println("Command Paths:", goCmds)
|
||||
fmt.Println("Output Dirs:", outputDirs)
|
||||
fmt.Println("Tool Src Dir:", toolsSrcDir)
|
||||
fmt.Println("Tool Vendor Dir:", toolsVendorDir)
|
||||
fmt.Println("Tool GOPATH:", toolsGoPath)
|
||||
fmt.Println("PATH:", os.Getenv("PATH"))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Autogen configure local git repository with commit hooks
|
||||
func Autogen() error {
|
||||
fmt.Println("Installing git hooks in local repository...")
|
||||
return os.Link(path.Join(curDir, toolDir, "pre-commit"), ".git/hooks/pre-commit")
|
||||
}
|
@ -7,6 +7,9 @@ shift
|
||||
output_cov=$1
|
||||
shift
|
||||
|
||||
echo "Test Binary: $test_binary" 1>&2
|
||||
echo "Coverage File: $output_cov" 1>&2
|
||||
|
||||
echo "mode: count" > $output_cov
|
||||
|
||||
test_cov=$(mktemp)
|
||||
|
@ -1,67 +0,0 @@
|
||||
# Makefile to build the tools used in the build system.
|
||||
# If recreating from scratch, you will need a local install of govendor
|
||||
# and to run govendor init in this folder before running govendor fetch.
|
||||
|
||||
# Ensure we use local bin dir
|
||||
export PATH := bin:$(PATH)
|
||||
SHELL := env PATH=$(PATH) /bin/bash
|
||||
|
||||
THIS_FILE := $(lastword $(MAKEFILE_LIST))
|
||||
|
||||
# This function is used to get the linters used by metalinter
|
||||
get_metalinters := gometalinter --help | grep -oP '\s+\w+:\s*\(.+\)' | tr -s ' ' | cut -d' ' -f3 | grep -oP '[^()]+'
|
||||
|
||||
# This is a list of external tools we want to vendor
|
||||
TOOL_SRCS := github.com/kardianos/govendor \
|
||||
github.com/wadey/gocovmerge \
|
||||
github.com/mattn/goveralls \
|
||||
github.com/alecthomas/gometalinter
|
||||
|
||||
# This is populated by imported dependencies from gometalinter
|
||||
METATOOL_SRCS :=
|
||||
|
||||
GO_SRC := $(shell find $(SOURCEDIR) -name '*.go')
|
||||
|
||||
GO := GOPATH=$(shell pwd) go
|
||||
|
||||
DEFAULT: all
|
||||
|
||||
tools.deps: $(GO_SRC)
|
||||
@# Generate build patterns for static tools
|
||||
@for pkg in $(TOOL_SRCS); do \
|
||||
echo -e "bin/$$(basename $$pkg): $$GO_SRC\n\t\$$(GO) install -v $$pkg" ; \
|
||||
done > tools.deps
|
||||
|
||||
-include tools.deps
|
||||
|
||||
metatools.deps: bin/gometalinter $(GO_SRC)
|
||||
# Generate build patterns for metalinters tools
|
||||
@echo -e "METATOOL_SRCS+=$(shell $(get_metalinters))" > metatools.deps
|
||||
@for pkg in $(shell $(get_metalinters)) ; do \
|
||||
echo -e "bin/$$(basename $$pkg): $$GO_SRC\n\t\$$(GO) install -v $$pkg" ; \
|
||||
done >> metatools.deps
|
||||
|
||||
-include metatools.deps
|
||||
|
||||
update:
|
||||
# Fetch govendor, then rebuild govendor.
|
||||
govendor fetch github.com/kardianos/govendor
|
||||
$(GO) install -v github.com/kardianos/govendor
|
||||
# Fetch gometalinter and rebuild gometalinter.
|
||||
govendor fetch github.com/alecthomas/gometalinter
|
||||
$(GO) install -v github.com/alecthomas/gometalinter
|
||||
$(MAKE) -f $(THIS_FILE) update-phase-2
|
||||
|
||||
update-phase-2:
|
||||
# Fetch the new metalinter list.
|
||||
for pkg in $(TOOL_SRCS) $$($(get_metalinters)); do \
|
||||
govendor fetch -v $$pkg ; \
|
||||
done
|
||||
|
||||
clean:
|
||||
rm -rf bin pkg tools.deps metatools.deps
|
||||
|
||||
all: $(addprefix bin/,$(notdir $(TOOL_SRCS) $(METATOOL_SRCS) ))
|
||||
|
||||
# TOOL_SRCS is included here since we'll never really have these files.
|
||||
.PHONY: all update clean $(TOOL_SRCS) $(METATOOL_SRCS)
|
@ -1 +1 @@
|
||||
vendor
|
||||
/home/will/src/go/src/github.com/wrouesnel/postgres_exporter/tools/vendor
|
39
tools/vendor/github.com/GoASTScanner/gas/README.md
generated
vendored
39
tools/vendor/github.com/GoASTScanner/gas/README.md
generated
vendored
@ -18,6 +18,10 @@ You may obtain a copy of the License [here](http://www.apache.org/licenses/LICEN
|
||||
Gas is still in alpha and accepting feedback from early adopters. We do
|
||||
not consider it production ready at this time.
|
||||
|
||||
### Install
|
||||
|
||||
`$ go get github.com/GoASTScanner/gas/cmd/gas/...`
|
||||
|
||||
### Usage
|
||||
|
||||
Gas can be configured to only run a subset of rules, to exclude certain file
|
||||
@ -37,6 +41,7 @@ or to specify a set of rules to explicitly exclude using the '-exclude=' flag.
|
||||
- G103: Audit the use of unsafe block
|
||||
- G104: Audit errors not checked
|
||||
- G105: Audit the use of math/big.Int.Exp
|
||||
- G106: Audit the use of ssh.InsecureIgnoreHostKey
|
||||
- G201: SQL query construction using format string
|
||||
- G202: SQL query construction using string concatenation
|
||||
- G203: Use of unescaped data in HTML templates
|
||||
@ -64,12 +69,8 @@ $ gas -exclude=G303 ./...
|
||||
|
||||
#### Excluding files:
|
||||
|
||||
Gas can be told to \ignore paths that match a supplied pattern using the 'skip' command line option. This is
|
||||
accomplished via [go-glob](github.com/ryanuber/go-glob). Multiple patterns can be specified as follows:
|
||||
|
||||
```
|
||||
$ gas -skip=tests* -skip=*_example.go ./...
|
||||
```
|
||||
Gas will ignore dependencies in your vendor directory any files
|
||||
that are not considered build artifacts by the compiler (so test files).
|
||||
|
||||
#### Annotating code
|
||||
|
||||
@ -104,7 +105,7 @@ $ gas -nosec=true ./...
|
||||
|
||||
### Output formats
|
||||
|
||||
Gas currently supports text, json and csv output formats. By default
|
||||
Gas currently supports text, json, yaml, csv and JUnit XML output formats. By default
|
||||
results will be reported to stdout, but can also be written to an output
|
||||
file. The output format is controlled by the '-fmt' flag, and the output file is controlled by the '-out' flag as follows:
|
||||
|
||||
@ -113,19 +114,21 @@ file. The output format is controlled by the '-fmt' flag, and the output file is
|
||||
$ gas -fmt=json -out=results.json *.go
|
||||
```
|
||||
|
||||
### Docker container
|
||||
### Generate TLS rule
|
||||
|
||||
A Dockerfile is included with the Gas source code to provide a container that
|
||||
allows users to easily run Gas on their code. It builds Gas, then runs it on
|
||||
all Go files in your current directory. Use the following commands to build
|
||||
and run locally:
|
||||
The configuration of TLS rule can be generated from [Mozilla's TLS ciphers recommendation](https://statics.tls.security.mozilla.org/server-side-tls-conf.json).
|
||||
|
||||
To build: (run command in cloned Gas source code directory)
|
||||
docker build --build-arg http_proxy --build-arg https_proxy
|
||||
--build-arg no_proxy -t goastscanner/gas:latest .
|
||||
|
||||
To run: (run command in desired directory with Go files)
|
||||
docker run -v $PWD:$PWD --workdir $PWD goastscanner/gas:latest
|
||||
First you need to install the generator tool:
|
||||
|
||||
Note: Docker version 17.05 or later is required (to permit multistage build).
|
||||
```
|
||||
go get github.com/GoASTScanner/gas/cmd/tlsconfig/...
|
||||
```
|
||||
|
||||
You can invoke now the `go generate` in the root of the project:
|
||||
|
||||
```
|
||||
go generate ./...
|
||||
```
|
||||
|
||||
This will generate the `rules/tls_config.go` file with will contain the current ciphers recommendation from Mozilla.
|
||||
|
197
tools/vendor/github.com/GoASTScanner/gas/analyzer.go
generated
vendored
Normal file
197
tools/vendor/github.com/GoASTScanner/gas/analyzer.go
generated
vendored
Normal file
@ -0,0 +1,197 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package gas holds the central scanning logic used by GAS
|
||||
package gas
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"path/filepath"
|
||||
|
||||
"golang.org/x/tools/go/loader"
|
||||
)
|
||||
|
||||
// The Context is populated with data parsed from the source code as it is scanned.
|
||||
// It is passed through to all rule functions as they are called. Rules may use
|
||||
// this data in conjunction withe the encoutered AST node.
|
||||
type Context struct {
|
||||
FileSet *token.FileSet
|
||||
Comments ast.CommentMap
|
||||
Info *types.Info
|
||||
Pkg *types.Package
|
||||
Root *ast.File
|
||||
Config map[string]interface{}
|
||||
Imports *ImportTracker
|
||||
}
|
||||
|
||||
// Metrics used when reporting information about a scanning run.
|
||||
type Metrics struct {
|
||||
NumFiles int `json:"files"`
|
||||
NumLines int `json:"lines"`
|
||||
NumNosec int `json:"nosec"`
|
||||
NumFound int `json:"found"`
|
||||
}
|
||||
|
||||
// Analyzer object is the main object of GAS. It has methods traverse an AST
|
||||
// and invoke the correct checking rules as on each node as required.
|
||||
type Analyzer struct {
|
||||
ignoreNosec bool
|
||||
ruleset RuleSet
|
||||
context *Context
|
||||
config Config
|
||||
logger *log.Logger
|
||||
issues []*Issue
|
||||
stats *Metrics
|
||||
}
|
||||
|
||||
// NewAnalyzer builds a new anaylzer.
|
||||
func NewAnalyzer(conf Config, logger *log.Logger) *Analyzer {
|
||||
ignoreNoSec := false
|
||||
if setting, err := conf.GetGlobal("nosec"); err == nil {
|
||||
ignoreNoSec = setting == "true" || setting == "enabled"
|
||||
}
|
||||
if logger == nil {
|
||||
logger = log.New(os.Stderr, "[gas]", log.LstdFlags)
|
||||
}
|
||||
return &Analyzer{
|
||||
ignoreNosec: ignoreNoSec,
|
||||
ruleset: make(RuleSet),
|
||||
context: &Context{},
|
||||
config: conf,
|
||||
logger: logger,
|
||||
issues: make([]*Issue, 0, 16),
|
||||
stats: &Metrics{},
|
||||
}
|
||||
}
|
||||
|
||||
// LoadRules instantiates all the rules to be used when analyzing source
|
||||
// packages
|
||||
func (gas *Analyzer) LoadRules(ruleDefinitions ...RuleBuilder) {
|
||||
for _, builder := range ruleDefinitions {
|
||||
r, nodes := builder(gas.config)
|
||||
gas.ruleset.Register(r, nodes...)
|
||||
}
|
||||
}
|
||||
|
||||
// Process kicks off the analysis process for a given package
|
||||
func (gas *Analyzer) Process(packagePaths ...string) error {
|
||||
packageConfig := loader.Config{
|
||||
Build: &build.Default,
|
||||
ParserMode: parser.ParseComments,
|
||||
AllowErrors: true,
|
||||
}
|
||||
for _, packagePath := range packagePaths {
|
||||
abspath, err := filepath.Abs(packagePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
gas.logger.Println("Searching directory:", abspath)
|
||||
|
||||
basePackage, err := build.Default.ImportDir(packagePath, build.ImportComment)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var packageFiles []string
|
||||
for _, filename := range basePackage.GoFiles {
|
||||
packageFiles = append(packageFiles, path.Join(packagePath, filename))
|
||||
}
|
||||
|
||||
packageConfig.CreateFromFilenames(basePackage.Name, packageFiles...)
|
||||
}
|
||||
|
||||
builtPackage, err := packageConfig.Load()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pkg := range builtPackage.Created {
|
||||
gas.logger.Println("Checking package:", pkg.String())
|
||||
for _, file := range pkg.Files {
|
||||
gas.logger.Println("Checking file:", builtPackage.Fset.File(file.Pos()).Name())
|
||||
gas.context.FileSet = builtPackage.Fset
|
||||
gas.context.Config = gas.config
|
||||
gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, file, file.Comments)
|
||||
gas.context.Root = file
|
||||
gas.context.Info = &pkg.Info
|
||||
gas.context.Pkg = pkg.Pkg
|
||||
gas.context.Imports = NewImportTracker()
|
||||
gas.context.Imports.TrackPackages(gas.context.Pkg.Imports()...)
|
||||
ast.Walk(gas, file)
|
||||
gas.stats.NumFiles++
|
||||
gas.stats.NumLines += builtPackage.Fset.File(file.Pos()).LineCount()
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ignore a node (and sub-tree) if it is tagged with a "#nosec" comment
|
||||
func (gas *Analyzer) ignore(n ast.Node) bool {
|
||||
if groups, ok := gas.context.Comments[n]; ok && !gas.ignoreNosec {
|
||||
for _, group := range groups {
|
||||
if strings.Contains(group.Text(), "#nosec") {
|
||||
gas.stats.NumNosec++
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Visit runs the GAS visitor logic over an AST created by parsing go code.
|
||||
// Rule methods added with AddRule will be invoked as necessary.
|
||||
func (gas *Analyzer) Visit(n ast.Node) ast.Visitor {
|
||||
if !gas.ignore(n) {
|
||||
|
||||
// Track aliased and initialization imports
|
||||
gas.context.Imports.TrackImport(n)
|
||||
|
||||
for _, rule := range gas.ruleset.RegisteredFor(n) {
|
||||
issue, err := rule.Match(n, gas.context)
|
||||
if err != nil {
|
||||
file, line := GetLocation(n, gas.context)
|
||||
file = path.Base(file)
|
||||
gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
|
||||
}
|
||||
if issue != nil {
|
||||
gas.issues = append(gas.issues, issue)
|
||||
gas.stats.NumFound++
|
||||
}
|
||||
}
|
||||
return gas
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Report returns the current issues discovered and the metrics about the scan
|
||||
func (gas *Analyzer) Report() ([]*Issue, *Metrics) {
|
||||
return gas.issues, gas.stats
|
||||
}
|
||||
|
||||
// Reset clears state such as context, issues and metrics from the configured analyzer
|
||||
func (gas *Analyzer) Reset() {
|
||||
gas.context = &Context{}
|
||||
gas.issues = make([]*Issue, 0, 16)
|
||||
gas.stats = &Metrics{}
|
||||
}
|
@ -11,7 +11,7 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
package gas
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
@ -19,23 +19,23 @@ import (
|
||||
|
||||
type set map[string]bool
|
||||
|
||||
/// CallList is used to check for usage of specific packages
|
||||
/// and functions.
|
||||
// CallList is used to check for usage of specific packages
|
||||
// and functions.
|
||||
type CallList map[string]set
|
||||
|
||||
/// NewCallList creates a new empty CallList
|
||||
// NewCallList creates a new empty CallList
|
||||
func NewCallList() CallList {
|
||||
return make(CallList)
|
||||
}
|
||||
|
||||
/// AddAll will add several calls to the call list at once
|
||||
// AddAll will add several calls to the call list at once
|
||||
func (c CallList) AddAll(selector string, idents ...string) {
|
||||
for _, ident := range idents {
|
||||
c.Add(selector, ident)
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a selector and call to the call list
|
||||
// Add a selector and call to the call list
|
||||
func (c CallList) Add(selector, ident string) {
|
||||
if _, ok := c[selector]; !ok {
|
||||
c[selector] = make(set)
|
||||
@ -43,7 +43,7 @@ func (c CallList) Add(selector, ident string) {
|
||||
c[selector][ident] = true
|
||||
}
|
||||
|
||||
/// Contains returns true if the package and function are
|
||||
// Contains returns true if the package and function are
|
||||
/// members of this call list.
|
||||
func (c CallList) Contains(selector, ident string) bool {
|
||||
if idents, ok := c[selector]; ok {
|
||||
@ -53,21 +53,26 @@ func (c CallList) Contains(selector, ident string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
/// ContainsCallExpr resolves the call expression name and type
|
||||
// ContainsCallExpr resolves the call expression name and type
|
||||
/// or package and determines if it exists within the CallList
|
||||
func (c CallList) ContainsCallExpr(n ast.Node, ctx *Context) bool {
|
||||
func (c CallList) ContainsCallExpr(n ast.Node, ctx *Context) *ast.CallExpr {
|
||||
selector, ident, err := GetCallInfo(n, ctx)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
// Try direct resolution
|
||||
if c.Contains(selector, ident) {
|
||||
return true
|
||||
return nil
|
||||
}
|
||||
|
||||
// Also support explicit path
|
||||
if path, ok := GetImportPath(selector, ctx); ok {
|
||||
return c.Contains(path, ident)
|
||||
// Use only explicit path to reduce conflicts
|
||||
if path, ok := GetImportPath(selector, ctx); ok && c.Contains(path, ident) {
|
||||
return n.(*ast.CallExpr)
|
||||
}
|
||||
return false
|
||||
|
||||
/*
|
||||
// Try direct resolution
|
||||
if c.Contains(selector, ident) {
|
||||
log.Printf("c.Contains == true, %s, %s.", selector, ident)
|
||||
return n.(*ast.CallExpr)
|
||||
}
|
||||
*/
|
||||
|
||||
return nil
|
||||
}
|
254
tools/vendor/github.com/GoASTScanner/gas/cmd/gas/main.go
generated
vendored
Normal file
254
tools/vendor/github.com/GoASTScanner/gas/cmd/gas/main.go
generated
vendored
Normal file
@ -0,0 +1,254 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
"github.com/GoASTScanner/gas/output"
|
||||
"github.com/GoASTScanner/gas/rules"
|
||||
"github.com/kisielk/gotool"
|
||||
)
|
||||
|
||||
const (
|
||||
usageText = `
|
||||
GAS - Go AST Scanner
|
||||
|
||||
Gas analyzes Go source code to look for common programming mistakes that
|
||||
can lead to security problems.
|
||||
|
||||
USAGE:
|
||||
|
||||
# Check a single package
|
||||
$ gas $GOPATH/src/github.com/example/project
|
||||
|
||||
# Check all packages under the current directory and save results in
|
||||
# json format.
|
||||
$ gas -fmt=json -out=results.json ./...
|
||||
|
||||
# Run a specific set of rules (by default all rules will be run):
|
||||
$ gas -include=G101,G203,G401 ./...
|
||||
|
||||
# Run all rules except the provided
|
||||
$ gas -exclude=G101 $GOPATH/src/github.com/example/project/...
|
||||
|
||||
`
|
||||
)
|
||||
|
||||
var (
|
||||
// #nosec flag
|
||||
flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
|
||||
|
||||
// format output
|
||||
flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, yaml, csv, junit-xml, html, or text")
|
||||
|
||||
// output file
|
||||
flagOutput = flag.String("out", "", "Set output file for results")
|
||||
|
||||
// config file
|
||||
flagConfig = flag.String("conf", "", "Path to optional config file")
|
||||
|
||||
// quiet
|
||||
flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
|
||||
|
||||
// rules to explicitly include
|
||||
flagRulesInclude = flag.String("include", "", "Comma separated list of rules IDs to include. (see rule list)")
|
||||
|
||||
// rules to explicitly exclude
|
||||
flagRulesExclude = flag.String("exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
|
||||
|
||||
// log to file or stderr
|
||||
flagLogfile = flag.String("log", "", "Log messages to file rather than stderr")
|
||||
|
||||
// sort the issues by severity
|
||||
flagSortIssues = flag.Bool("sort", true, "Sort issues by severity")
|
||||
|
||||
logger *log.Logger
|
||||
)
|
||||
|
||||
// #nosec
|
||||
func usage() {
|
||||
|
||||
fmt.Fprintln(os.Stderr, usageText)
|
||||
fmt.Fprint(os.Stderr, "OPTIONS:\n\n")
|
||||
flag.PrintDefaults()
|
||||
fmt.Fprint(os.Stderr, "\n\nRULES:\n\n")
|
||||
|
||||
// sorted rule list for ease of reading
|
||||
rl := rules.Generate()
|
||||
keys := make([]string, 0, len(rl))
|
||||
for key := range rl {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
for _, k := range keys {
|
||||
v := rl[k]
|
||||
fmt.Fprintf(os.Stderr, "\t%s: %s\n", k, v.Description)
|
||||
}
|
||||
fmt.Fprint(os.Stderr, "\n")
|
||||
}
|
||||
|
||||
func loadConfig(configFile string) (gas.Config, error) {
|
||||
config := gas.NewConfig()
|
||||
if configFile != "" {
|
||||
file, err := os.Open(configFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
if _, err := config.ReadFrom(file); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if *flagIgnoreNoSec {
|
||||
config.SetGlobal("nosec", "true")
|
||||
}
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func loadRules(include, exclude string) rules.RuleList {
|
||||
var filters []rules.RuleFilter
|
||||
if include != "" {
|
||||
logger.Printf("including rules: %s", include)
|
||||
including := strings.Split(include, ",")
|
||||
filters = append(filters, rules.NewRuleFilter(false, including...))
|
||||
} else {
|
||||
logger.Println("including rules: default")
|
||||
}
|
||||
|
||||
if exclude != "" {
|
||||
logger.Printf("excluding rules: %s", exclude)
|
||||
excluding := strings.Split(exclude, ",")
|
||||
filters = append(filters, rules.NewRuleFilter(true, excluding...))
|
||||
} else {
|
||||
logger.Println("excluding rules: default")
|
||||
}
|
||||
return rules.Generate(filters...)
|
||||
}
|
||||
|
||||
func saveOutput(filename, format string, issues []*gas.Issue, metrics *gas.Metrics) error {
|
||||
if filename != "" {
|
||||
outfile, err := os.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outfile.Close()
|
||||
err = output.CreateReport(outfile, format, issues, metrics)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
err := output.CreateReport(os.Stdout, format, issues, metrics)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
||||
// Setup usage description
|
||||
flag.Usage = usage
|
||||
|
||||
// Parse command line arguments
|
||||
flag.Parse()
|
||||
|
||||
// Ensure at least one file was specified
|
||||
if flag.NArg() == 0 {
|
||||
fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n") // #nosec
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Setup logging
|
||||
logWriter := os.Stderr
|
||||
if *flagLogfile != "" {
|
||||
var e error
|
||||
logWriter, e = os.Create(*flagLogfile)
|
||||
if e != nil {
|
||||
flag.Usage()
|
||||
log.Fatal(e)
|
||||
}
|
||||
}
|
||||
logger = log.New(logWriter, "[gas] ", log.LstdFlags)
|
||||
|
||||
// Load config
|
||||
config, err := loadConfig(*flagConfig)
|
||||
if err != nil {
|
||||
logger.Fatal(err)
|
||||
}
|
||||
|
||||
// Load enabled rule definitions
|
||||
ruleDefinitions := loadRules(*flagRulesInclude, *flagRulesExclude)
|
||||
if len(ruleDefinitions) <= 0 {
|
||||
logger.Fatal("cannot continue: no rules are configured.")
|
||||
}
|
||||
|
||||
// Create the analyzer
|
||||
analyzer := gas.NewAnalyzer(config, logger)
|
||||
analyzer.LoadRules(ruleDefinitions.Builders()...)
|
||||
|
||||
vendor := regexp.MustCompile(`[\\/]vendor([\\/]|$)`)
|
||||
|
||||
var packages []string
|
||||
// Iterate over packages on the import paths
|
||||
for _, pkg := range gotool.ImportPaths(flag.Args()) {
|
||||
|
||||
// Skip vendor directory
|
||||
if vendor.MatchString(pkg) {
|
||||
continue
|
||||
}
|
||||
packages = append(packages, pkg)
|
||||
}
|
||||
|
||||
if err := analyzer.Process(packages...); err != nil {
|
||||
logger.Fatal(err)
|
||||
}
|
||||
|
||||
// Collect the results
|
||||
issues, metrics := analyzer.Report()
|
||||
|
||||
issuesFound := len(issues) > 0
|
||||
// Exit quietly if nothing was found
|
||||
if !issuesFound && *flagQuiet {
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// Sort the issue by severity
|
||||
if *flagSortIssues {
|
||||
sortIssues(issues)
|
||||
}
|
||||
|
||||
// Create output report
|
||||
if err := saveOutput(*flagOutput, *flagFormat, issues, metrics); err != nil {
|
||||
logger.Fatal(err)
|
||||
}
|
||||
|
||||
// Finialize logging
|
||||
logWriter.Close() // #nosec
|
||||
|
||||
// Do we have an issue? If so exit 1
|
||||
if issuesFound {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
20
tools/vendor/github.com/GoASTScanner/gas/cmd/gas/sort_issues.go
generated
vendored
Normal file
20
tools/vendor/github.com/GoASTScanner/gas/cmd/gas/sort_issues.go
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"sort"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type sortBySeverity []*gas.Issue
|
||||
|
||||
func (s sortBySeverity) Len() int { return len(s) }
|
||||
|
||||
func (s sortBySeverity) Less(i, j int) bool { return s[i].Severity > s[i].Severity }
|
||||
|
||||
func (s sortBySeverity) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
|
||||
// sortIssues sorts the issues by severity in descending order
|
||||
func sortIssues(issues []*gas.Issue) {
|
||||
sort.Sort(sortBySeverity(issues))
|
||||
}
|
88
tools/vendor/github.com/GoASTScanner/gas/config.go
generated
vendored
Normal file
88
tools/vendor/github.com/GoASTScanner/gas/config.go
generated
vendored
Normal file
@ -0,0 +1,88 @@
|
||||
package gas
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
)
|
||||
|
||||
const (
|
||||
// Globals are applicable to all rules and used for general
|
||||
// configuration settings for gas.
|
||||
Globals = "global"
|
||||
)
|
||||
|
||||
// Config is used to provide configuration and customization to each of the rules.
|
||||
type Config map[string]interface{}
|
||||
|
||||
// NewConfig initializes a new configuration instance. The configuration data then
|
||||
// needs to be loaded via c.ReadFrom(strings.NewReader("config data"))
|
||||
// or from a *os.File.
|
||||
func NewConfig() Config {
|
||||
cfg := make(Config)
|
||||
cfg[Globals] = make(map[string]string)
|
||||
return cfg
|
||||
}
|
||||
|
||||
// ReadFrom implements the io.ReaderFrom interface. This
|
||||
// should be used with io.Reader to load configuration from
|
||||
//file or from string etc.
|
||||
func (c Config) ReadFrom(r io.Reader) (int64, error) {
|
||||
data, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
return int64(len(data)), err
|
||||
}
|
||||
if err = json.Unmarshal(data, &c); err != nil {
|
||||
return int64(len(data)), err
|
||||
}
|
||||
return int64(len(data)), nil
|
||||
}
|
||||
|
||||
// WriteTo implements the io.WriteTo interface. This should
|
||||
// be used to save or print out the configuration information.
|
||||
func (c Config) WriteTo(w io.Writer) (int64, error) {
|
||||
data, err := json.Marshal(c)
|
||||
if err != nil {
|
||||
return int64(len(data)), err
|
||||
}
|
||||
return io.Copy(w, bytes.NewReader(data))
|
||||
}
|
||||
|
||||
// Get returns the configuration section for the supplied key
|
||||
func (c Config) Get(section string) (interface{}, error) {
|
||||
settings, found := c[section]
|
||||
if !found {
|
||||
return nil, fmt.Errorf("Section %s not in configuration", section)
|
||||
}
|
||||
return settings, nil
|
||||
}
|
||||
|
||||
// Set section in the configuration to specified value
|
||||
func (c Config) Set(section string, value interface{}) {
|
||||
c[section] = value
|
||||
}
|
||||
|
||||
// GetGlobal returns value associated with global configuration option
|
||||
func (c Config) GetGlobal(option string) (string, error) {
|
||||
if globals, ok := c[Globals]; ok {
|
||||
if settings, ok := globals.(map[string]string); ok {
|
||||
if value, ok := settings[option]; ok {
|
||||
return value, nil
|
||||
}
|
||||
return "", fmt.Errorf("global setting for %s not found", option)
|
||||
}
|
||||
}
|
||||
return "", fmt.Errorf("no global config options found")
|
||||
|
||||
}
|
||||
|
||||
// SetGlobal associates a value with a global configuration ooption
|
||||
func (c Config) SetGlobal(option, value string) {
|
||||
if globals, ok := c[Globals]; ok {
|
||||
if settings, ok := globals.(map[string]string); ok {
|
||||
settings[option] = value
|
||||
}
|
||||
}
|
||||
}
|
235
tools/vendor/github.com/GoASTScanner/gas/core/analyzer.go
generated
vendored
235
tools/vendor/github.com/GoASTScanner/gas/core/analyzer.go
generated
vendored
@ -1,235 +0,0 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package core holds the central scanning logic used by GAS
|
||||
package core
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/importer"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ImportInfo is used to track aliased and initialization only imports.
|
||||
type ImportInfo struct {
|
||||
Imported map[string]string
|
||||
Aliased map[string]string
|
||||
InitOnly map[string]bool
|
||||
}
|
||||
|
||||
func NewImportInfo() *ImportInfo {
|
||||
return &ImportInfo{
|
||||
make(map[string]string),
|
||||
make(map[string]string),
|
||||
make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
// The Context is populated with data parsed from the source code as it is scanned.
|
||||
// It is passed through to all rule functions as they are called. Rules may use
|
||||
// this data in conjunction withe the encoutered AST node.
|
||||
type Context struct {
|
||||
FileSet *token.FileSet
|
||||
Comments ast.CommentMap
|
||||
Info *types.Info
|
||||
Pkg *types.Package
|
||||
Root *ast.File
|
||||
Config map[string]interface{}
|
||||
Imports *ImportInfo
|
||||
}
|
||||
|
||||
// The Rule interface used by all rules supported by GAS.
|
||||
type Rule interface {
|
||||
Match(ast.Node, *Context) (*Issue, error)
|
||||
}
|
||||
|
||||
// A RuleSet maps lists of rules to the type of AST node they should be run on.
|
||||
// The anaylzer will only invoke rules contained in the list associated with the
|
||||
// type of AST node it is currently visiting.
|
||||
type RuleSet map[reflect.Type][]Rule
|
||||
|
||||
// Metrics used when reporting information about a scanning run.
|
||||
type Metrics struct {
|
||||
NumFiles int `json:"files"`
|
||||
NumLines int `json:"lines"`
|
||||
NumNosec int `json:"nosec"`
|
||||
NumFound int `json:"found"`
|
||||
}
|
||||
|
||||
// The Analyzer object is the main object of GAS. It has methods traverse an AST
|
||||
// and invoke the correct checking rules as on each node as required.
|
||||
type Analyzer struct {
|
||||
ignoreNosec bool
|
||||
ruleset RuleSet
|
||||
context *Context
|
||||
logger *log.Logger
|
||||
Issues []*Issue `json:"issues"`
|
||||
Stats *Metrics `json:"metrics"`
|
||||
}
|
||||
|
||||
// NewAnalyzer builds a new anaylzer.
|
||||
func NewAnalyzer(conf map[string]interface{}, logger *log.Logger) Analyzer {
|
||||
if logger == nil {
|
||||
logger = log.New(os.Stdout, "[gas]", 0)
|
||||
}
|
||||
a := Analyzer{
|
||||
ignoreNosec: conf["ignoreNosec"].(bool),
|
||||
ruleset: make(RuleSet),
|
||||
context: &Context{nil, nil, nil, nil, nil, nil, nil},
|
||||
logger: logger,
|
||||
Issues: make([]*Issue, 0, 16),
|
||||
Stats: &Metrics{0, 0, 0, 0},
|
||||
}
|
||||
|
||||
// TODO(tkelsey): use the inc/exc lists
|
||||
|
||||
return a
|
||||
}
|
||||
|
||||
func (gas *Analyzer) process(filename string, source interface{}) error {
|
||||
mode := parser.ParseComments
|
||||
gas.context.FileSet = token.NewFileSet()
|
||||
root, err := parser.ParseFile(gas.context.FileSet, filename, source, mode)
|
||||
if err == nil {
|
||||
gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, root, root.Comments)
|
||||
gas.context.Root = root
|
||||
|
||||
// here we get type info
|
||||
gas.context.Info = &types.Info{
|
||||
Types: make(map[ast.Expr]types.TypeAndValue),
|
||||
Defs: make(map[*ast.Ident]types.Object),
|
||||
Uses: make(map[*ast.Ident]types.Object),
|
||||
Selections: make(map[*ast.SelectorExpr]*types.Selection),
|
||||
Scopes: make(map[ast.Node]*types.Scope),
|
||||
Implicits: make(map[ast.Node]types.Object),
|
||||
}
|
||||
|
||||
conf := types.Config{Importer: importer.Default()}
|
||||
gas.context.Pkg, err = conf.Check("pkg", gas.context.FileSet, []*ast.File{root}, gas.context.Info)
|
||||
if err != nil {
|
||||
// TODO(gm) Type checker not currently considering all files within a package
|
||||
// see: issue #113
|
||||
gas.logger.Printf(`Error during type checking: "%s"`, err)
|
||||
err = nil
|
||||
}
|
||||
|
||||
gas.context.Imports = NewImportInfo()
|
||||
for _, pkg := range gas.context.Pkg.Imports() {
|
||||
gas.context.Imports.Imported[pkg.Path()] = pkg.Name()
|
||||
}
|
||||
ast.Walk(gas, root)
|
||||
gas.Stats.NumFiles++
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// AddRule adds a rule into a rule set list mapped to the given AST node's type.
|
||||
// The node is only needed for its type and is not otherwise used.
|
||||
func (gas *Analyzer) AddRule(r Rule, nodes []ast.Node) {
|
||||
for _, n := range nodes {
|
||||
t := reflect.TypeOf(n)
|
||||
if val, ok := gas.ruleset[t]; ok {
|
||||
gas.ruleset[t] = append(val, r)
|
||||
} else {
|
||||
gas.ruleset[t] = []Rule{r}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Process reads in a source file, convert it to an AST and traverse it.
|
||||
// Rule methods added with AddRule will be invoked as necessary.
|
||||
func (gas *Analyzer) Process(filename string) error {
|
||||
err := gas.process(filename, nil)
|
||||
fun := func(f *token.File) bool {
|
||||
gas.Stats.NumLines += f.LineCount()
|
||||
return true
|
||||
}
|
||||
gas.context.FileSet.Iterate(fun)
|
||||
return err
|
||||
}
|
||||
|
||||
// ProcessSource will convert a source code string into an AST and traverse it.
|
||||
// Rule methods added with AddRule will be invoked as necessary. The string is
|
||||
// identified by the filename given but no file IO will be done.
|
||||
func (gas *Analyzer) ProcessSource(filename string, source string) error {
|
||||
err := gas.process(filename, source)
|
||||
fun := func(f *token.File) bool {
|
||||
gas.Stats.NumLines += f.LineCount()
|
||||
return true
|
||||
}
|
||||
gas.context.FileSet.Iterate(fun)
|
||||
return err
|
||||
}
|
||||
|
||||
// ignore a node (and sub-tree) if it is tagged with a "#nosec" comment
|
||||
func (gas *Analyzer) ignore(n ast.Node) bool {
|
||||
if groups, ok := gas.context.Comments[n]; ok && !gas.ignoreNosec {
|
||||
for _, group := range groups {
|
||||
if strings.Contains(group.Text(), "#nosec") {
|
||||
gas.Stats.NumNosec++
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Visit runs the GAS visitor logic over an AST created by parsing go code.
|
||||
// Rule methods added with AddRule will be invoked as necessary.
|
||||
func (gas *Analyzer) Visit(n ast.Node) ast.Visitor {
|
||||
if !gas.ignore(n) {
|
||||
|
||||
// Track aliased and initialization imports
|
||||
if imported, ok := n.(*ast.ImportSpec); ok {
|
||||
path := strings.Trim(imported.Path.Value, `"`)
|
||||
if imported.Name != nil {
|
||||
if imported.Name.Name == "_" {
|
||||
// Initialization import
|
||||
gas.context.Imports.InitOnly[path] = true
|
||||
} else {
|
||||
// Aliased import
|
||||
gas.context.Imports.Aliased[path] = imported.Name.Name
|
||||
}
|
||||
}
|
||||
// unsafe is not included in Package.Imports()
|
||||
if path == "unsafe" {
|
||||
gas.context.Imports.Imported[path] = path
|
||||
}
|
||||
}
|
||||
|
||||
if val, ok := gas.ruleset[reflect.TypeOf(n)]; ok {
|
||||
for _, rule := range val {
|
||||
ret, err := rule.Match(n, gas.context)
|
||||
if err != nil {
|
||||
file, line := GetLocation(n, gas.context)
|
||||
file = path.Base(file)
|
||||
gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
|
||||
}
|
||||
if ret != nil {
|
||||
gas.Issues = append(gas.Issues, ret)
|
||||
gas.Stats.NumFound++
|
||||
}
|
||||
}
|
||||
}
|
||||
return gas
|
||||
}
|
||||
return nil
|
||||
}
|
404
tools/vendor/github.com/GoASTScanner/gas/core/select.go
generated
vendored
404
tools/vendor/github.com/GoASTScanner/gas/core/select.go
generated
vendored
@ -1,404 +0,0 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// SelectFunc is like an AST visitor, but has a richer interface. It
|
||||
// is called with the current ast.Node being visitied and that nodes depth in
|
||||
// the tree. The function can return true to continue traversing the tree, or
|
||||
// false to end traversal here.
|
||||
type SelectFunc func(ast.Node, int) bool
|
||||
|
||||
func walkIdentList(list []*ast.Ident, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func walkExprList(list []ast.Expr, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func walkStmtList(list []ast.Stmt, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func walkDeclList(list []ast.Decl, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func depthWalk(node ast.Node, depth int, fun SelectFunc) {
|
||||
if !fun(node, depth) {
|
||||
return
|
||||
}
|
||||
|
||||
switch n := node.(type) {
|
||||
// Comments and fields
|
||||
case *ast.Comment:
|
||||
|
||||
case *ast.CommentGroup:
|
||||
for _, c := range n.List {
|
||||
depthWalk(c, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.Field:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
walkIdentList(n.Names, depth+1, fun)
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
if n.Tag != nil {
|
||||
depthWalk(n.Tag, depth+1, fun)
|
||||
}
|
||||
if n.Comment != nil {
|
||||
depthWalk(n.Comment, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.FieldList:
|
||||
for _, f := range n.List {
|
||||
depthWalk(f, depth+1, fun)
|
||||
}
|
||||
|
||||
// Expressions
|
||||
case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
|
||||
|
||||
case *ast.Ellipsis:
|
||||
if n.Elt != nil {
|
||||
depthWalk(n.Elt, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.FuncLit:
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.CompositeLit:
|
||||
if n.Type != nil {
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
}
|
||||
walkExprList(n.Elts, depth+1, fun)
|
||||
|
||||
case *ast.ParenExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
depthWalk(n.Sel, depth+1, fun)
|
||||
|
||||
case *ast.IndexExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
depthWalk(n.Index, depth+1, fun)
|
||||
|
||||
case *ast.SliceExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
if n.Low != nil {
|
||||
depthWalk(n.Low, depth+1, fun)
|
||||
}
|
||||
if n.High != nil {
|
||||
depthWalk(n.High, depth+1, fun)
|
||||
}
|
||||
if n.Max != nil {
|
||||
depthWalk(n.Max, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.TypeAssertExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
if n.Type != nil {
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.CallExpr:
|
||||
depthWalk(n.Fun, depth+1, fun)
|
||||
walkExprList(n.Args, depth+1, fun)
|
||||
|
||||
case *ast.StarExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
|
||||
case *ast.UnaryExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
|
||||
case *ast.BinaryExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
depthWalk(n.Y, depth+1, fun)
|
||||
|
||||
case *ast.KeyValueExpr:
|
||||
depthWalk(n.Key, depth+1, fun)
|
||||
depthWalk(n.Value, depth+1, fun)
|
||||
|
||||
// Types
|
||||
case *ast.ArrayType:
|
||||
if n.Len != nil {
|
||||
depthWalk(n.Len, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Elt, depth+1, fun)
|
||||
|
||||
case *ast.StructType:
|
||||
depthWalk(n.Fields, depth+1, fun)
|
||||
|
||||
case *ast.FuncType:
|
||||
if n.Params != nil {
|
||||
depthWalk(n.Params, depth+1, fun)
|
||||
}
|
||||
if n.Results != nil {
|
||||
depthWalk(n.Results, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.InterfaceType:
|
||||
depthWalk(n.Methods, depth+1, fun)
|
||||
|
||||
case *ast.MapType:
|
||||
depthWalk(n.Key, depth+1, fun)
|
||||
depthWalk(n.Value, depth+1, fun)
|
||||
|
||||
case *ast.ChanType:
|
||||
depthWalk(n.Value, depth+1, fun)
|
||||
|
||||
// Statements
|
||||
case *ast.BadStmt:
|
||||
|
||||
case *ast.DeclStmt:
|
||||
depthWalk(n.Decl, depth+1, fun)
|
||||
|
||||
case *ast.EmptyStmt:
|
||||
|
||||
case *ast.LabeledStmt:
|
||||
depthWalk(n.Label, depth+1, fun)
|
||||
depthWalk(n.Stmt, depth+1, fun)
|
||||
|
||||
case *ast.ExprStmt:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
|
||||
case *ast.SendStmt:
|
||||
depthWalk(n.Chan, depth+1, fun)
|
||||
depthWalk(n.Value, depth+1, fun)
|
||||
|
||||
case *ast.IncDecStmt:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
|
||||
case *ast.AssignStmt:
|
||||
walkExprList(n.Lhs, depth+1, fun)
|
||||
walkExprList(n.Rhs, depth+1, fun)
|
||||
|
||||
case *ast.GoStmt:
|
||||
depthWalk(n.Call, depth+1, fun)
|
||||
|
||||
case *ast.DeferStmt:
|
||||
depthWalk(n.Call, depth+1, fun)
|
||||
|
||||
case *ast.ReturnStmt:
|
||||
walkExprList(n.Results, depth+1, fun)
|
||||
|
||||
case *ast.BranchStmt:
|
||||
if n.Label != nil {
|
||||
depthWalk(n.Label, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.BlockStmt:
|
||||
walkStmtList(n.List, depth+1, fun)
|
||||
|
||||
case *ast.IfStmt:
|
||||
if n.Init != nil {
|
||||
depthWalk(n.Init, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Cond, depth+1, fun)
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
if n.Else != nil {
|
||||
depthWalk(n.Else, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.CaseClause:
|
||||
walkExprList(n.List, depth+1, fun)
|
||||
walkStmtList(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.SwitchStmt:
|
||||
if n.Init != nil {
|
||||
depthWalk(n.Init, depth+1, fun)
|
||||
}
|
||||
if n.Tag != nil {
|
||||
depthWalk(n.Tag, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.TypeSwitchStmt:
|
||||
if n.Init != nil {
|
||||
depthWalk(n.Init, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Assign, depth+1, fun)
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.CommClause:
|
||||
if n.Comm != nil {
|
||||
depthWalk(n.Comm, depth+1, fun)
|
||||
}
|
||||
walkStmtList(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.SelectStmt:
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.ForStmt:
|
||||
if n.Init != nil {
|
||||
depthWalk(n.Init, depth+1, fun)
|
||||
}
|
||||
if n.Cond != nil {
|
||||
depthWalk(n.Cond, depth+1, fun)
|
||||
}
|
||||
if n.Post != nil {
|
||||
depthWalk(n.Post, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.RangeStmt:
|
||||
if n.Key != nil {
|
||||
depthWalk(n.Key, depth+1, fun)
|
||||
}
|
||||
if n.Value != nil {
|
||||
depthWalk(n.Value, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
|
||||
// Declarations
|
||||
case *ast.ImportSpec:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
if n.Name != nil {
|
||||
depthWalk(n.Name, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Path, depth+1, fun)
|
||||
if n.Comment != nil {
|
||||
depthWalk(n.Comment, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.ValueSpec:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
walkIdentList(n.Names, depth+1, fun)
|
||||
if n.Type != nil {
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
}
|
||||
walkExprList(n.Values, depth+1, fun)
|
||||
if n.Comment != nil {
|
||||
depthWalk(n.Comment, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.TypeSpec:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Name, depth+1, fun)
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
if n.Comment != nil {
|
||||
depthWalk(n.Comment, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.BadDecl:
|
||||
|
||||
case *ast.GenDecl:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
for _, s := range n.Specs {
|
||||
depthWalk(s, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.FuncDecl:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
if n.Recv != nil {
|
||||
depthWalk(n.Recv, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Name, depth+1, fun)
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
if n.Body != nil {
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
}
|
||||
|
||||
// Files and packages
|
||||
case *ast.File:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
depthWalk(n.Name, depth+1, fun)
|
||||
walkDeclList(n.Decls, depth+1, fun)
|
||||
// don't walk n.Comments - they have been
|
||||
// visited already through the individual
|
||||
// nodes
|
||||
|
||||
case *ast.Package:
|
||||
for _, f := range n.Files {
|
||||
depthWalk(f, depth+1, fun)
|
||||
}
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("gas.depthWalk: unexpected node type %T", n))
|
||||
}
|
||||
}
|
||||
|
||||
type Selector interface {
|
||||
Final(ast.Node)
|
||||
Partial(ast.Node) bool
|
||||
}
|
||||
|
||||
func Select(s Selector, n ast.Node, bits ...reflect.Type) {
|
||||
fun := func(n ast.Node, d int) bool {
|
||||
if d < len(bits) && reflect.TypeOf(n) == bits[d] {
|
||||
if d == len(bits)-1 {
|
||||
s.Final(n)
|
||||
return false
|
||||
} else if s.Partial(n) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
depthWalk(n, 0, fun)
|
||||
}
|
||||
|
||||
// SimpleSelect will try to match a path through a sub-tree starting at a given AST node.
|
||||
// The type of each node in the path at a given depth must match its entry in list of
|
||||
// node types given.
|
||||
func SimpleSelect(n ast.Node, bits ...reflect.Type) ast.Node {
|
||||
var found ast.Node
|
||||
fun := func(n ast.Node, d int) bool {
|
||||
if found != nil {
|
||||
return false // short cut logic if we have found a match
|
||||
}
|
||||
|
||||
if d < len(bits) && reflect.TypeOf(n) == bits[d] {
|
||||
if d == len(bits)-1 {
|
||||
found = n
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
depthWalk(n, 0, fun)
|
||||
return found
|
||||
}
|
@ -12,41 +12,16 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
package gas
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// helpfull "canned" matching routines ----------------------------------------
|
||||
|
||||
func selectName(n ast.Node, s reflect.Type) (string, bool) {
|
||||
t := reflect.TypeOf(&ast.SelectorExpr{})
|
||||
if node, ok := SimpleSelect(n, s, t).(*ast.SelectorExpr); ok {
|
||||
t = reflect.TypeOf(&ast.Ident{})
|
||||
if ident, ok := SimpleSelect(node.X, t).(*ast.Ident); ok {
|
||||
return strings.Join([]string{ident.Name, node.Sel.Name}, "."), ok
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// MatchCall will match an ast.CallNode if its method name obays the given regex.
|
||||
func MatchCall(n ast.Node, r *regexp.Regexp) *ast.CallExpr {
|
||||
t := reflect.TypeOf(&ast.CallExpr{})
|
||||
if name, ok := selectName(n, t); ok && r.MatchString(name) {
|
||||
return n.(*ast.CallExpr)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MatchCallByPackage ensures that the specified package is imported,
|
||||
// adjusts the name for any aliases and ignores cases that are
|
||||
// initialization only imports.
|
||||
@ -100,11 +75,13 @@ func MatchCallByType(n ast.Node, ctx *Context, requiredType string, calls ...str
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// MatchCompLit will match an ast.CompositeLit if its string value obays the given regex.
|
||||
func MatchCompLit(n ast.Node, r *regexp.Regexp) *ast.CompositeLit {
|
||||
t := reflect.TypeOf(&ast.CompositeLit{})
|
||||
if name, ok := selectName(n, t); ok && r.MatchString(name) {
|
||||
return n.(*ast.CompositeLit)
|
||||
// MatchCompLit will match an ast.CompositeLit based on the supplied type
|
||||
func MatchCompLit(n ast.Node, ctx *Context, required string) *ast.CompositeLit {
|
||||
if complit, ok := n.(*ast.CompositeLit); ok {
|
||||
typeOf := ctx.Info.TypeOf(complit)
|
||||
if typeOf.String() == required {
|
||||
return complit
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@ -117,7 +94,7 @@ func GetInt(n ast.Node) (int64, error) {
|
||||
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||
}
|
||||
|
||||
// GetInt will read and return a float value from an ast.BasicLit
|
||||
// GetFloat will read and return a float value from an ast.BasicLit
|
||||
func GetFloat(n ast.Node) (float64, error) {
|
||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.FLOAT {
|
||||
return strconv.ParseFloat(node.Value, 64)
|
||||
@ -125,7 +102,7 @@ func GetFloat(n ast.Node) (float64, error) {
|
||||
return 0.0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||
}
|
||||
|
||||
// GetInt will read and return a char value from an ast.BasicLit
|
||||
// GetChar will read and return a char value from an ast.BasicLit
|
||||
func GetChar(n ast.Node) (byte, error) {
|
||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.CHAR {
|
||||
return node.Value[0], nil
|
||||
@ -133,7 +110,7 @@ func GetChar(n ast.Node) (byte, error) {
|
||||
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||
}
|
||||
|
||||
// GetInt will read and return a string value from an ast.BasicLit
|
||||
// GetString will read and return a string value from an ast.BasicLit
|
||||
func GetString(n ast.Node) (string, error) {
|
||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.STRING {
|
||||
return strconv.Unquote(node.Value)
|
||||
@ -170,12 +147,10 @@ func GetCallInfo(n ast.Node, ctx *Context) (string, string, error) {
|
||||
t := ctx.Info.TypeOf(expr)
|
||||
if t != nil {
|
||||
return t.String(), fn.Sel.Name, nil
|
||||
} else {
|
||||
return "undefined", fn.Sel.Name, fmt.Errorf("missing type info")
|
||||
}
|
||||
} else {
|
||||
return expr.Name, fn.Sel.Name, nil
|
||||
return "undefined", fn.Sel.Name, fmt.Errorf("missing type info")
|
||||
}
|
||||
return expr.Name, fn.Sel.Name, nil
|
||||
}
|
||||
case *ast.Ident:
|
||||
return ctx.Pkg.Name(), fn.Name, nil
|
||||
@ -205,7 +180,7 @@ func GetImportedName(path string, ctx *Context) (string, bool) {
|
||||
// GetImportPath resolves the full import path of an identifer based on
|
||||
// the imports in the current context.
|
||||
func GetImportPath(name string, ctx *Context) (string, bool) {
|
||||
for path, _ := range ctx.Imports.Imported {
|
||||
for path := range ctx.Imports.Imported {
|
||||
if imported, ok := GetImportedName(path, ctx); ok && imported == name {
|
||||
return path, true
|
||||
}
|
67
tools/vendor/github.com/GoASTScanner/gas/import_tracker.go
generated
vendored
Normal file
67
tools/vendor/github.com/GoASTScanner/gas/import_tracker.go
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package gas
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/types"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ImportTracker is used to normalize the packages that have been imported
|
||||
// by a source file. It is able to differentiate between plain imports, aliased
|
||||
// imports and init only imports.
|
||||
type ImportTracker struct {
|
||||
Imported map[string]string
|
||||
Aliased map[string]string
|
||||
InitOnly map[string]bool
|
||||
}
|
||||
|
||||
// NewImportTracker creates an empty Import tracker instance
|
||||
func NewImportTracker() *ImportTracker {
|
||||
return &ImportTracker{
|
||||
make(map[string]string),
|
||||
make(map[string]string),
|
||||
make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
// TrackPackages tracks all the imports used by the supplied packages
|
||||
func (t *ImportTracker) TrackPackages(pkgs ...*types.Package) {
|
||||
for _, pkg := range pkgs {
|
||||
t.Imported[pkg.Path()] = pkg.Name()
|
||||
// Transient imports
|
||||
//for _, imp := range pkg.Imports() {
|
||||
// t.Imported[imp.Path()] = imp.Name()
|
||||
//}
|
||||
}
|
||||
}
|
||||
|
||||
// TrackImport tracks imports and handles the 'unsafe' import
|
||||
func (t *ImportTracker) TrackImport(n ast.Node) {
|
||||
if imported, ok := n.(*ast.ImportSpec); ok {
|
||||
path := strings.Trim(imported.Path.Value, `"`)
|
||||
if imported.Name != nil {
|
||||
if imported.Name.Name == "_" {
|
||||
// Initialization only import
|
||||
t.InitOnly[path] = true
|
||||
} else {
|
||||
// Aliased import
|
||||
t.Aliased[path] = imported.Name.Name
|
||||
}
|
||||
}
|
||||
if path == "unsafe" {
|
||||
t.Imported[path] = path
|
||||
}
|
||||
}
|
||||
}
|
@ -11,32 +11,37 @@
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
package core
|
||||
|
||||
package gas
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"os"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Score type used by severity and confidence values
|
||||
type Score int
|
||||
|
||||
const (
|
||||
Low Score = iota // Low value
|
||||
Medium // Medium value
|
||||
High // High value
|
||||
// Low severity or confidence
|
||||
Low Score = iota
|
||||
// Medium severity or confidence
|
||||
Medium
|
||||
// High severity or confidence
|
||||
High
|
||||
)
|
||||
|
||||
// An Issue is returnd by a GAS rule if it discovers an issue with the scanned code.
|
||||
// Issue is returnd by a GAS rule if it discovers an issue with the scanned code.
|
||||
type Issue struct {
|
||||
Severity Score `json:"severity"` // issue severity (how problematic it is)
|
||||
Confidence Score `json:"confidence"` // issue confidence (how sure we are we found it)
|
||||
What string `json:"details"` // Human readable explanation
|
||||
File string `json:"file"` // File name we found it in
|
||||
Code string `json:"code"` // Impacted code line
|
||||
Line int `json:"line"` // Line number in file
|
||||
Line string `json:"line"` // Line number in file
|
||||
}
|
||||
|
||||
// MetaData is embedded in all GAS rules. The Severity, Confidence and What message
|
||||
@ -71,7 +76,7 @@ func codeSnippet(file *os.File, start int64, end int64, n ast.Node) (string, err
|
||||
}
|
||||
|
||||
size := (int)(end - start) // Go bug, os.File.Read should return int64 ...
|
||||
file.Seek(start, 0)
|
||||
file.Seek(start, 0) // #nosec
|
||||
|
||||
buf := make([]byte, size)
|
||||
if nread, err := file.Read(buf); err != nil || nread != size {
|
||||
@ -85,7 +90,12 @@ func NewIssue(ctx *Context, node ast.Node, desc string, severity Score, confiden
|
||||
var code string
|
||||
fobj := ctx.FileSet.File(node.Pos())
|
||||
name := fobj.Name()
|
||||
line := fobj.Line(node.Pos())
|
||||
|
||||
start, end := fobj.Line(node.Pos()), fobj.Line(node.End())
|
||||
line := strconv.Itoa(start)
|
||||
if start != end {
|
||||
line = fmt.Sprintf("%d-%d", start, end)
|
||||
}
|
||||
|
||||
if file, err := os.Open(fobj.Name()); err == nil {
|
||||
defer file.Close()
|
293
tools/vendor/github.com/GoASTScanner/gas/main.go
generated
vendored
293
tools/vendor/github.com/GoASTScanner/gas/main.go
generated
vendored
@ -1,293 +0,0 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas/output"
|
||||
)
|
||||
|
||||
type recursion bool
|
||||
|
||||
const (
|
||||
recurse recursion = true
|
||||
noRecurse recursion = false
|
||||
)
|
||||
|
||||
var (
|
||||
// #nosec flag
|
||||
flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
|
||||
|
||||
// format output
|
||||
flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")
|
||||
|
||||
// output file
|
||||
flagOutput = flag.String("out", "", "Set output file for results")
|
||||
|
||||
// config file
|
||||
flagConfig = flag.String("conf", "", "Path to optional config file")
|
||||
|
||||
// quiet
|
||||
flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
|
||||
|
||||
usageText = `
|
||||
GAS - Go AST Scanner
|
||||
|
||||
Gas analyzes Go source code to look for common programming mistakes that
|
||||
can lead to security problems.
|
||||
|
||||
USAGE:
|
||||
|
||||
# Check a single Go file
|
||||
$ gas example.go
|
||||
|
||||
# Check all files under the current directory and save results in
|
||||
# json format.
|
||||
$ gas -fmt=json -out=results.json ./...
|
||||
|
||||
# Run a specific set of rules (by default all rules will be run):
|
||||
$ gas -include=G101,G203,G401 ./...
|
||||
|
||||
# Run all rules except the provided
|
||||
$ gas -exclude=G101 ./...
|
||||
|
||||
`
|
||||
|
||||
logger *log.Logger
|
||||
)
|
||||
|
||||
func extendConfList(conf map[string]interface{}, name string, inputStr string) {
|
||||
if inputStr == "" {
|
||||
conf[name] = []string{}
|
||||
} else {
|
||||
input := strings.Split(inputStr, ",")
|
||||
if val, ok := conf[name]; ok {
|
||||
if data, ok := val.(*[]string); ok {
|
||||
conf[name] = append(*data, input...)
|
||||
} else {
|
||||
logger.Fatal("Config item must be a string list: ", name)
|
||||
}
|
||||
} else {
|
||||
conf[name] = input
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func buildConfig(incRules string, excRules string) map[string]interface{} {
|
||||
config := make(map[string]interface{})
|
||||
if flagConfig != nil && *flagConfig != "" { // parse config if we have one
|
||||
if data, err := ioutil.ReadFile(*flagConfig); err == nil {
|
||||
if err := json.Unmarshal(data, &(config)); err != nil {
|
||||
logger.Fatal("Could not parse JSON config: ", *flagConfig, ": ", err)
|
||||
}
|
||||
} else {
|
||||
logger.Fatal("Could not read config file: ", *flagConfig)
|
||||
}
|
||||
}
|
||||
|
||||
// add in CLI include and exclude data
|
||||
extendConfList(config, "include", incRules)
|
||||
extendConfList(config, "exclude", excRules)
|
||||
|
||||
// override ignoreNosec if given on CLI
|
||||
if flagIgnoreNoSec != nil {
|
||||
config["ignoreNosec"] = *flagIgnoreNoSec
|
||||
} else {
|
||||
val, ok := config["ignoreNosec"]
|
||||
if !ok {
|
||||
config["ignoreNosec"] = false
|
||||
} else if _, ok := val.(bool); !ok {
|
||||
logger.Fatal("Config value must be a bool: 'ignoreNosec'")
|
||||
}
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
// #nosec
|
||||
func usage() {
|
||||
|
||||
fmt.Fprintln(os.Stderr, usageText)
|
||||
fmt.Fprint(os.Stderr, "OPTIONS:\n\n")
|
||||
flag.PrintDefaults()
|
||||
fmt.Fprint(os.Stderr, "\n\nRULES:\n\n")
|
||||
|
||||
// sorted rule list for eas of reading
|
||||
rl := GetFullRuleList()
|
||||
keys := make([]string, 0, len(rl))
|
||||
for key := range rl {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
for _, k := range keys {
|
||||
v := rl[k]
|
||||
fmt.Fprintf(os.Stderr, "\t%s: %s\n", k, v.description)
|
||||
}
|
||||
fmt.Fprint(os.Stderr, "\n")
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
||||
// Setup usage description
|
||||
flag.Usage = usage
|
||||
|
||||
// Exclude files
|
||||
excluded := newFileList("*_test.go")
|
||||
flag.Var(excluded, "skip", "File pattern to exclude from scan. Uses simple * globs and requires full or partial match")
|
||||
|
||||
incRules := ""
|
||||
flag.StringVar(&incRules, "include", "", "Comma separated list of rules IDs to include. (see rule list)")
|
||||
|
||||
excRules := ""
|
||||
flag.StringVar(&excRules, "exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
|
||||
|
||||
// Custom commands / utilities to run instead of default analyzer
|
||||
tools := newUtils()
|
||||
flag.Var(tools, "tool", "GAS utilities to assist with rule development")
|
||||
|
||||
// Setup logging
|
||||
logger = log.New(os.Stderr, "[gas] ", log.LstdFlags)
|
||||
|
||||
// Parse command line arguments
|
||||
flag.Parse()
|
||||
|
||||
// Ensure at least one file was specified
|
||||
if flag.NArg() == 0 {
|
||||
|
||||
fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n")
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Run utils instead of analysis
|
||||
if len(tools.call) > 0 {
|
||||
tools.run(flag.Args()...)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// Setup analyzer
|
||||
config := buildConfig(incRules, excRules)
|
||||
analyzer := gas.NewAnalyzer(config, logger)
|
||||
AddRules(&analyzer, config)
|
||||
|
||||
toAnalyze := getFilesToAnalyze(flag.Args(), excluded)
|
||||
|
||||
for _, file := range toAnalyze {
|
||||
logger.Printf(`Processing "%s"...`, file)
|
||||
if err := analyzer.Process(file); err != nil {
|
||||
logger.Printf(`Failed to process: "%s"`, file)
|
||||
logger.Println(err)
|
||||
logger.Fatalf(`Halting execution.`)
|
||||
}
|
||||
}
|
||||
|
||||
issuesFound := len(analyzer.Issues) > 0
|
||||
// Exit quietly if nothing was found
|
||||
if !issuesFound && *flagQuiet {
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// Create output report
|
||||
if *flagOutput != "" {
|
||||
outfile, err := os.Create(*flagOutput)
|
||||
if err != nil {
|
||||
logger.Fatalf("Couldn't open: %s for writing. Reason - %s", *flagOutput, err)
|
||||
}
|
||||
defer outfile.Close()
|
||||
output.CreateReport(outfile, *flagFormat, &analyzer)
|
||||
} else {
|
||||
output.CreateReport(os.Stdout, *flagFormat, &analyzer)
|
||||
}
|
||||
|
||||
// Do we have an issue? If so exit 1
|
||||
if issuesFound {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// getFilesToAnalyze lists all files
|
||||
func getFilesToAnalyze(paths []string, excluded *fileList) []string {
|
||||
//log.Println("getFilesToAnalyze: start")
|
||||
var toAnalyze []string
|
||||
for _, relativePath := range paths {
|
||||
//log.Printf("getFilesToAnalyze: processing \"%s\"\n", path)
|
||||
// get the absolute path before doing anything else
|
||||
path, err := filepath.Abs(relativePath)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if filepath.Base(relativePath) == "..." {
|
||||
toAnalyze = append(
|
||||
toAnalyze,
|
||||
listFiles(filepath.Dir(path), recurse, excluded)...,
|
||||
)
|
||||
} else {
|
||||
var (
|
||||
finfo os.FileInfo
|
||||
err error
|
||||
)
|
||||
if finfo, err = os.Stat(path); err != nil {
|
||||
logger.Fatal(err)
|
||||
}
|
||||
if !finfo.IsDir() {
|
||||
if shouldInclude(path, excluded) {
|
||||
toAnalyze = append(toAnalyze, path)
|
||||
}
|
||||
} else {
|
||||
toAnalyze = listFiles(path, noRecurse, excluded)
|
||||
}
|
||||
}
|
||||
}
|
||||
//log.Println("getFilesToAnalyze: end")
|
||||
return toAnalyze
|
||||
}
|
||||
|
||||
// listFiles returns a list of all files found that pass the shouldInclude check.
|
||||
// If doRecursiveWalk it true, it will walk the tree rooted at absPath, otherwise it
|
||||
// will only include files directly within the dir referenced by absPath.
|
||||
func listFiles(absPath string, doRecursiveWalk recursion, excluded *fileList) []string {
|
||||
var files []string
|
||||
|
||||
walk := func(path string, info os.FileInfo, err error) error {
|
||||
if info.IsDir() && doRecursiveWalk == noRecurse {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
if shouldInclude(path, excluded) {
|
||||
files = append(files, path)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := filepath.Walk(absPath, walk); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
// shouldInclude checks if a specific path which is expected to reference
|
||||
// a regular file should be included
|
||||
func shouldInclude(path string, excluded *fileList) bool {
|
||||
return filepath.Ext(path) == ".go" && !excluded.Contains(path)
|
||||
}
|
74
tools/vendor/github.com/GoASTScanner/gas/output/formatter.go
generated
vendored
74
tools/vendor/github.com/GoASTScanner/gas/output/formatter.go
generated
vendored
@ -17,21 +17,30 @@ package output
|
||||
import (
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"encoding/xml"
|
||||
htmlTemplate "html/template"
|
||||
"io"
|
||||
"strconv"
|
||||
plainTemplate "text/template"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// The output format for reported issues
|
||||
// ReportFormat enumrates the output format for reported issues
|
||||
type ReportFormat int
|
||||
|
||||
const (
|
||||
// ReportText is the default format that writes to stdout
|
||||
ReportText ReportFormat = iota // Plain text format
|
||||
ReportJSON // Json format
|
||||
ReportCSV // CSV format
|
||||
|
||||
// ReportJSON set the output format to json
|
||||
ReportJSON // Json format
|
||||
|
||||
// ReportCSV set the output format to csv
|
||||
ReportCSV // CSV format
|
||||
|
||||
// ReportJUnitXML set the output format to junit xml
|
||||
ReportJUnitXML // JUnit XML format
|
||||
)
|
||||
|
||||
var text = `Results:
|
||||
@ -48,13 +57,28 @@ Summary:
|
||||
|
||||
`
|
||||
|
||||
func CreateReport(w io.Writer, format string, data *gas.Analyzer) error {
|
||||
type reportInfo struct {
|
||||
Issues []*gas.Issue
|
||||
Stats *gas.Metrics
|
||||
}
|
||||
|
||||
// CreateReport generates a report based for the supplied issues and metrics given
|
||||
// the specified format. The formats currently accepted are: json, csv, html and text.
|
||||
func CreateReport(w io.Writer, format string, issues []*gas.Issue, metrics *gas.Metrics) error {
|
||||
data := &reportInfo{
|
||||
Issues: issues,
|
||||
Stats: metrics,
|
||||
}
|
||||
var err error
|
||||
switch format {
|
||||
case "json":
|
||||
err = reportJSON(w, data)
|
||||
case "yaml":
|
||||
err = reportYAML(w, data)
|
||||
case "csv":
|
||||
err = reportCSV(w, data)
|
||||
case "junit-xml":
|
||||
err = reportJUnitXML(w, data)
|
||||
case "html":
|
||||
err = reportFromHTMLTemplate(w, html, data)
|
||||
case "text":
|
||||
@ -65,7 +89,7 @@ func CreateReport(w io.Writer, format string, data *gas.Analyzer) error {
|
||||
return err
|
||||
}
|
||||
|
||||
func reportJSON(w io.Writer, data *gas.Analyzer) error {
|
||||
func reportJSON(w io.Writer, data *reportInfo) error {
|
||||
raw, err := json.MarshalIndent(data, "", "\t")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
@ -78,13 +102,22 @@ func reportJSON(w io.Writer, data *gas.Analyzer) error {
|
||||
return err
|
||||
}
|
||||
|
||||
func reportCSV(w io.Writer, data *gas.Analyzer) error {
|
||||
func reportYAML(w io.Writer, data *reportInfo) error {
|
||||
raw, err := yaml.Marshal(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = w.Write(raw)
|
||||
return err
|
||||
}
|
||||
|
||||
func reportCSV(w io.Writer, data *reportInfo) error {
|
||||
out := csv.NewWriter(w)
|
||||
defer out.Flush()
|
||||
for _, issue := range data.Issues {
|
||||
err := out.Write([]string{
|
||||
issue.File,
|
||||
strconv.Itoa(issue.Line),
|
||||
issue.Line,
|
||||
issue.What,
|
||||
issue.Severity.String(),
|
||||
issue.Confidence.String(),
|
||||
@ -97,7 +130,26 @@ func reportCSV(w io.Writer, data *gas.Analyzer) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func reportFromPlaintextTemplate(w io.Writer, reportTemplate string, data *gas.Analyzer) error {
|
||||
func reportJUnitXML(w io.Writer, data *reportInfo) error {
|
||||
groupedData := groupDataByRules(data)
|
||||
junitXMLStruct := createJUnitXMLStruct(groupedData)
|
||||
|
||||
raw, err := xml.MarshalIndent(junitXMLStruct, "", "\t")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
xmlHeader := []byte("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
|
||||
raw = append(xmlHeader, raw...)
|
||||
_, err = w.Write(raw)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func reportFromPlaintextTemplate(w io.Writer, reportTemplate string, data *reportInfo) error {
|
||||
t, e := plainTemplate.New("gas").Parse(reportTemplate)
|
||||
if e != nil {
|
||||
return e
|
||||
@ -106,7 +158,7 @@ func reportFromPlaintextTemplate(w io.Writer, reportTemplate string, data *gas.A
|
||||
return t.Execute(w, data)
|
||||
}
|
||||
|
||||
func reportFromHTMLTemplate(w io.Writer, reportTemplate string, data *gas.Analyzer) error {
|
||||
func reportFromHTMLTemplate(w io.Writer, reportTemplate string, data *reportInfo) error {
|
||||
t, e := htmlTemplate.New("gas").Parse(reportTemplate)
|
||||
if e != nil {
|
||||
return e
|
||||
|
74
tools/vendor/github.com/GoASTScanner/gas/output/junit_xml_format.go
generated
vendored
Normal file
74
tools/vendor/github.com/GoASTScanner/gas/output/junit_xml_format.go
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
package output
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
htmlLib "html"
|
||||
"strconv"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type junitXMLReport struct {
|
||||
XMLName xml.Name `xml:"testsuites"`
|
||||
Testsuites []testsuite `xml:"testsuite"`
|
||||
}
|
||||
|
||||
type testsuite struct {
|
||||
XMLName xml.Name `xml:"testsuite"`
|
||||
Name string `xml:"name,attr"`
|
||||
Tests int `xml:"tests,attr"`
|
||||
Testcases []testcase `xml:"testcase"`
|
||||
}
|
||||
|
||||
type testcase struct {
|
||||
XMLName xml.Name `xml:"testcase"`
|
||||
Name string `xml:"name,attr"`
|
||||
Failure failure `xml:"failure"`
|
||||
}
|
||||
|
||||
type failure struct {
|
||||
XMLName xml.Name `xml:"failure"`
|
||||
Message string `xml:"message,attr"`
|
||||
Text string `xml:",innerxml"`
|
||||
}
|
||||
|
||||
func generatePlaintext(issue *gas.Issue) string {
|
||||
return "Results:\n" +
|
||||
"[" + issue.File + ":" + issue.Line + "] - " +
|
||||
issue.What + " (Confidence: " + strconv.Itoa(int(issue.Confidence)) +
|
||||
", Severity: " + strconv.Itoa(int(issue.Severity)) + ")\n" + "> " + htmlLib.EscapeString(issue.Code)
|
||||
}
|
||||
|
||||
func groupDataByRules(data *reportInfo) map[string][]*gas.Issue {
|
||||
groupedData := make(map[string][]*gas.Issue)
|
||||
for _, issue := range data.Issues {
|
||||
if _, ok := groupedData[issue.What]; ok {
|
||||
groupedData[issue.What] = append(groupedData[issue.What], issue)
|
||||
} else {
|
||||
groupedData[issue.What] = []*gas.Issue{issue}
|
||||
}
|
||||
}
|
||||
return groupedData
|
||||
}
|
||||
|
||||
func createJUnitXMLStruct(groupedData map[string][]*gas.Issue) junitXMLReport {
|
||||
var xmlReport junitXMLReport
|
||||
for what, issues := range groupedData {
|
||||
testsuite := testsuite{
|
||||
Name: what,
|
||||
Tests: len(issues),
|
||||
}
|
||||
for _, issue := range issues {
|
||||
testcase := testcase{
|
||||
Name: issue.File,
|
||||
Failure: failure{
|
||||
Message: "Found 1 vulnerability. See stacktrace for details.",
|
||||
Text: generatePlaintext(issue),
|
||||
},
|
||||
}
|
||||
testsuite.Testcases = append(testsuite.Testcases, testcase)
|
||||
}
|
||||
xmlReport.Testsuites = append(xmlReport.Testsuites, testsuite)
|
||||
}
|
||||
return xmlReport
|
||||
}
|
@ -12,11 +12,12 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
package gas
|
||||
|
||||
import "go/ast"
|
||||
|
||||
func resolveIdent(n *ast.Ident, c *Context) bool {
|
||||
|
||||
if n.Obj == nil || n.Obj.Kind != ast.Var {
|
||||
return true
|
||||
}
|
58
tools/vendor/github.com/GoASTScanner/gas/rule.go
generated
vendored
Normal file
58
tools/vendor/github.com/GoASTScanner/gas/rule.go
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package gas
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// The Rule interface used by all rules supported by GAS.
|
||||
type Rule interface {
|
||||
Match(ast.Node, *Context) (*Issue, error)
|
||||
}
|
||||
|
||||
// RuleBuilder is used to register a rule definition with the analyzer
|
||||
type RuleBuilder func(c Config) (Rule, []ast.Node)
|
||||
|
||||
// A RuleSet maps lists of rules to the type of AST node they should be run on.
|
||||
// The anaylzer will only invoke rules contained in the list associated with the
|
||||
// type of AST node it is currently visiting.
|
||||
type RuleSet map[reflect.Type][]Rule
|
||||
|
||||
// NewRuleSet constructs a new RuleSet
|
||||
func NewRuleSet() RuleSet {
|
||||
return make(RuleSet)
|
||||
}
|
||||
|
||||
// Register adds a trigger for the supplied rule for the the
|
||||
// specified ast nodes.
|
||||
func (r RuleSet) Register(rule Rule, nodes ...ast.Node) {
|
||||
for _, n := range nodes {
|
||||
t := reflect.TypeOf(n)
|
||||
if rules, ok := r[t]; ok {
|
||||
r[t] = append(rules, rule)
|
||||
} else {
|
||||
r[t] = []Rule{rule}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RegisteredFor will return all rules that are registered for a
|
||||
// specified ast node.
|
||||
func (r RuleSet) RegisteredFor(n ast.Node) []Rule {
|
||||
if rules, found := r[reflect.TypeOf(n)]; found {
|
||||
return rules
|
||||
}
|
||||
return []Rule{}
|
||||
}
|
91
tools/vendor/github.com/GoASTScanner/gas/rulelist.go
generated
vendored
91
tools/vendor/github.com/GoASTScanner/gas/rulelist.go
generated
vendored
@ -1,91 +0,0 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas/rules"
|
||||
)
|
||||
|
||||
type RuleInfo struct {
|
||||
description string
|
||||
build func(map[string]interface{}) (gas.Rule, []ast.Node)
|
||||
}
|
||||
|
||||
// GetFullRuleList get the full list of all rules available to GAS
|
||||
func GetFullRuleList() map[string]RuleInfo {
|
||||
return map[string]RuleInfo{
|
||||
// misc
|
||||
"G101": RuleInfo{"Look for hardcoded credentials", rules.NewHardcodedCredentials},
|
||||
"G102": RuleInfo{"Bind to all interfaces", rules.NewBindsToAllNetworkInterfaces},
|
||||
"G103": RuleInfo{"Audit the use of unsafe block", rules.NewUsingUnsafe},
|
||||
"G104": RuleInfo{"Audit errors not checked", rules.NewNoErrorCheck},
|
||||
"G105": RuleInfo{"Audit the use of big.Exp function", rules.NewUsingBigExp},
|
||||
|
||||
// injection
|
||||
"G201": RuleInfo{"SQL query construction using format string", rules.NewSqlStrFormat},
|
||||
"G202": RuleInfo{"SQL query construction using string concatenation", rules.NewSqlStrConcat},
|
||||
"G203": RuleInfo{"Use of unescaped data in HTML templates", rules.NewTemplateCheck},
|
||||
"G204": RuleInfo{"Audit use of command execution", rules.NewSubproc},
|
||||
|
||||
// filesystem
|
||||
"G301": RuleInfo{"Poor file permissions used when creating a directory", rules.NewMkdirPerms},
|
||||
"G302": RuleInfo{"Poor file permisions used when creation file or using chmod", rules.NewFilePerms},
|
||||
"G303": RuleInfo{"Creating tempfile using a predictable path", rules.NewBadTempFile},
|
||||
|
||||
// crypto
|
||||
"G401": RuleInfo{"Detect the usage of DES, RC4, or MD5", rules.NewUsesWeakCryptography},
|
||||
"G402": RuleInfo{"Look for bad TLS connection settings", rules.NewIntermediateTlsCheck},
|
||||
"G403": RuleInfo{"Ensure minimum RSA key length of 2048 bits", rules.NewWeakKeyStrength},
|
||||
"G404": RuleInfo{"Insecure random number source (rand)", rules.NewWeakRandCheck},
|
||||
|
||||
// blacklist
|
||||
"G501": RuleInfo{"Import blacklist: crypto/md5", rules.NewBlacklist_crypto_md5},
|
||||
"G502": RuleInfo{"Import blacklist: crypto/des", rules.NewBlacklist_crypto_des},
|
||||
"G503": RuleInfo{"Import blacklist: crypto/rc4", rules.NewBlacklist_crypto_rc4},
|
||||
"G504": RuleInfo{"Import blacklist: net/http/cgi", rules.NewBlacklist_net_http_cgi},
|
||||
}
|
||||
}
|
||||
|
||||
func AddRules(analyzer *gas.Analyzer, conf map[string]interface{}) {
|
||||
var all map[string]RuleInfo
|
||||
|
||||
inc := conf["include"].([]string)
|
||||
exc := conf["exclude"].([]string)
|
||||
|
||||
// add included rules
|
||||
if len(inc) == 0 {
|
||||
all = GetFullRuleList()
|
||||
} else {
|
||||
all = map[string]RuleInfo{}
|
||||
tmp := GetFullRuleList()
|
||||
for _, v := range inc {
|
||||
if val, ok := tmp[v]; ok {
|
||||
all[v] = val
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// remove excluded rules
|
||||
for _, v := range exc {
|
||||
delete(all, v)
|
||||
}
|
||||
|
||||
for _, v := range all {
|
||||
analyzer.AddRule(v.build(conf))
|
||||
}
|
||||
}
|
13
tools/vendor/github.com/GoASTScanner/gas/rules/big.go
generated
vendored
13
tools/vendor/github.com/GoASTScanner/gas/rules/big.go
generated
vendored
@ -15,24 +15,27 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"go/ast"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type UsingBigExp struct {
|
||||
type usingBigExp struct {
|
||||
gas.MetaData
|
||||
pkg string
|
||||
calls []string
|
||||
}
|
||||
|
||||
func (r *UsingBigExp) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
func (r *usingBigExp) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if _, matched := gas.MatchCallByType(n, c, r.pkg, r.calls...); matched {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
func NewUsingBigExp(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &UsingBigExp{
|
||||
|
||||
// NewUsingBigExp detects issues with modulus == 0 for Bignum
|
||||
func NewUsingBigExp(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &usingBigExp{
|
||||
pkg: "*math/big.Int",
|
||||
calls: []string{"Exp"},
|
||||
MetaData: gas.MetaData{
|
||||
|
33
tools/vendor/github.com/GoASTScanner/gas/rules/bind.go
generated
vendored
33
tools/vendor/github.com/GoASTScanner/gas/rules/bind.go
generated
vendored
@ -18,30 +18,37 @@ import (
|
||||
"go/ast"
|
||||
"regexp"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
// Looks for net.Listen("0.0.0.0") or net.Listen(":8080")
|
||||
type BindsToAllNetworkInterfaces struct {
|
||||
type bindsToAllNetworkInterfaces struct {
|
||||
gas.MetaData
|
||||
call *regexp.Regexp
|
||||
calls gas.CallList
|
||||
pattern *regexp.Regexp
|
||||
}
|
||||
|
||||
func (r *BindsToAllNetworkInterfaces) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if node := gas.MatchCall(n, r.call); node != nil {
|
||||
if arg, err := gas.GetString(node.Args[1]); err == nil {
|
||||
if r.pattern.MatchString(arg) {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
callExpr := r.calls.ContainsCallExpr(n, c)
|
||||
if callExpr == nil {
|
||||
return nil, nil
|
||||
}
|
||||
if arg, err := gas.GetString(callExpr.Args[1]); err == nil {
|
||||
if r.pattern.MatchString(arg) {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
return
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewBindsToAllNetworkInterfaces(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BindsToAllNetworkInterfaces{
|
||||
call: regexp.MustCompile(`^(net|tls)\.Listen$`),
|
||||
// NewBindsToAllNetworkInterfaces detects socket connections that are setup to
|
||||
// listen on all network interfaces.
|
||||
func NewBindsToAllNetworkInterfaces(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
calls := gas.NewCallList()
|
||||
calls.Add("net", "Listen")
|
||||
calls.Add("crypto/tls", "Listen")
|
||||
return &bindsToAllNetworkInterfaces{
|
||||
calls: calls,
|
||||
pattern: regexp.MustCompile(`^(0.0.0.0|:).*$`),
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
|
79
tools/vendor/github.com/GoASTScanner/gas/rules/blacklist.go
generated
vendored
79
tools/vendor/github.com/GoASTScanner/gas/rules/blacklist.go
generated
vendored
@ -16,64 +16,67 @@ package rules
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"strings"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type BlacklistImport struct {
|
||||
type blacklistedImport struct {
|
||||
gas.MetaData
|
||||
Path string
|
||||
Blacklisted map[string]string
|
||||
}
|
||||
|
||||
func (r *BlacklistImport) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
func unquote(original string) string {
|
||||
copy := strings.TrimSpace(original)
|
||||
copy = strings.TrimLeft(copy, `"`)
|
||||
return strings.TrimRight(copy, `"`)
|
||||
}
|
||||
|
||||
func (r *blacklistedImport) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if node, ok := n.(*ast.ImportSpec); ok {
|
||||
if r.Path == node.Path.Value && node.Name.String() != "_" {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
if description, ok := r.Blacklisted[unquote(node.Path.Value)]; ok {
|
||||
return gas.NewIssue(c, node, description, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewBlacklist_crypto_md5(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
// NewBlacklistedImports reports when a blacklisted import is being used.
|
||||
// Typically when a deprecated technology is being used.
|
||||
func NewBlacklistedImports(conf gas.Config, blacklist map[string]string) (gas.Rule, []ast.Node) {
|
||||
return &blacklistedImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
What: "Use of weak cryptographic primitive",
|
||||
},
|
||||
Path: `"crypto/md5"`,
|
||||
Blacklisted: blacklist,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
}
|
||||
|
||||
func NewBlacklist_crypto_des(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Confidence: gas.High,
|
||||
What: "Use of weak cryptographic primitive",
|
||||
},
|
||||
Path: `"crypto/des"`,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
// NewBlacklistedImportMD5 fails if MD5 is imported
|
||||
func NewBlacklistedImportMD5(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return NewBlacklistedImports(conf, map[string]string{
|
||||
"crypto/md5": "Blacklisted import crypto/md5: weak cryptographic primitive",
|
||||
})
|
||||
}
|
||||
|
||||
func NewBlacklist_crypto_rc4(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Confidence: gas.High,
|
||||
What: "Use of weak cryptographic primitive",
|
||||
},
|
||||
Path: `"crypto/rc4"`,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
// NewBlacklistedImportDES fails if DES is imported
|
||||
func NewBlacklistedImportDES(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return NewBlacklistedImports(conf, map[string]string{
|
||||
"crypto/des": "Blacklisted import crypto/des: weak cryptographic primitive",
|
||||
})
|
||||
}
|
||||
|
||||
func NewBlacklist_net_http_cgi(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Confidence: gas.High,
|
||||
What: "Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)",
|
||||
},
|
||||
Path: `"net/http/cgi"`,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
// NewBlacklistedImportRC4 fails if DES is imported
|
||||
func NewBlacklistedImportRC4(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return NewBlacklistedImports(conf, map[string]string{
|
||||
"crypto/rc4": "Blacklisted import crypto/rc4: weak cryptographic primitive",
|
||||
})
|
||||
}
|
||||
|
||||
// NewBlacklistedImportCGI fails if CGI is imported
|
||||
func NewBlacklistedImportCGI(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return NewBlacklistedImports(conf, map[string]string{
|
||||
"net/http/cgi": "Blacklisted import net/http/cgi: Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)",
|
||||
})
|
||||
}
|
||||
|
20
tools/vendor/github.com/GoASTScanner/gas/rules/errors.go
generated
vendored
20
tools/vendor/github.com/GoASTScanner/gas/rules/errors.go
generated
vendored
@ -15,12 +15,13 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"go/ast"
|
||||
"go/types"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type NoErrorCheck struct {
|
||||
type noErrorCheck struct {
|
||||
gas.MetaData
|
||||
whitelist gas.CallList
|
||||
}
|
||||
@ -29,7 +30,7 @@ func returnsError(callExpr *ast.CallExpr, ctx *gas.Context) int {
|
||||
if tv := ctx.Info.TypeOf(callExpr); tv != nil {
|
||||
switch t := tv.(type) {
|
||||
case *types.Tuple:
|
||||
for pos := 0; pos < t.Len(); pos += 1 {
|
||||
for pos := 0; pos < t.Len(); pos++ {
|
||||
variable := t.At(pos)
|
||||
if variable != nil && variable.Type().String() == "error" {
|
||||
return pos
|
||||
@ -44,11 +45,11 @@ func returnsError(callExpr *ast.CallExpr, ctx *gas.Context) int {
|
||||
return -1
|
||||
}
|
||||
|
||||
func (r *NoErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
func (r *noErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
switch stmt := n.(type) {
|
||||
case *ast.AssignStmt:
|
||||
for _, expr := range stmt.Rhs {
|
||||
if callExpr, ok := expr.(*ast.CallExpr); ok && !r.whitelist.ContainsCallExpr(callExpr, ctx) {
|
||||
if callExpr, ok := expr.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(expr, ctx) == nil {
|
||||
pos := returnsError(callExpr, ctx)
|
||||
if pos < 0 || pos >= len(stmt.Lhs) {
|
||||
return nil, nil
|
||||
@ -59,7 +60,7 @@ func (r *NoErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
}
|
||||
}
|
||||
case *ast.ExprStmt:
|
||||
if callExpr, ok := stmt.X.(*ast.CallExpr); ok && !r.whitelist.ContainsCallExpr(callExpr, ctx) {
|
||||
if callExpr, ok := stmt.X.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(stmt.X, ctx) == nil {
|
||||
pos := returnsError(callExpr, ctx)
|
||||
if pos >= 0 {
|
||||
return gas.NewIssue(ctx, n, r.What, r.Severity, r.Confidence), nil
|
||||
@ -69,13 +70,14 @@ func (r *NoErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewNoErrorCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// NewNoErrorCheck detects if the returned error is unchecked
|
||||
func NewNoErrorCheck(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
|
||||
// TODO(gm) Come up with sensible defaults here. Or flip it to use a
|
||||
// black list instead.
|
||||
whitelist := gas.NewCallList()
|
||||
whitelist.AddAll("bytes.Buffer", "Write", "WriteByte", "WriteRune", "WriteString")
|
||||
whitelist.AddAll("fmt", "Print", "Printf", "Println")
|
||||
whitelist.AddAll("fmt", "Print", "Printf", "Println", "Fprint", "Fprintf", "Fprintln")
|
||||
whitelist.Add("io.PipeWriter", "CloseWithError")
|
||||
|
||||
if configured, ok := conf["G104"]; ok {
|
||||
@ -85,7 +87,7 @@ func NewNoErrorCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
}
|
||||
}
|
||||
}
|
||||
return &NoErrorCheck{
|
||||
return &noErrorCheck{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Low,
|
||||
Confidence: gas.High,
|
||||
|
22
tools/vendor/github.com/GoASTScanner/gas/rules/fileperms.go
generated
vendored
22
tools/vendor/github.com/GoASTScanner/gas/rules/fileperms.go
generated
vendored
@ -19,10 +19,10 @@ import (
|
||||
"go/ast"
|
||||
"strconv"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type FilePermissions struct {
|
||||
type filePermissions struct {
|
||||
gas.MetaData
|
||||
mode int64
|
||||
pkg string
|
||||
@ -30,7 +30,7 @@ type FilePermissions struct {
|
||||
}
|
||||
|
||||
func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMode int64) int64 {
|
||||
var mode int64 = defaultMode
|
||||
var mode = defaultMode
|
||||
if value, ok := conf[configKey]; ok {
|
||||
switch value.(type) {
|
||||
case int64:
|
||||
@ -46,7 +46,7 @@ func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMod
|
||||
return mode
|
||||
}
|
||||
|
||||
func (r *FilePermissions) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
func (r *filePermissions) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if callexpr, matched := gas.MatchCallByPackage(n, c, r.pkg, r.calls...); matched {
|
||||
modeArg := callexpr.Args[len(callexpr.Args)-1]
|
||||
if mode, err := gas.GetInt(modeArg); err == nil && mode > r.mode {
|
||||
@ -56,9 +56,11 @@ func (r *FilePermissions) Match(n ast.Node, c *gas.Context) (*gas.Issue, error)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewFilePerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// NewFilePerms creates a rule to detect file creation with a more permissive than configured
|
||||
// permission mask.
|
||||
func NewFilePerms(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
mode := getConfiguredMode(conf, "G302", 0600)
|
||||
return &FilePermissions{
|
||||
return &filePermissions{
|
||||
mode: mode,
|
||||
pkg: "os",
|
||||
calls: []string{"OpenFile", "Chmod"},
|
||||
@ -70,9 +72,11 @@ func NewFilePerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
}, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
}
|
||||
|
||||
func NewMkdirPerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
mode := getConfiguredMode(conf, "G301", 0700)
|
||||
return &FilePermissions{
|
||||
// NewMkdirPerms creates a rule to detect directory creation with more permissive than
|
||||
// configured permission mask.
|
||||
func NewMkdirPerms(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
mode := getConfiguredMode(conf, "G301", 0750)
|
||||
return &filePermissions{
|
||||
mode: mode,
|
||||
pkg: "os",
|
||||
calls: []string{"Mkdir", "MkdirAll"},
|
||||
|
24
tools/vendor/github.com/GoASTScanner/gas/rules/hardcoded_credentials.go
generated
vendored
24
tools/vendor/github.com/GoASTScanner/gas/rules/hardcoded_credentials.go
generated
vendored
@ -15,16 +15,16 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"regexp"
|
||||
|
||||
"github.com/nbutton23/zxcvbn-go"
|
||||
"strconv"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
"github.com/nbutton23/zxcvbn-go"
|
||||
)
|
||||
|
||||
type Credentials struct {
|
||||
type credentials struct {
|
||||
gas.MetaData
|
||||
pattern *regexp.Regexp
|
||||
entropyThreshold float64
|
||||
@ -40,7 +40,7 @@ func truncate(s string, n int) string {
|
||||
return s[:n]
|
||||
}
|
||||
|
||||
func (r *Credentials) isHighEntropyString(str string) bool {
|
||||
func (r *credentials) isHighEntropyString(str string) bool {
|
||||
s := truncate(str, r.truncate)
|
||||
info := zxcvbn.PasswordStrength(s, []string{})
|
||||
entropyPerChar := info.Entropy / float64(len(s))
|
||||
@ -49,7 +49,7 @@ func (r *Credentials) isHighEntropyString(str string) bool {
|
||||
entropyPerChar >= r.perCharThreshold))
|
||||
}
|
||||
|
||||
func (r *Credentials) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
func (r *credentials) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
switch node := n.(type) {
|
||||
case *ast.AssignStmt:
|
||||
return r.matchAssign(node, ctx)
|
||||
@ -59,7 +59,7 @@ func (r *Credentials) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *Credentials) matchAssign(assign *ast.AssignStmt, ctx *gas.Context) (*gas.Issue, error) {
|
||||
func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gas.Context) (*gas.Issue, error) {
|
||||
for _, i := range assign.Lhs {
|
||||
if ident, ok := i.(*ast.Ident); ok {
|
||||
if r.pattern.MatchString(ident.Name) {
|
||||
@ -76,7 +76,7 @@ func (r *Credentials) matchAssign(assign *ast.AssignStmt, ctx *gas.Context) (*ga
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *Credentials) matchGenDecl(decl *ast.GenDecl, ctx *gas.Context) (*gas.Issue, error) {
|
||||
func (r *credentials) matchGenDecl(decl *ast.GenDecl, ctx *gas.Context) (*gas.Issue, error) {
|
||||
if decl.Tok != token.CONST && decl.Tok != token.VAR {
|
||||
return nil, nil
|
||||
}
|
||||
@ -100,12 +100,14 @@ func (r *Credentials) matchGenDecl(decl *ast.GenDecl, ctx *gas.Context) (*gas.Is
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewHardcodedCredentials(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// NewHardcodedCredentials attempts to find high entropy string constants being
|
||||
// assigned to variables that appear to be related to credentials.
|
||||
func NewHardcodedCredentials(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
pattern := `(?i)passwd|pass|password|pwd|secret|token`
|
||||
entropyThreshold := 80.0
|
||||
perCharThreshold := 3.0
|
||||
ignoreEntropy := false
|
||||
var truncateString int = 16
|
||||
var truncateString = 16
|
||||
if val, ok := conf["G101"]; ok {
|
||||
conf := val.(map[string]string)
|
||||
if configPattern, ok := conf["pattern"]; ok {
|
||||
@ -133,7 +135,7 @@ func NewHardcodedCredentials(conf map[string]interface{}) (gas.Rule, []ast.Node)
|
||||
}
|
||||
}
|
||||
|
||||
return &Credentials{
|
||||
return &credentials{
|
||||
pattern: regexp.MustCompile(pattern),
|
||||
entropyThreshold: entropyThreshold,
|
||||
perCharThreshold: perCharThreshold,
|
||||
|
11
tools/vendor/github.com/GoASTScanner/gas/rules/rand.go
generated
vendored
11
tools/vendor/github.com/GoASTScanner/gas/rules/rand.go
generated
vendored
@ -17,16 +17,16 @@ package rules
|
||||
import (
|
||||
"go/ast"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type WeakRand struct {
|
||||
type weakRand struct {
|
||||
gas.MetaData
|
||||
funcNames []string
|
||||
packagePath string
|
||||
}
|
||||
|
||||
func (w *WeakRand) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
func (w *weakRand) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
for _, funcName := range w.funcNames {
|
||||
if _, matched := gas.MatchCallByPackage(n, c, w.packagePath, funcName); matched {
|
||||
return gas.NewIssue(c, n, w.What, w.Severity, w.Confidence), nil
|
||||
@ -36,8 +36,9 @@ func (w *WeakRand) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewWeakRandCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &WeakRand{
|
||||
// NewWeakRandCheck detects the use of random number generator that isn't cryptographically secure
|
||||
func NewWeakRandCheck(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &weakRand{
|
||||
funcNames: []string{"Read", "Int"},
|
||||
packagePath: "math/rand",
|
||||
MetaData: gas.MetaData{
|
||||
|
26
tools/vendor/github.com/GoASTScanner/gas/rules/rsa.go
generated
vendored
26
tools/vendor/github.com/GoASTScanner/gas/rules/rsa.go
generated
vendored
@ -17,31 +17,33 @@ package rules
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"regexp"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type WeakKeyStrength struct {
|
||||
type weakKeyStrength struct {
|
||||
gas.MetaData
|
||||
pattern *regexp.Regexp
|
||||
bits int
|
||||
calls gas.CallList
|
||||
bits int
|
||||
}
|
||||
|
||||
func (w *WeakKeyStrength) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if node := gas.MatchCall(n, w.pattern); node != nil {
|
||||
if bits, err := gas.GetInt(node.Args[1]); err == nil && bits < (int64)(w.bits) {
|
||||
func (w *weakKeyStrength) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if callExpr := w.calls.ContainsCallExpr(n, c); callExpr != nil {
|
||||
if bits, err := gas.GetInt(callExpr.Args[1]); err == nil && bits < (int64)(w.bits) {
|
||||
return gas.NewIssue(c, n, w.What, w.Severity, w.Confidence), nil
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewWeakKeyStrength(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// NewWeakKeyStrength builds a rule that detects RSA keys < 2048 bits
|
||||
func NewWeakKeyStrength(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
calls := gas.NewCallList()
|
||||
calls.Add("crypto/rsa", "GenerateKey")
|
||||
bits := 2048
|
||||
return &WeakKeyStrength{
|
||||
pattern: regexp.MustCompile(`^rsa\.GenerateKey$`),
|
||||
bits: bits,
|
||||
return &weakKeyStrength{
|
||||
calls: calls,
|
||||
bits: bits,
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
|
102
tools/vendor/github.com/GoASTScanner/gas/rules/rulelist.go
generated
vendored
Normal file
102
tools/vendor/github.com/GoASTScanner/gas/rules/rulelist.go
generated
vendored
Normal file
@ -0,0 +1,102 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
// RuleDefinition contains the description of a rule and a mechanism to
|
||||
// create it.
|
||||
type RuleDefinition struct {
|
||||
Description string
|
||||
Create gas.RuleBuilder
|
||||
}
|
||||
|
||||
// RuleList is a mapping of rule ID's to rule definitions
|
||||
type RuleList map[string]RuleDefinition
|
||||
|
||||
// Builders returns all the create methods for a given rule list
|
||||
func (rl RuleList) Builders() []gas.RuleBuilder {
|
||||
builders := make([]gas.RuleBuilder, 0, len(rl))
|
||||
for _, def := range rl {
|
||||
builders = append(builders, def.Create)
|
||||
}
|
||||
return builders
|
||||
}
|
||||
|
||||
// RuleFilter can be used to include or exclude a rule depending on the return
|
||||
// value of the function
|
||||
type RuleFilter func(string) bool
|
||||
|
||||
// NewRuleFilter is a closure that will include/exclude the rule ID's based on
|
||||
// the supplied boolean value.
|
||||
func NewRuleFilter(action bool, ruleIDs ...string) RuleFilter {
|
||||
rulelist := make(map[string]bool)
|
||||
for _, rule := range ruleIDs {
|
||||
rulelist[rule] = true
|
||||
}
|
||||
return func(rule string) bool {
|
||||
if _, found := rulelist[rule]; found {
|
||||
return action
|
||||
}
|
||||
return !action
|
||||
}
|
||||
}
|
||||
|
||||
// Generate the list of rules to use
|
||||
func Generate(filters ...RuleFilter) RuleList {
|
||||
rules := map[string]RuleDefinition{
|
||||
// misc
|
||||
"G101": {"Look for hardcoded credentials", NewHardcodedCredentials},
|
||||
"G102": {"Bind to all interfaces", NewBindsToAllNetworkInterfaces},
|
||||
"G103": {"Audit the use of unsafe block", NewUsingUnsafe},
|
||||
"G104": {"Audit errors not checked", NewNoErrorCheck},
|
||||
"G105": {"Audit the use of big.Exp function", NewUsingBigExp},
|
||||
"G106": {"Audit the use of ssh.InsecureIgnoreHostKey function", NewSSHHostKey},
|
||||
|
||||
// injection
|
||||
"G201": {"SQL query construction using format string", NewSQLStrFormat},
|
||||
"G202": {"SQL query construction using string concatenation", NewSQLStrConcat},
|
||||
"G203": {"Use of unescaped data in HTML templates", NewTemplateCheck},
|
||||
"G204": {"Audit use of command execution", NewSubproc},
|
||||
|
||||
// filesystem
|
||||
"G301": {"Poor file permissions used when creating a directory", NewMkdirPerms},
|
||||
"G302": {"Poor file permisions used when creation file or using chmod", NewFilePerms},
|
||||
"G303": {"Creating tempfile using a predictable path", NewBadTempFile},
|
||||
|
||||
// crypto
|
||||
"G401": {"Detect the usage of DES, RC4, or MD5", NewUsesWeakCryptography},
|
||||
"G402": {"Look for bad TLS connection settings", NewIntermediateTLSCheck},
|
||||
"G403": {"Ensure minimum RSA key length of 2048 bits", NewWeakKeyStrength},
|
||||
"G404": {"Insecure random number source (rand)", NewWeakRandCheck},
|
||||
|
||||
// blacklist
|
||||
"G501": {"Import blacklist: crypto/md5", NewBlacklistedImportMD5},
|
||||
"G502": {"Import blacklist: crypto/des", NewBlacklistedImportDES},
|
||||
"G503": {"Import blacklist: crypto/rc4", NewBlacklistedImportRC4},
|
||||
"G504": {"Import blacklist: net/http/cgi", NewBlacklistedImportCGI},
|
||||
}
|
||||
|
||||
for rule := range rules {
|
||||
for _, filter := range filters {
|
||||
if filter(rule) {
|
||||
delete(rules, rule)
|
||||
}
|
||||
}
|
||||
}
|
||||
return rules
|
||||
}
|
74
tools/vendor/github.com/GoASTScanner/gas/rules/sql.go
generated
vendored
74
tools/vendor/github.com/GoASTScanner/gas/rules/sql.go
generated
vendored
@ -18,20 +18,32 @@ import (
|
||||
"go/ast"
|
||||
"regexp"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type SqlStatement struct {
|
||||
type sqlStatement struct {
|
||||
gas.MetaData
|
||||
pattern *regexp.Regexp
|
||||
|
||||
// Contains a list of patterns which must all match for the rule to match.
|
||||
patterns []*regexp.Regexp
|
||||
}
|
||||
|
||||
type SqlStrConcat struct {
|
||||
SqlStatement
|
||||
// See if the string matches the patterns for the statement.
|
||||
func (s sqlStatement) MatchPatterns(str string) bool {
|
||||
for _, pattern := range s.patterns {
|
||||
if !pattern.MatchString(str) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
type sqlStrConcat struct {
|
||||
sqlStatement
|
||||
}
|
||||
|
||||
// see if we can figure out what it is
|
||||
func (s *SqlStrConcat) checkObject(n *ast.Ident) bool {
|
||||
func (s *sqlStrConcat) checkObject(n *ast.Ident) bool {
|
||||
if n.Obj != nil {
|
||||
return n.Obj.Kind != ast.Var && n.Obj.Kind != ast.Fun
|
||||
}
|
||||
@ -39,10 +51,13 @@ func (s *SqlStrConcat) checkObject(n *ast.Ident) bool {
|
||||
}
|
||||
|
||||
// Look for "SELECT * FROM table WHERE " + " ' OR 1=1"
|
||||
func (s *SqlStrConcat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
func (s *sqlStrConcat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if node, ok := n.(*ast.BinaryExpr); ok {
|
||||
if start, ok := node.X.(*ast.BasicLit); ok {
|
||||
if str, e := gas.GetString(start); s.pattern.MatchString(str) && e == nil {
|
||||
if str, e := gas.GetString(start); e == nil {
|
||||
if !s.MatchPatterns(str) {
|
||||
return nil, nil
|
||||
}
|
||||
if _, ok := node.Y.(*ast.BasicLit); ok {
|
||||
return nil, nil // string cat OK
|
||||
}
|
||||
@ -56,10 +71,13 @@ func (s *SqlStrConcat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewSqlStrConcat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &SqlStrConcat{
|
||||
SqlStatement: SqlStatement{
|
||||
pattern: regexp.MustCompile(`(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `),
|
||||
// NewSQLStrConcat looks for cases where we are building SQL strings via concatenation
|
||||
func NewSQLStrConcat(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &sqlStrConcat{
|
||||
sqlStatement: sqlStatement{
|
||||
patterns: []*regexp.Regexp{
|
||||
regexp.MustCompile(`(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `),
|
||||
},
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
@ -69,31 +87,39 @@ func NewSqlStrConcat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
}, []ast.Node{(*ast.BinaryExpr)(nil)}
|
||||
}
|
||||
|
||||
type SqlStrFormat struct {
|
||||
SqlStatement
|
||||
call *regexp.Regexp
|
||||
type sqlStrFormat struct {
|
||||
sqlStatement
|
||||
calls gas.CallList
|
||||
}
|
||||
|
||||
// Looks for "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)"
|
||||
func (s *SqlStrFormat) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if node := gas.MatchCall(n, s.call); node != nil {
|
||||
if arg, e := gas.GetString(node.Args[0]); s.pattern.MatchString(arg) && e == nil {
|
||||
func (s *sqlStrFormat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
|
||||
// TODO(gm) improve confidence if database/sql is being used
|
||||
if node := s.calls.ContainsCallExpr(n, c); node != nil {
|
||||
if arg, e := gas.GetString(node.Args[0]); s.MatchPatterns(arg) && e == nil {
|
||||
return gas.NewIssue(c, n, s.What, s.Severity, s.Confidence), nil
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewSqlStrFormat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &SqlStrFormat{
|
||||
call: regexp.MustCompile(`^fmt\.Sprintf$`),
|
||||
SqlStatement: SqlStatement{
|
||||
pattern: regexp.MustCompile("(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "),
|
||||
// NewSQLStrFormat looks for cases where we're building SQL query strings using format strings
|
||||
func NewSQLStrFormat(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
rule := &sqlStrFormat{
|
||||
calls: gas.NewCallList(),
|
||||
sqlStatement: sqlStatement{
|
||||
patterns: []*regexp.Regexp{
|
||||
regexp.MustCompile("(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "),
|
||||
regexp.MustCompile("%[^bdoxXfFp]"),
|
||||
},
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
What: "SQL string formatting",
|
||||
},
|
||||
},
|
||||
}, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
}
|
||||
rule.calls.AddAll("fmt", "Sprint", "Sprintf", "Sprintln")
|
||||
return rule, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
}
|
||||
|
33
tools/vendor/github.com/GoASTScanner/gas/rules/ssh.go
generated
vendored
Normal file
33
tools/vendor/github.com/GoASTScanner/gas/rules/ssh.go
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type sshHostKey struct {
|
||||
gas.MetaData
|
||||
pkg string
|
||||
calls []string
|
||||
}
|
||||
|
||||
func (r *sshHostKey) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if _, matches := gas.MatchCallByPackage(n, c, r.pkg, r.calls...); matches {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// NewSSHHostKey rule detects the use of insecure ssh HostKeyCallback.
|
||||
func NewSSHHostKey(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &sshHostKey{
|
||||
pkg: "golang.org/x/crypto/ssh",
|
||||
calls: []string{"InsecureIgnoreHostKey"},
|
||||
MetaData: gas.MetaData{
|
||||
What: "Use of ssh InsecureIgnoreHostKey should be audited",
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
},
|
||||
}, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
}
|
52
tools/vendor/github.com/GoASTScanner/gas/rules/subproc.go
generated
vendored
52
tools/vendor/github.com/GoASTScanner/gas/rules/subproc.go
generated
vendored
@ -16,41 +16,43 @@ package rules
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"regexp"
|
||||
"strings"
|
||||
"go/types"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type Subprocess struct {
|
||||
pattern *regexp.Regexp
|
||||
type subprocess struct {
|
||||
gas.CallList
|
||||
}
|
||||
|
||||
func (r *Subprocess) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if node := gas.MatchCall(n, r.pattern); node != nil {
|
||||
// TODO(gm) The only real potential for command injection with a Go project
|
||||
// is something like this:
|
||||
//
|
||||
// syscall.Exec("/bin/sh", []string{"-c", tainted})
|
||||
//
|
||||
// E.g. Input is correctly escaped but the execution context being used
|
||||
// is unsafe. For example:
|
||||
//
|
||||
// syscall.Exec("echo", "foobar" + tainted)
|
||||
func (r *subprocess) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if node := r.ContainsCallExpr(n, c); node != nil {
|
||||
for _, arg := range node.Args {
|
||||
if !gas.TryResolve(arg, c) {
|
||||
what := "Subprocess launching with variable."
|
||||
return gas.NewIssue(c, n, what, gas.High, gas.High), nil
|
||||
if ident, ok := arg.(*ast.Ident); ok {
|
||||
obj := c.Info.ObjectOf(ident)
|
||||
if _, ok := obj.(*types.Var); ok && !gas.TryResolve(ident, c) {
|
||||
return gas.NewIssue(c, n, "Subprocess launched with variable", gas.Medium, gas.High), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// call with partially qualified command
|
||||
if str, err := gas.GetString(node.Args[0]); err == nil {
|
||||
if !strings.HasPrefix(str, "/") {
|
||||
what := "Subprocess launching with partial path."
|
||||
return gas.NewIssue(c, n, what, gas.Medium, gas.High), nil
|
||||
}
|
||||
}
|
||||
|
||||
what := "Subprocess launching should be audited."
|
||||
return gas.NewIssue(c, n, what, gas.Low, gas.High), nil
|
||||
return gas.NewIssue(c, n, "Subprocess launching should be audited", gas.Low, gas.High), nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewSubproc(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &Subprocess{
|
||||
pattern: regexp.MustCompile(`^exec\.Command|syscall\.Exec$`),
|
||||
}, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
// NewSubproc detects cases where we are forking out to an external process
|
||||
func NewSubproc(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
rule := &subprocess{gas.NewCallList()}
|
||||
rule.Add("os/exec", "Command")
|
||||
rule.Add("syscall", "Exec")
|
||||
return rule, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
}
|
||||
|
24
tools/vendor/github.com/GoASTScanner/gas/rules/tempfiles.go
generated
vendored
24
tools/vendor/github.com/GoASTScanner/gas/rules/tempfiles.go
generated
vendored
@ -18,17 +18,17 @@ import (
|
||||
"go/ast"
|
||||
"regexp"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type BadTempFile struct {
|
||||
type badTempFile struct {
|
||||
gas.MetaData
|
||||
args *regexp.Regexp
|
||||
call *regexp.Regexp
|
||||
calls gas.CallList
|
||||
args *regexp.Regexp
|
||||
}
|
||||
|
||||
func (t *BadTempFile) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if node := gas.MatchCall(n, t.call); node != nil {
|
||||
func (t *badTempFile) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if node := t.calls.ContainsCallExpr(n, c); node != nil {
|
||||
if arg, e := gas.GetString(node.Args[0]); t.args.MatchString(arg) && e == nil {
|
||||
return gas.NewIssue(c, n, t.What, t.Severity, t.Confidence), nil
|
||||
}
|
||||
@ -36,10 +36,14 @@ func (t *BadTempFile) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err erro
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewBadTempFile(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BadTempFile{
|
||||
call: regexp.MustCompile(`ioutil\.WriteFile|os\.Create`),
|
||||
args: regexp.MustCompile(`^/tmp/.*$|^/var/tmp/.*$`),
|
||||
// NewBadTempFile detects direct writes to predictable path in temporary directory
|
||||
func NewBadTempFile(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
calls := gas.NewCallList()
|
||||
calls.Add("io/ioutil", "WriteFile")
|
||||
calls.Add("os", "Create")
|
||||
return &badTempFile{
|
||||
calls: calls,
|
||||
args: regexp.MustCompile(`^/tmp/.*$|^/var/tmp/.*$`),
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
|
25
tools/vendor/github.com/GoASTScanner/gas/rules/templates.go
generated
vendored
25
tools/vendor/github.com/GoASTScanner/gas/rules/templates.go
generated
vendored
@ -16,18 +16,17 @@ package rules
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"regexp"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type TemplateCheck struct {
|
||||
type templateCheck struct {
|
||||
gas.MetaData
|
||||
call *regexp.Regexp
|
||||
calls gas.CallList
|
||||
}
|
||||
|
||||
func (t *TemplateCheck) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if node := gas.MatchCall(n, t.call); node != nil {
|
||||
func (t *templateCheck) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if node := t.calls.ContainsCallExpr(n, c); node != nil {
|
||||
for _, arg := range node.Args {
|
||||
if _, ok := arg.(*ast.BasicLit); !ok { // basic lits are safe
|
||||
return gas.NewIssue(c, n, t.What, t.Severity, t.Confidence), nil
|
||||
@ -37,9 +36,17 @@ func (t *TemplateCheck) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err er
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewTemplateCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &TemplateCheck{
|
||||
call: regexp.MustCompile(`^template\.(HTML|JS|URL)$`),
|
||||
// NewTemplateCheck constructs the template check rule. This rule is used to
|
||||
// find use of tempaltes where HTML/JS escaping is not being used
|
||||
func NewTemplateCheck(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
|
||||
calls := gas.NewCallList()
|
||||
calls.Add("html/template", "HTML")
|
||||
calls.Add("html/template", "HTMLAttr")
|
||||
calls.Add("html/template", "JS")
|
||||
calls.Add("html/template", "URL")
|
||||
return &templateCheck{
|
||||
calls: calls,
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.Low,
|
||||
|
124
tools/vendor/github.com/GoASTScanner/gas/rules/tls.go
generated
vendored
124
tools/vendor/github.com/GoASTScanner/gas/rules/tls.go
generated
vendored
@ -12,22 +12,22 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//go:generate tlsconfig
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"reflect"
|
||||
"regexp"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type InsecureConfigTLS struct {
|
||||
MinVersion int16
|
||||
MaxVersion int16
|
||||
pattern *regexp.Regexp
|
||||
goodCiphers []string
|
||||
type insecureConfigTLS struct {
|
||||
MinVersion int16
|
||||
MaxVersion int16
|
||||
requiredType string
|
||||
goodCiphers []string
|
||||
}
|
||||
|
||||
func stringInSlice(a string, list []string) bool {
|
||||
@ -39,15 +39,14 @@ func stringInSlice(a string, list []string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (t *InsecureConfigTLS) processTlsCipherSuites(n ast.Node, c *gas.Context) *gas.Issue {
|
||||
a := reflect.TypeOf(&ast.KeyValueExpr{})
|
||||
b := reflect.TypeOf(&ast.CompositeLit{})
|
||||
if node, ok := gas.SimpleSelect(n, a, b).(*ast.CompositeLit); ok {
|
||||
for _, elt := range node.Elts {
|
||||
if ident, ok := elt.(*ast.SelectorExpr); ok {
|
||||
func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gas.Context) *gas.Issue {
|
||||
|
||||
if ciphers, ok := n.(*ast.CompositeLit); ok {
|
||||
for _, cipher := range ciphers.Elts {
|
||||
if ident, ok := cipher.(*ast.SelectorExpr); ok {
|
||||
if !stringInSlice(ident.Sel.Name, t.goodCiphers) {
|
||||
str := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name)
|
||||
return gas.NewIssue(c, n, str, gas.High, gas.High)
|
||||
err := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name)
|
||||
return gas.NewIssue(c, ident, err, gas.High, gas.High)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -55,9 +54,10 @@ func (t *InsecureConfigTLS) processTlsCipherSuites(n ast.Node, c *gas.Context) *
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *InsecureConfigTLS) processTlsConfVal(n *ast.KeyValueExpr, c *gas.Context) *gas.Issue {
|
||||
func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gas.Context) *gas.Issue {
|
||||
if ident, ok := n.Key.(*ast.Ident); ok {
|
||||
switch ident.Name {
|
||||
|
||||
case "InsecureSkipVerify":
|
||||
if node, ok := n.Value.(*ast.Ident); ok {
|
||||
if node.Name != "false" {
|
||||
@ -97,7 +97,7 @@ func (t *InsecureConfigTLS) processTlsConfVal(n *ast.KeyValueExpr, c *gas.Contex
|
||||
}
|
||||
|
||||
case "CipherSuites":
|
||||
if ret := t.processTlsCipherSuites(n, c); ret != nil {
|
||||
if ret := t.processTLSCipherSuites(n.Value, c); ret != nil {
|
||||
return ret
|
||||
}
|
||||
|
||||
@ -107,85 +107,19 @@ func (t *InsecureConfigTLS) processTlsConfVal(n *ast.KeyValueExpr, c *gas.Contex
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *InsecureConfigTLS) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if node := gas.MatchCompLit(n, t.pattern); node != nil {
|
||||
for _, elt := range node.Elts {
|
||||
if kve, ok := elt.(*ast.KeyValueExpr); ok {
|
||||
gi = t.processTlsConfVal(kve, c)
|
||||
if gi != nil {
|
||||
break
|
||||
func (t *insecureConfigTLS) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if complit, ok := n.(*ast.CompositeLit); ok && complit.Type != nil {
|
||||
actualType := c.Info.TypeOf(complit.Type)
|
||||
if actualType != nil && actualType.String() == t.requiredType {
|
||||
for _, elt := range complit.Elts {
|
||||
if kve, ok := elt.(*ast.KeyValueExpr); ok {
|
||||
issue := t.processTLSConfVal(kve, c)
|
||||
if issue != nil {
|
||||
return issue, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func NewModernTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
|
||||
return &InsecureConfigTLS{
|
||||
pattern: regexp.MustCompile(`^tls\.Config$`),
|
||||
MinVersion: 0x0303, // TLS 1.2 only
|
||||
MaxVersion: 0x0303,
|
||||
goodCiphers: []string{
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
},
|
||||
}, []ast.Node{(*ast.CompositeLit)(nil)}
|
||||
}
|
||||
|
||||
func NewIntermediateTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29
|
||||
return &InsecureConfigTLS{
|
||||
pattern: regexp.MustCompile(`^tls\.Config$`),
|
||||
MinVersion: 0x0301, // TLS 1.2, 1.1, 1.0
|
||||
MaxVersion: 0x0303,
|
||||
goodCiphers: []string{
|
||||
"TLS_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_RC4_128_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
|
||||
},
|
||||
}, []ast.Node{(*ast.CompositeLit)(nil)}
|
||||
}
|
||||
|
||||
func NewCompatTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// https://wiki.mozilla.org/Security/Server_Side_TLS#Old_compatibility_.28default.29
|
||||
return &InsecureConfigTLS{
|
||||
pattern: regexp.MustCompile(`^tls\.Config$`),
|
||||
MinVersion: 0x0301, // TLS 1.2, 1.1, 1.0
|
||||
MaxVersion: 0x0303,
|
||||
goodCiphers: []string{
|
||||
"TLS_RSA_WITH_RC4_128_SHA",
|
||||
"TLS_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_RC4_128_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
|
||||
},
|
||||
}, []ast.Node{(*ast.CompositeLit)(nil)}
|
||||
return nil, nil
|
||||
}
|
||||
|
132
tools/vendor/github.com/GoASTScanner/gas/rules/tls_config.go
generated
vendored
Normal file
132
tools/vendor/github.com/GoASTScanner/gas/rules/tls_config.go
generated
vendored
Normal file
@ -0,0 +1,132 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
// NewModernTLSCheck creates a check for Modern TLS ciphers
|
||||
// DO NOT EDIT - generated by tlsconfig tool
|
||||
func NewModernTLSCheck(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &insecureConfigTLS{
|
||||
requiredType: "crypto/tls.Config",
|
||||
MinVersion: 0x0303,
|
||||
MaxVersion: 0x0303,
|
||||
goodCiphers: []string{
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
|
||||
},
|
||||
}, []ast.Node{(*ast.CompositeLit)(nil)}
|
||||
}
|
||||
|
||||
// NewIntermediateTLSCheck creates a check for Intermediate TLS ciphers
|
||||
// DO NOT EDIT - generated by tlsconfig tool
|
||||
func NewIntermediateTLSCheck(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &insecureConfigTLS{
|
||||
requiredType: "crypto/tls.Config",
|
||||
MinVersion: 0x0301,
|
||||
MaxVersion: 0x0303,
|
||||
goodCiphers: []string{
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_DHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_DHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_DHE_RSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_DHE_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_DHE_RSA_WITH_AES_256_CBC_SHA256",
|
||||
"TLS_DHE_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_RSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_RSA_WITH_AES_256_CBC_SHA256",
|
||||
"TLS_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
},
|
||||
}, []ast.Node{(*ast.CompositeLit)(nil)}
|
||||
}
|
||||
|
||||
// NewOldTLSCheck creates a check for Old TLS ciphers
|
||||
// DO NOT EDIT - generated by tlsconfig tool
|
||||
func NewOldTLSCheck(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &insecureConfigTLS{
|
||||
requiredType: "crypto/tls.Config",
|
||||
MinVersion: 0x0300,
|
||||
MaxVersion: 0x0303,
|
||||
goodCiphers: []string{
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_DHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_DHE_DSS_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_DHE_DSS_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_DHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_DHE_RSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_DHE_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_DHE_DSS_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_DHE_RSA_WITH_AES_256_CBC_SHA256",
|
||||
"TLS_DHE_DSS_WITH_AES_256_CBC_SHA",
|
||||
"TLS_DHE_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_RSA_WITH_AES_128_CBC_SHA256",
|
||||
"TLS_RSA_WITH_AES_256_CBC_SHA256",
|
||||
"TLS_RSA_WITH_AES_128_CBC_SHA",
|
||||
"TLS_RSA_WITH_AES_256_CBC_SHA",
|
||||
"TLS_DHE_DSS_WITH_AES_256_CBC_SHA256",
|
||||
"TLS_DHE_DSS_WITH_AES_128_CBC_SHA",
|
||||
"TLS_RSA_WITH_3DES_EDE_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384",
|
||||
"TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384",
|
||||
"TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256",
|
||||
"TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256",
|
||||
"TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA",
|
||||
"TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA",
|
||||
"TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256",
|
||||
"TLS_RSA_WITH_CAMELLIA_256_CBC_SHA",
|
||||
"TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256",
|
||||
"TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256",
|
||||
"TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256",
|
||||
"TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA",
|
||||
"TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA",
|
||||
"TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256",
|
||||
"TLS_RSA_WITH_CAMELLIA_128_CBC_SHA",
|
||||
"TLS_DHE_RSA_WITH_SEED_CBC_SHA",
|
||||
"TLS_DHE_DSS_WITH_SEED_CBC_SHA",
|
||||
"TLS_RSA_WITH_SEED_CBC_SHA",
|
||||
},
|
||||
}, []ast.Node{(*ast.CompositeLit)(nil)}
|
||||
}
|
13
tools/vendor/github.com/GoASTScanner/gas/rules/unsafe.go
generated
vendored
13
tools/vendor/github.com/GoASTScanner/gas/rules/unsafe.go
generated
vendored
@ -15,25 +15,28 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"go/ast"
|
||||
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type UsingUnsafe struct {
|
||||
type usingUnsafe struct {
|
||||
gas.MetaData
|
||||
pkg string
|
||||
calls []string
|
||||
}
|
||||
|
||||
func (r *UsingUnsafe) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
func (r *usingUnsafe) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if _, matches := gas.MatchCallByPackage(n, c, r.pkg, r.calls...); matches {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewUsingUnsafe(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &UsingUnsafe{
|
||||
// NewUsingUnsafe rule detects the use of the unsafe package. This is only
|
||||
// really useful for auditing purposes.
|
||||
func NewUsingUnsafe(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
return &usingUnsafe{
|
||||
pkg: "unsafe",
|
||||
calls: []string{"Alignof", "Offsetof", "Sizeof", "Pointer"},
|
||||
MetaData: gas.MetaData{
|
||||
|
12
tools/vendor/github.com/GoASTScanner/gas/rules/weakcrypto.go
generated
vendored
12
tools/vendor/github.com/GoASTScanner/gas/rules/weakcrypto.go
generated
vendored
@ -17,15 +17,15 @@ package rules
|
||||
import (
|
||||
"go/ast"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas"
|
||||
)
|
||||
|
||||
type UsesWeakCryptography struct {
|
||||
type usesWeakCryptography struct {
|
||||
gas.MetaData
|
||||
blacklist map[string][]string
|
||||
}
|
||||
|
||||
func (r *UsesWeakCryptography) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
func (r *usesWeakCryptography) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
|
||||
for pkg, funcs := range r.blacklist {
|
||||
if _, matched := gas.MatchCallByPackage(n, c, pkg, funcs...); matched {
|
||||
@ -35,13 +35,13 @@ func (r *UsesWeakCryptography) Match(n ast.Node, c *gas.Context) (*gas.Issue, er
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Uses des.* md5.* or rc4.*
|
||||
func NewUsesWeakCryptography(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
// NewUsesWeakCryptography detects uses of des.* md5.* or rc4.*
|
||||
func NewUsesWeakCryptography(conf gas.Config) (gas.Rule, []ast.Node) {
|
||||
calls := make(map[string][]string)
|
||||
calls["crypto/des"] = []string{"NewCipher", "NewTripleDESCipher"}
|
||||
calls["crypto/md5"] = []string{"New", "Sum"}
|
||||
calls["crypto/rc4"] = []string{"NewCipher"}
|
||||
rule := &UsesWeakCryptography{
|
||||
rule := &usesWeakCryptography{
|
||||
blacklist: calls,
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
|
276
tools/vendor/github.com/GoASTScanner/gas/tools.go
generated
vendored
276
tools/vendor/github.com/GoASTScanner/gas/tools.go
generated
vendored
@ -1,276 +0,0 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/importer"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type command func(args ...string)
|
||||
type utilities struct {
|
||||
commands map[string]command
|
||||
call []string
|
||||
}
|
||||
|
||||
// Custom commands / utilities to run instead of default analyzer
|
||||
func newUtils() *utilities {
|
||||
utils := make(map[string]command)
|
||||
utils["ast"] = dumpAst
|
||||
utils["callobj"] = dumpCallObj
|
||||
utils["uses"] = dumpUses
|
||||
utils["types"] = dumpTypes
|
||||
utils["defs"] = dumpDefs
|
||||
utils["comments"] = dumpComments
|
||||
utils["imports"] = dumpImports
|
||||
return &utilities{utils, make([]string, 0)}
|
||||
}
|
||||
|
||||
func (u *utilities) String() string {
|
||||
i := 0
|
||||
keys := make([]string, len(u.commands))
|
||||
for k := range u.commands {
|
||||
keys[i] = k
|
||||
i++
|
||||
}
|
||||
return strings.Join(keys, ", ")
|
||||
}
|
||||
|
||||
func (u *utilities) Set(opt string) error {
|
||||
if _, ok := u.commands[opt]; !ok {
|
||||
return fmt.Errorf("valid tools are: %s", u.String())
|
||||
|
||||
}
|
||||
u.call = append(u.call, opt)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (u *utilities) run(args ...string) {
|
||||
for _, util := range u.call {
|
||||
if cmd, ok := u.commands[util]; ok {
|
||||
cmd(args...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func shouldSkip(path string) bool {
|
||||
st, e := os.Stat(path)
|
||||
if e != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Skipping: %s - %s\n", path, e)
|
||||
return true
|
||||
}
|
||||
if st.IsDir() {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Skipping: %s - directory\n", path)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func dumpAst(files ...string) {
|
||||
for _, arg := range files {
|
||||
// Ensure file exists and not a directory
|
||||
if shouldSkip(arg) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Create the AST by parsing src.
|
||||
fset := token.NewFileSet() // positions are relative to fset
|
||||
f, err := parser.ParseFile(fset, arg, nil, 0)
|
||||
if err != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Unable to parse file %s\n", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Print the AST. #nosec
|
||||
ast.Print(fset, f)
|
||||
}
|
||||
}
|
||||
|
||||
type context struct {
|
||||
fileset *token.FileSet
|
||||
comments ast.CommentMap
|
||||
info *types.Info
|
||||
pkg *types.Package
|
||||
config *types.Config
|
||||
root *ast.File
|
||||
}
|
||||
|
||||
func createContext(filename string) *context {
|
||||
fileset := token.NewFileSet()
|
||||
root, e := parser.ParseFile(fileset, filename, nil, parser.ParseComments)
|
||||
if e != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Unable to parse file: %s. Reason: %s\n", filename, e)
|
||||
return nil
|
||||
}
|
||||
comments := ast.NewCommentMap(fileset, root, root.Comments)
|
||||
info := &types.Info{
|
||||
Types: make(map[ast.Expr]types.TypeAndValue),
|
||||
Defs: make(map[*ast.Ident]types.Object),
|
||||
Uses: make(map[*ast.Ident]types.Object),
|
||||
Selections: make(map[*ast.SelectorExpr]*types.Selection),
|
||||
Scopes: make(map[ast.Node]*types.Scope),
|
||||
Implicits: make(map[ast.Node]types.Object),
|
||||
}
|
||||
config := types.Config{Importer: importer.Default()}
|
||||
pkg, e := config.Check("main.go", fileset, []*ast.File{root}, info)
|
||||
if e != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Type check failed for file: %s. Reason: %s\n", filename, e)
|
||||
return nil
|
||||
}
|
||||
return &context{fileset, comments, info, pkg, &config, root}
|
||||
}
|
||||
|
||||
func printObject(obj types.Object) {
|
||||
fmt.Println("OBJECT")
|
||||
if obj == nil {
|
||||
fmt.Println("object is nil")
|
||||
return
|
||||
}
|
||||
fmt.Printf(" Package = %v\n", obj.Pkg())
|
||||
if obj.Pkg() != nil {
|
||||
fmt.Println(" Path = ", obj.Pkg().Path())
|
||||
fmt.Println(" Name = ", obj.Pkg().Name())
|
||||
fmt.Println(" String = ", obj.Pkg().String())
|
||||
}
|
||||
fmt.Printf(" Name = %v\n", obj.Name())
|
||||
fmt.Printf(" Type = %v\n", obj.Type())
|
||||
fmt.Printf(" Id = %v\n", obj.Id())
|
||||
}
|
||||
|
||||
func checkContext(ctx *context, file string) bool {
|
||||
// #nosec
|
||||
if ctx == nil {
|
||||
fmt.Fprintln(os.Stderr, "Failed to create context for file: ", file)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func dumpCallObj(files ...string) {
|
||||
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
ast.Inspect(context.root, func(n ast.Node) bool {
|
||||
var obj types.Object
|
||||
switch node := n.(type) {
|
||||
case *ast.Ident:
|
||||
obj = context.info.ObjectOf(node) //context.info.Uses[node]
|
||||
case *ast.SelectorExpr:
|
||||
obj = context.info.ObjectOf(node.Sel) //context.info.Uses[node.Sel]
|
||||
default:
|
||||
obj = nil
|
||||
}
|
||||
if obj != nil {
|
||||
printObject(obj)
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func dumpUses(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for ident, obj := range context.info.Uses {
|
||||
fmt.Printf("IDENT: %v, OBJECT: %v\n", ident, obj)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpTypes(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for expr, tv := range context.info.Types {
|
||||
fmt.Printf("EXPR: %v, TYPE: %v\n", expr, tv)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpDefs(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for ident, obj := range context.info.Defs {
|
||||
fmt.Printf("IDENT: %v, OBJ: %v\n", ident, obj)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpComments(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for _, group := range context.comments.Comments() {
|
||||
fmt.Println(group.Text())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpImports(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for _, pkg := range context.pkg.Imports() {
|
||||
fmt.Println(pkg.Path(), pkg.Name())
|
||||
for _, name := range pkg.Scope().Names() {
|
||||
fmt.Println(" => ", name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
7
tools/vendor/github.com/GoASTScanner/gas/vendor.conf
generated
vendored
7
tools/vendor/github.com/GoASTScanner/gas/vendor.conf
generated
vendored
@ -1,7 +0,0 @@
|
||||
# package
|
||||
github.com/GoAstScanner/gas
|
||||
|
||||
# import
|
||||
github.com/GoASTScanner/gas cc52ef5
|
||||
github.com/nbutton23/zxcvbn-go a22cb81
|
||||
github.com/ryanuber/go-glob v0.1
|
54
tools/vendor/github.com/alecthomas/gometalinter/README.md
generated
vendored
54
tools/vendor/github.com/alecthomas/gometalinter/README.md
generated
vendored
@ -3,10 +3,13 @@
|
||||
|
||||
<!-- MarkdownTOC -->
|
||||
|
||||
- [Installing](#installing)
|
||||
- [Editor integration](#editor-integration)
|
||||
- [Supported linters](#supported-linters)
|
||||
- [Configuration file](#configuration-file)
|
||||
- [Installing](#installing)
|
||||
- [`Format` key](#format-key)
|
||||
- [Format Methods](#format-methods)
|
||||
- [Adding Custom linters](#adding-custom-linters)
|
||||
- [Comment directives](#comment-directives)
|
||||
- [Quickstart](#quickstart)
|
||||
- [FAQ](#faq)
|
||||
@ -39,6 +42,16 @@ eg.
|
||||
|
||||
It is intended for use with editor/IDE integration.
|
||||
|
||||
## Installing
|
||||
|
||||
There are two options for installing gometalinter.
|
||||
|
||||
1. Install a stable version, eg. `go get -u gopkg.in/alecthomas/gometalinter.v2`.
|
||||
I will generally only tag a new stable version when it has passed the Travis
|
||||
regression tests. The downside is that the binary will be called `gometalinter.v2`.
|
||||
2. Install from HEAD with: `go get -u github.com/alecthomas/gometalinter`.
|
||||
This has the downside that changes to gometalinter may break.
|
||||
|
||||
## Editor integration
|
||||
|
||||
- [SublimeLinter plugin](https://github.com/alecthomas/SublimeLinter-contrib-gometalinter).
|
||||
@ -91,9 +104,11 @@ Additional linters can be added through the command line with `--linter=NAME:COM
|
||||
|
||||
## Configuration file
|
||||
|
||||
gometalinter now supports a JSON configuration file which can be loaded via
|
||||
`--config=<file>`. The format of this file is determined by the `Config` struct
|
||||
in [config.go](https://github.com/alecthomas/gometalinter/blob/master/config.go).
|
||||
gometalinter now supports a JSON configuration file called `.gometalinter.json` that can
|
||||
be placed at the root of your project. The configuration file will be automatically loaded
|
||||
from the working directory or any parent directory and can be overridden by passing
|
||||
`--config=<file>` or ignored with `--no-config`. The format of this file is determined by
|
||||
the `Config` struct in [config.go](https://github.com/alecthomas/gometalinter/blob/master/config.go).
|
||||
|
||||
The configuration file mostly corresponds to command-line flags, with the following exceptions:
|
||||
|
||||
@ -110,6 +125,27 @@ Here is an example configuration file:
|
||||
}
|
||||
```
|
||||
|
||||
If a `.gometalinter.json` file is loaded, individual options can still be overridden by
|
||||
passing command-line flags. All flags are parsed in order, meaning configuration passed
|
||||
with the `--config` flag will override any command-line flags passed before and be
|
||||
overridden by flags passed after.
|
||||
|
||||
|
||||
#### `Format` key
|
||||
|
||||
The default `Format` key places the different fields of an `Issue` into a template. this
|
||||
corresponds to the `--format` option command-line flag.
|
||||
|
||||
Default `Format`:
|
||||
```
|
||||
Format: "{{.Path}}:{{.Line}}:{{if .Col}}{{.Col}}{{end}}:{{.Severity}}: {{.Message}} ({{.Linter}})"
|
||||
```
|
||||
|
||||
#### Format Methods
|
||||
|
||||
* `{{.Path.Relative}}` - equivalent to `{{.Path}}` which outputs a relative path to the file
|
||||
* `{{.Path.Abs}}` - outputs an absolute path to the file
|
||||
|
||||
### Adding Custom linters
|
||||
|
||||
Linters can be added and customized from the config file using the `Linters` field.
|
||||
@ -138,16 +174,6 @@ Example:
|
||||
$ gometalinter --linter='vet:go tool vet -printfuncs=Infof,Debugf,Warningf,Errorf:PATH:LINE:MESSAGE' .
|
||||
```
|
||||
|
||||
## Installing
|
||||
|
||||
There are two options for installing gometalinter.
|
||||
|
||||
1. Install a stable version, eg. `go get -u gopkg.in/alecthomas/gometalinter.v1`.
|
||||
I will generally only tag a new stable version when it has passed the Travis
|
||||
regression tests. The downside is that the binary will be called `gometalinter.v1`.
|
||||
2. Install from HEAD with: `go get -u github.com/alecthomas/gometalinter`.
|
||||
This has the downside that changes to gometalinter may break.
|
||||
|
||||
## Comment directives
|
||||
|
||||
gometalinter supports suppression of linter messages via comment directives. The
|
||||
|
2
tools/vendor/github.com/alecthomas/gometalinter/aggregate.go
generated
vendored
2
tools/vendor/github.com/alecthomas/gometalinter/aggregate.go
generated
vendored
@ -25,7 +25,7 @@ func AggregateIssueChan(issues chan *Issue) chan *Issue {
|
||||
go func() {
|
||||
for issue := range issues {
|
||||
key := issueKey{
|
||||
path: issue.Path,
|
||||
path: issue.Path.String(),
|
||||
line: issue.Line,
|
||||
col: issue.Col,
|
||||
message: issue.Message,
|
||||
|
7
tools/vendor/github.com/alecthomas/gometalinter/checkstyle.go
generated
vendored
7
tools/vendor/github.com/alecthomas/gometalinter/checkstyle.go
generated
vendored
@ -33,14 +33,13 @@ func outputToCheckstyle(issues chan *Issue) int {
|
||||
}
|
||||
status := 0
|
||||
for issue := range issues {
|
||||
if lastFile != nil && lastFile.Name != issue.Path {
|
||||
path := issue.Path.Relative()
|
||||
if lastFile != nil && lastFile.Name != path {
|
||||
out.Files = append(out.Files, lastFile)
|
||||
lastFile = nil
|
||||
}
|
||||
if lastFile == nil {
|
||||
lastFile = &checkstyleFile{
|
||||
Name: issue.Path,
|
||||
}
|
||||
lastFile = &checkstyleFile{Name: path}
|
||||
}
|
||||
|
||||
if config.Errors && issue.Severity != Error {
|
||||
|
55
tools/vendor/github.com/alecthomas/gometalinter/config.go
generated
vendored
55
tools/vendor/github.com/alecthomas/gometalinter/config.go
generated
vendored
@ -2,6 +2,8 @@ package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"text/template"
|
||||
"time"
|
||||
@ -38,6 +40,7 @@ type Config struct { // nolint: maligned
|
||||
Vendor bool
|
||||
Cyclo int
|
||||
LineLength int
|
||||
MisspellLocale string
|
||||
MinConfidence float64
|
||||
MinOccurrences int
|
||||
MinConstLength int
|
||||
@ -128,6 +131,7 @@ var config = &Config{
|
||||
Concurrency: runtime.NumCPU(),
|
||||
Cyclo: 10,
|
||||
LineLength: 80,
|
||||
MisspellLocale: "",
|
||||
MinConfidence: 0.8,
|
||||
MinOccurrences: 3,
|
||||
MinConstLength: 3,
|
||||
@ -135,3 +139,54 @@ var config = &Config{
|
||||
Sort: []string{"none"},
|
||||
Deadline: jsonDuration(time.Second * 30),
|
||||
}
|
||||
|
||||
func loadConfigFile(filename string) error {
|
||||
r, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer r.Close() // nolint: errcheck
|
||||
err = json.NewDecoder(r).Decode(config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, disable := range config.Disable {
|
||||
for i, enable := range config.Enable {
|
||||
if enable == disable {
|
||||
config.Enable = append(config.Enable[:i], config.Enable[i+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func findDefaultConfigFile() (fullPath string, found bool, err error) {
|
||||
prevPath := ""
|
||||
dirPath, err := os.Getwd()
|
||||
if err != nil {
|
||||
return "", false, err
|
||||
}
|
||||
|
||||
for dirPath != prevPath {
|
||||
fullPath, found, err = findConfigFileInDir(dirPath)
|
||||
if err != nil || found {
|
||||
return fullPath, found, err
|
||||
}
|
||||
prevPath, dirPath = dirPath, filepath.Dir(dirPath)
|
||||
}
|
||||
|
||||
return "", false, nil
|
||||
}
|
||||
|
||||
func findConfigFileInDir(dirPath string) (fullPath string, found bool, err error) {
|
||||
fullPath = filepath.Join(dirPath, defaultConfigPath)
|
||||
if _, err := os.Stat(fullPath); err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return "", false, nil
|
||||
}
|
||||
return "", false, err
|
||||
}
|
||||
|
||||
return fullPath, true, nil
|
||||
}
|
||||
|
16
tools/vendor/github.com/alecthomas/gometalinter/directives.go
generated
vendored
16
tools/vendor/github.com/alecthomas/gometalinter/directives.go
generated
vendored
@ -5,6 +5,7 @@ import (
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
@ -67,11 +68,12 @@ func newDirectiveParser() *directiveParser {
|
||||
// IsIgnored returns true if the given linter issue is ignored by a linter directive.
|
||||
func (d *directiveParser) IsIgnored(issue *Issue) bool {
|
||||
d.lock.Lock()
|
||||
ranges, ok := d.files[issue.Path]
|
||||
path := issue.Path.Relative()
|
||||
ranges, ok := d.files[path]
|
||||
if !ok {
|
||||
ranges = d.parseFile(issue.Path)
|
||||
ranges = d.parseFile(path)
|
||||
sort.Sort(ranges)
|
||||
d.files[issue.Path] = ranges
|
||||
d.files[path] = ranges
|
||||
}
|
||||
d.lock.Unlock()
|
||||
for _, r := range ranges {
|
||||
@ -204,10 +206,16 @@ func filterIssuesViaDirectives(directives *directiveParser, issues chan *Issue)
|
||||
|
||||
func warnOnUnusedDirective(directives *directiveParser) []*Issue {
|
||||
out := []*Issue{}
|
||||
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
warning("failed to get working directory %s", err)
|
||||
}
|
||||
|
||||
for path, ranges := range directives.Unmatched() {
|
||||
for _, ignore := range ranges {
|
||||
issue, _ := NewIssue("nolint", config.formatTemplate)
|
||||
issue.Path = path
|
||||
issue.Path = newIssuePath(cwd, path)
|
||||
issue.Line = ignore.start
|
||||
issue.Col = ignore.col
|
||||
issue.Message = "nolint directive did not match any issue"
|
||||
|
39
tools/vendor/github.com/alecthomas/gometalinter/execute.go
generated
vendored
39
tools/vendor/github.com/alecthomas/gometalinter/execute.go
generated
vendored
@ -5,7 +5,6 @@ import (
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strconv"
|
||||
@ -82,6 +81,7 @@ func runLinters(linters map[string]*Linter, paths []string, concurrency int, exc
|
||||
"duplthreshold": fmt.Sprintf("%d", config.DuplThreshold),
|
||||
"mincyclo": fmt.Sprintf("%d", config.Cyclo),
|
||||
"maxlinelength": fmt.Sprintf("%d", config.LineLength),
|
||||
"misspelllocale": fmt.Sprintf("%s", config.MisspellLocale),
|
||||
"min_confidence": fmt.Sprintf("%f", config.MinConfidence),
|
||||
"min_occurrences": fmt.Sprintf("%d", config.MinOccurrences),
|
||||
"min_const_length": fmt.Sprintf("%d", config.MinConstLength),
|
||||
@ -237,8 +237,10 @@ func processOutput(dbg debugFunction, state *linterState, out []byte) {
|
||||
}
|
||||
switch name {
|
||||
case "path":
|
||||
issue.Path = relativePath(cwd, part)
|
||||
|
||||
issue.Path, err = newIssuePathFromAbsPath(cwd, part)
|
||||
if err != nil {
|
||||
warning("failed to make %s a relative path: %s", part, err)
|
||||
}
|
||||
case "line":
|
||||
n, err := strconv.ParseInt(part, 10, 32)
|
||||
kingpin.FatalIfError(err, "line matched invalid integer")
|
||||
@ -273,37 +275,6 @@ func processOutput(dbg debugFunction, state *linterState, out []byte) {
|
||||
}
|
||||
}
|
||||
|
||||
func relativePath(root, path string) string {
|
||||
fallback := path
|
||||
root = resolvePath(root)
|
||||
path = resolvePath(path)
|
||||
var err error
|
||||
path, err = filepath.Rel(root, path)
|
||||
if err != nil {
|
||||
warning("failed to make %s a relative path: %s", fallback, err)
|
||||
return fallback
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
func resolvePath(path string) string {
|
||||
var err error
|
||||
fallback := path
|
||||
if !filepath.IsAbs(path) {
|
||||
path, err = filepath.Abs(path)
|
||||
if err != nil {
|
||||
warning("failed to make %s an absolute path: %s", fallback, err)
|
||||
return fallback
|
||||
}
|
||||
}
|
||||
path, err = filepath.EvalSymlinks(path)
|
||||
if err != nil {
|
||||
warning("failed to resolve symlinks in %s: %s", fallback, err)
|
||||
return fallback
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
func maybeSortIssues(issues chan *Issue) chan *Issue {
|
||||
if reflect.DeepEqual([]string{"none"}, config.Sort) {
|
||||
return issues
|
||||
|
68
tools/vendor/github.com/alecthomas/gometalinter/issue.go
generated
vendored
68
tools/vendor/github.com/alecthomas/gometalinter/issue.go
generated
vendored
@ -2,8 +2,10 @@ package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"text/template"
|
||||
@ -21,13 +23,59 @@ const (
|
||||
Warning Severity = "warning"
|
||||
)
|
||||
|
||||
type IssuePath struct {
|
||||
root string
|
||||
path string
|
||||
}
|
||||
|
||||
func (i IssuePath) String() string {
|
||||
return i.Relative()
|
||||
}
|
||||
|
||||
func (i IssuePath) Relative() string {
|
||||
return i.path
|
||||
}
|
||||
|
||||
func (i IssuePath) Abs() string {
|
||||
return filepath.Join(i.root, i.path)
|
||||
}
|
||||
|
||||
func (i IssuePath) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(i.String())
|
||||
}
|
||||
|
||||
func newIssuePath(root, path string) IssuePath {
|
||||
return IssuePath{root: root, path: path}
|
||||
}
|
||||
|
||||
// newIssuePathFromAbsPath returns a new issuePath from a path that may be
|
||||
// an absolute path. root must be an absolute path.
|
||||
func newIssuePathFromAbsPath(root, path string) (IssuePath, error) {
|
||||
resolvedRoot, err := filepath.EvalSymlinks(root)
|
||||
if err != nil {
|
||||
return newIssuePath(root, path), err
|
||||
}
|
||||
|
||||
resolvedPath, err := filepath.EvalSymlinks(path)
|
||||
if err != nil {
|
||||
return newIssuePath(root, path), err
|
||||
}
|
||||
|
||||
if !filepath.IsAbs(path) {
|
||||
return newIssuePath(resolvedRoot, resolvedPath), nil
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(resolvedRoot, resolvedPath)
|
||||
return newIssuePath(resolvedRoot, relPath), err
|
||||
}
|
||||
|
||||
type Issue struct {
|
||||
Linter string `json:"linter"`
|
||||
Severity Severity `json:"severity"`
|
||||
Path string `json:"path"`
|
||||
Line int `json:"line"`
|
||||
Col int `json:"col"`
|
||||
Message string `json:"message"`
|
||||
Linter string `json:"linter"`
|
||||
Severity Severity `json:"severity"`
|
||||
Path IssuePath `json:"path"`
|
||||
Line int `json:"line"`
|
||||
Col int `json:"col"`
|
||||
Message string `json:"message"`
|
||||
formatTmpl *template.Template
|
||||
}
|
||||
|
||||
@ -50,7 +98,11 @@ func (i *Issue) String() string {
|
||||
if i.Col != 0 {
|
||||
col = fmt.Sprintf("%d", i.Col)
|
||||
}
|
||||
return fmt.Sprintf("%s:%d:%s:%s: %s (%s)", strings.TrimSpace(i.Path), i.Line, col, i.Severity, strings.TrimSpace(i.Message), i.Linter)
|
||||
return fmt.Sprintf("%s:%d:%s:%s: %s (%s)",
|
||||
strings.TrimSpace(i.Path.Relative()),
|
||||
i.Line, col, i.Severity,
|
||||
strings.TrimSpace(i.Message),
|
||||
i.Linter)
|
||||
}
|
||||
buf := new(bytes.Buffer)
|
||||
_ = i.formatTmpl.Execute(buf, i)
|
||||
@ -76,7 +128,7 @@ func CompareIssue(l, r Issue, order []string) bool {
|
||||
for _, key := range order {
|
||||
switch {
|
||||
case key == "path" && l.Path != r.Path:
|
||||
return l.Path < r.Path
|
||||
return l.Path.String() < r.Path.String()
|
||||
case key == "line" && l.Line != r.Line:
|
||||
return l.Line < r.Line
|
||||
case key == "column" && l.Col != r.Col:
|
||||
|
2
tools/vendor/github.com/alecthomas/gometalinter/linters.go
generated
vendored
2
tools/vendor/github.com/alecthomas/gometalinter/linters.go
generated
vendored
@ -328,7 +328,7 @@ var defaultLinters = map[string]LinterConfig{
|
||||
defaultEnabled: true,
|
||||
},
|
||||
"misspell": {
|
||||
Command: `misspell -j 1`,
|
||||
Command: `misspell -j 1 --locale "{misspelllocale}"`,
|
||||
Pattern: `PATH:LINE:COL:MESSAGE`,
|
||||
InstallFrom: "github.com/client9/misspell/cmd/misspell",
|
||||
PartitionStrategy: partitionPathsAsFiles,
|
||||
|
77
tools/vendor/github.com/alecthomas/gometalinter/main.go
generated
vendored
77
tools/vendor/github.com/alecthomas/gometalinter/main.go
generated
vendored
@ -21,12 +21,19 @@ var (
|
||||
// Locations to look for vendored linters.
|
||||
vendoredSearchPaths = [][]string{
|
||||
{"github.com", "alecthomas", "gometalinter", "_linters"},
|
||||
{"gopkg.in", "alecthomas", "gometalinter.v1", "_linters"},
|
||||
{"gopkg.in", "alecthomas", "gometalinter.v2", "_linters"},
|
||||
}
|
||||
defaultConfigPath = ".gometalinter.json"
|
||||
|
||||
// Populated by goreleaser.
|
||||
version = "master"
|
||||
commit = "?"
|
||||
date = ""
|
||||
)
|
||||
|
||||
func setupFlags(app *kingpin.Application) {
|
||||
app.Flag("config", "Load JSON configuration from file.").Envar("GOMETALINTER_CONFIG").Action(loadConfig).String()
|
||||
app.Flag("no-config", "Disable automatic loading of config file.").Bool()
|
||||
app.Flag("disable", "Disable previously enabled linters.").PlaceHolder("LINTER").Short('D').Action(disableAction).Strings()
|
||||
app.Flag("enable", "Enable previously disabled linters.").PlaceHolder("LINTER").Short('E').Action(enableAction).Strings()
|
||||
app.Flag("linter", "Define a linter.").PlaceHolder("NAME:COMMAND:PATTERN").Action(cliLinterOverrides).StringMap()
|
||||
@ -35,12 +42,12 @@ func setupFlags(app *kingpin.Application) {
|
||||
app.Flag("disable-all", "Disable all linters.").Action(disableAllAction).Bool()
|
||||
app.Flag("enable-all", "Enable all linters.").Action(enableAllAction).Bool()
|
||||
app.Flag("format", "Output format.").PlaceHolder(config.Format).StringVar(&config.Format)
|
||||
app.Flag("vendored-linters", "Use vendored linters (recommended).").BoolVar(&config.VendoredLinters)
|
||||
app.Flag("vendored-linters", "Use vendored linters (recommended) (DEPRECATED - use binary packages).").BoolVar(&config.VendoredLinters)
|
||||
app.Flag("fast", "Only run fast linters.").BoolVar(&config.Fast)
|
||||
app.Flag("install", "Attempt to install all known linters.").Short('i').BoolVar(&config.Install)
|
||||
app.Flag("update", "Pass -u to go tool when installing.").Short('u').BoolVar(&config.Update)
|
||||
app.Flag("force", "Pass -f to go tool when installing.").Short('f').BoolVar(&config.Force)
|
||||
app.Flag("download-only", "Pass -d to go tool when installing.").BoolVar(&config.DownloadOnly)
|
||||
app.Flag("install", "Attempt to install all known linters (DEPRECATED - use binary packages).").Short('i').BoolVar(&config.Install)
|
||||
app.Flag("update", "Pass -u to go tool when installing (DEPRECATED - use binary packages).").Short('u').BoolVar(&config.Update)
|
||||
app.Flag("force", "Pass -f to go tool when installing (DEPRECATED - use binary packages).").Short('f').BoolVar(&config.Force)
|
||||
app.Flag("download-only", "Pass -d to go tool when installing (DEPRECATED - use binary packages).").BoolVar(&config.DownloadOnly)
|
||||
app.Flag("debug", "Display messages for failed linters, etc.").Short('d').BoolVar(&config.Debug)
|
||||
app.Flag("concurrency", "Number of concurrent linters to run.").PlaceHolder(fmt.Sprintf("%d", runtime.NumCPU())).Short('j').IntVar(&config.Concurrency)
|
||||
app.Flag("exclude", "Exclude messages matching these regular expressions.").Short('e').PlaceHolder("REGEXP").StringsVar(&config.Exclude)
|
||||
@ -49,6 +56,7 @@ func setupFlags(app *kingpin.Application) {
|
||||
app.Flag("vendor", "Enable vendoring support (skips 'vendor' directories and sets GO15VENDOREXPERIMENT=1).").BoolVar(&config.Vendor)
|
||||
app.Flag("cyclo-over", "Report functions with cyclomatic complexity over N (using gocyclo).").PlaceHolder("10").IntVar(&config.Cyclo)
|
||||
app.Flag("line-length", "Report lines longer than N (using lll).").PlaceHolder("80").IntVar(&config.LineLength)
|
||||
app.Flag("misspell-locale", "Specify locale to use (using misspell).").PlaceHolder("").StringVar(&config.MisspellLocale)
|
||||
app.Flag("min-confidence", "Minimum confidence interval to pass to golint.").PlaceHolder(".80").FloatVar(&config.MinConfidence)
|
||||
app.Flag("min-occurrences", "Minimum occurrences to pass to goconst.").PlaceHolder("3").IntVar(&config.MinOccurrences)
|
||||
app.Flag("min-const-length", "Minimum constant length.").PlaceHolder("3").IntVar(&config.MinConstLength)
|
||||
@ -81,25 +89,27 @@ func cliLinterOverrides(app *kingpin.Application, element *kingpin.ParseElement,
|
||||
return nil
|
||||
}
|
||||
|
||||
func loadConfig(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
r, err := os.Open(*element.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
func loadDefaultConfig(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
if element != nil {
|
||||
return nil
|
||||
}
|
||||
defer r.Close() // nolint: errcheck
|
||||
err = json.NewDecoder(r).Decode(config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, disable := range config.Disable {
|
||||
for i, enable := range config.Enable {
|
||||
if enable == disable {
|
||||
config.Enable = append(config.Enable[:i], config.Enable[i+1:]...)
|
||||
break
|
||||
}
|
||||
|
||||
for _, elem := range ctx.Elements {
|
||||
if f := elem.OneOf.Flag; f == app.GetFlag("config") || f == app.GetFlag("no-config") {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return err
|
||||
|
||||
configFile, found, err := findDefaultConfigFile()
|
||||
if err != nil || !found {
|
||||
return err
|
||||
}
|
||||
|
||||
return loadConfigFile(configFile)
|
||||
}
|
||||
|
||||
func loadConfig(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
return loadConfigFile(*element.Value)
|
||||
}
|
||||
|
||||
func disableAction(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
@ -135,7 +145,9 @@ type debugFunction func(format string, args ...interface{})
|
||||
|
||||
func debug(format string, args ...interface{}) {
|
||||
if config.Debug {
|
||||
fmt.Fprintf(os.Stderr, "DEBUG: "+format+"\n", args...)
|
||||
t := time.Now().UTC()
|
||||
fmt.Fprintf(os.Stderr, "DEBUG: [%s] ", t.Format(time.StampMilli))
|
||||
fmt.Fprintf(os.Stderr, format+"\n", args...)
|
||||
}
|
||||
}
|
||||
|
||||
@ -171,8 +183,10 @@ func formatSeverity() string {
|
||||
}
|
||||
|
||||
func main() {
|
||||
kingpin.Version(fmt.Sprintf("gometalinter version %s built from %s on %s", version, commit, date))
|
||||
pathsArg := kingpin.Arg("path", "Directories to lint. Defaults to \".\". <path>/... will recurse.").Strings()
|
||||
app := kingpin.CommandLine
|
||||
app.Action(loadDefaultConfig)
|
||||
setupFlags(app)
|
||||
app.Help = fmt.Sprintf(`Aggregate and normalise the output of a whole bunch of Go linters.
|
||||
|
||||
@ -233,15 +247,6 @@ func processConfig(config *Config) (include *regexp.Regexp, exclude *regexp.Rege
|
||||
if !config.EnableGC {
|
||||
_ = os.Setenv("GOGC", "off")
|
||||
}
|
||||
if config.VendoredLinters && config.Install && config.Update {
|
||||
warning(`Linters are now vendored by default, --update ignored. The original
|
||||
behaviour can be re-enabled with --no-vendored-linters.
|
||||
|
||||
To request an update for a vendored linter file an issue at:
|
||||
https://github.com/alecthomas/gometalinter/issues/new
|
||||
`)
|
||||
config.Update = false
|
||||
}
|
||||
// Force sorting by path if checkstyle mode is selected
|
||||
// !jsonFlag check is required to handle:
|
||||
// gometalinter --json --checkstyle --sort=severity
|
||||
@ -476,6 +481,14 @@ func addGoBinsToPath(gopaths []string) []string {
|
||||
// configureEnvironmentForInstall sets GOPATH and GOBIN so that vendored linters
|
||||
// can be installed
|
||||
func configureEnvironmentForInstall() {
|
||||
if config.Update {
|
||||
warning(`Linters are now vendored by default, --update ignored. The original
|
||||
behaviour can be re-enabled with --no-vendored-linters.
|
||||
|
||||
To request an update for a vendored linter file an issue at:
|
||||
https://github.com/alecthomas/gometalinter/issues/new
|
||||
`)
|
||||
}
|
||||
gopaths := getGoPathList()
|
||||
vendorRoot := findVendoredLinters()
|
||||
if vendorRoot == "" {
|
||||
|
2
tools/vendor/github.com/golang/lint/golint/golint.go
generated
vendored
2
tools/vendor/github.com/golang/lint/golint/golint.go
generated
vendored
@ -16,7 +16,7 @@ import (
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/golang/lint"
|
||||
"golang.org/x/lint"
|
||||
)
|
||||
|
||||
var (
|
||||
|
7
tools/vendor/github.com/golang/lint/golint/import.go
generated
vendored
7
tools/vendor/github.com/golang/lint/golint/import.go
generated
vendored
@ -22,11 +22,10 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
var buildContext = build.Default
|
||||
|
||||
var (
|
||||
goroot = filepath.Clean(runtime.GOROOT())
|
||||
gorootSrc = filepath.Join(goroot, "src")
|
||||
buildContext = build.Default
|
||||
goroot = filepath.Clean(runtime.GOROOT())
|
||||
gorootSrc = filepath.Join(goroot, "src")
|
||||
)
|
||||
|
||||
// importPathsNoDotExpansion returns the import paths to use for the given
|
||||
|
6
tools/vendor/github.com/kisielk/errcheck/go.mod
generated
vendored
Normal file
6
tools/vendor/github.com/kisielk/errcheck/go.mod
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
module "github.com/kisielk/errcheck"
|
||||
|
||||
require (
|
||||
"github.com/kisielk/gotool" v1.0.0
|
||||
"golang.org/x/tools" v0.0.0-20180221164845-07fd8470d635
|
||||
)
|
26
tools/vendor/github.com/mattn/goveralls/README.md
generated
vendored
26
tools/vendor/github.com/mattn/goveralls/README.md
generated
vendored
@ -137,6 +137,32 @@ test:
|
||||
|
||||
For more information, See https://coveralls.zendesk.com/hc/en-us/articles/201342809-Go
|
||||
|
||||
## Sempahore
|
||||
|
||||
Store your Coveralls API token in `Environment Variables`:
|
||||
|
||||
```
|
||||
COVERALLS_TOKEN=your_token_goes_here
|
||||
```
|
||||
|
||||
More instructions on how to do this can be found in the [Semahore documentation](https://semaphoreci.com/docs/exporting-environment-variables.html).
|
||||
|
||||
Replace the `go test` line in your `Commands` with these lines:
|
||||
|
||||
```
|
||||
$ go get github.com/mattn/goveralls
|
||||
$ goveralls -service semaphore
|
||||
```
|
||||
|
||||
`goveralls` automatically use the environment variable `COVERALLS_TOKEN` as the
|
||||
default value for `-repotoken`.
|
||||
|
||||
You can use the `-v` flag to see verbose output from the test suite:
|
||||
|
||||
```
|
||||
$ goveralls -v -service semaphore
|
||||
```
|
||||
|
||||
# Authors
|
||||
|
||||
* Yasuhiro Matsumoto (a.k.a. mattn)
|
||||
|
8
tools/vendor/github.com/mattn/goveralls/goveralls.go
generated
vendored
8
tools/vendor/github.com/mattn/goveralls/goveralls.go
generated
vendored
@ -54,6 +54,7 @@ var (
|
||||
service = flag.String("service", "travis-ci", "The CI service or other environment in which the test suite was run. ")
|
||||
shallow = flag.Bool("shallow", false, "Shallow coveralls internal server errors")
|
||||
ignore = flag.String("ignore", "", "Comma separated files to ignore")
|
||||
show = flag.Bool("show", false, "Show which package is being tested")
|
||||
)
|
||||
|
||||
// usage supplants package flag's Usage variable
|
||||
@ -149,6 +150,9 @@ func getCoverage() ([]*SourceFile, error) {
|
||||
args = append(args, line)
|
||||
cmd.Args = args
|
||||
|
||||
if *show {
|
||||
fmt.Println("goveralls:", line)
|
||||
}
|
||||
err = cmd.Run()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("%v: %v", err, outBuf.String())
|
||||
@ -234,6 +238,8 @@ func process() error {
|
||||
jobId = circleCiJobId
|
||||
} else if appveyorJobId := os.Getenv("APPVEYOR_JOB_ID"); appveyorJobId != "" {
|
||||
jobId = appveyorJobId
|
||||
} else if semaphoreJobId := os.Getenv("SEMPAHORE_BUILD_NUMBER"); semaphoreJobId != "" {
|
||||
jobId = semaphoreJobId
|
||||
}
|
||||
|
||||
if *repotoken == "" {
|
||||
@ -250,6 +256,8 @@ func process() error {
|
||||
pullRequest = regexp.MustCompile(`[0-9]+$`).FindString(prURL)
|
||||
} else if prNumber := os.Getenv("APPVEYOR_PULL_REQUEST_NUMBER"); prNumber != "" {
|
||||
pullRequest = prNumber
|
||||
} else if prNumber := os.Getenv("PULL_REQUEST_NUMBER"); prNumber != "" {
|
||||
pullRequest = prNumber
|
||||
}
|
||||
|
||||
sourceFiles, err := getCoverage()
|
||||
|
10
tools/vendor/github.com/opennota/check/cmd/varcheck/varcheck.go
generated
vendored
10
tools/vendor/github.com/opennota/check/cmd/varcheck/varcheck.go
generated
vendored
@ -24,13 +24,15 @@ import (
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"go/types"
|
||||
|
||||
"github.com/kisielk/gotool"
|
||||
"golang.org/x/tools/go/loader"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
var (
|
||||
reportExported = flag.Bool("e", false, "Report exported variables and constants")
|
||||
buildTags = flag.String("tags", "", "Build tags")
|
||||
)
|
||||
|
||||
type object struct {
|
||||
@ -102,6 +104,9 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor {
|
||||
for _, val := range node.Values {
|
||||
ast.Walk(v, val)
|
||||
}
|
||||
if node.Type != nil {
|
||||
ast.Walk(v, node.Type)
|
||||
}
|
||||
return nil
|
||||
|
||||
case *ast.FuncDecl:
|
||||
@ -133,6 +138,9 @@ func main() {
|
||||
}
|
||||
|
||||
ctx := build.Default
|
||||
if *buildTags != "" {
|
||||
ctx.BuildTags = strings.Fields(*buildTags)
|
||||
}
|
||||
loadcfg := loader.Config{
|
||||
Build: &ctx,
|
||||
}
|
||||
|
9
tools/vendor/github.com/stripe/safesql/README.md
generated
vendored
9
tools/vendor/github.com/stripe/safesql/README.md
generated
vendored
@ -31,8 +31,8 @@ How does it work?
|
||||
-----------------
|
||||
|
||||
SafeSQL uses the static analysis utilities in [go/tools][tools] to search for
|
||||
all call sites of each of the `query` functions in package [database/sql][sql]
|
||||
(i.e., functions which accept a `string` parameter named `query`). It then makes
|
||||
all call sites of each of the `query` functions in packages ([database/sql][sql],[github.com/jinzhu/gorm][gorm],[github.com/jmoiron/sqlx][sqlx])
|
||||
(i.e., functions which accept a parameter named `query`,`sql`). It then makes
|
||||
sure that every such call site uses a query that is a compile-time constant.
|
||||
|
||||
The principle behind SafeSQL's safety guarantees is that queries that are
|
||||
@ -44,6 +44,8 @@ will not be allowed.
|
||||
|
||||
[tools]: https://godoc.org/golang.org/x/tools/go
|
||||
[sql]: http://golang.org/pkg/database/sql/
|
||||
[sqlx]: https://github.com/jmoiron/sqlx
|
||||
[gorm]: https://github.com/jinzhu/gorm
|
||||
|
||||
False positives
|
||||
---------------
|
||||
@ -66,8 +68,6 @@ a fundamental limitation: SafeSQL could recursively trace the `query` argument
|
||||
through every intervening helper function to ensure that its argument is always
|
||||
constant, but this code has yet to be written.
|
||||
|
||||
If you use a wrapper for `database/sql` (e.g., [`sqlx`][sqlx]), it's likely
|
||||
SafeSQL will not work for you because of this.
|
||||
|
||||
The second sort of false positive is based on a limitation in the sort of
|
||||
analysis SafeSQL performs: there are many safe SQL statements which are not
|
||||
@ -76,4 +76,3 @@ static analysis techniques (such as taint analysis) or user-provided safety
|
||||
annotations would be able to reduce the number of false positives, but this is
|
||||
expected to be a significant undertaking.
|
||||
|
||||
[sqlx]: https://github.com/jmoiron/sqlx
|
||||
|
109
tools/vendor/github.com/stripe/safesql/safesql.go
generated
vendored
109
tools/vendor/github.com/stripe/safesql/safesql.go
generated
vendored
@ -9,6 +9,7 @@ import (
|
||||
"go/build"
|
||||
"go/types"
|
||||
"os"
|
||||
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
@ -19,6 +20,27 @@ import (
|
||||
"golang.org/x/tools/go/ssa/ssautil"
|
||||
)
|
||||
|
||||
type sqlPackage struct {
|
||||
packageName string
|
||||
paramNames []string
|
||||
enable bool
|
||||
}
|
||||
|
||||
var sqlPackages = []sqlPackage{
|
||||
{
|
||||
packageName: "database/sql",
|
||||
paramNames: []string{"query"},
|
||||
},
|
||||
{
|
||||
packageName: "github.com/jinzhu/gorm",
|
||||
paramNames: []string{"sql", "query"},
|
||||
},
|
||||
{
|
||||
packageName: "github.com/jmoiron/sqlx",
|
||||
paramNames: []string{"query"},
|
||||
},
|
||||
}
|
||||
|
||||
func main() {
|
||||
var verbose, quiet bool
|
||||
flag.BoolVar(&verbose, "v", false, "Verbose mode")
|
||||
@ -38,21 +60,45 @@ func main() {
|
||||
c := loader.Config{
|
||||
FindPackage: FindPackage,
|
||||
}
|
||||
c.Import("database/sql")
|
||||
for _, pkg := range pkgs {
|
||||
c.Import(pkg)
|
||||
}
|
||||
p, err := c.Load()
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("error loading packages %v: %v\n", pkgs, err)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
imports := getImports(p)
|
||||
existOne := false
|
||||
for i := range sqlPackages {
|
||||
if _, exist := imports[sqlPackages[i].packageName]; exist {
|
||||
if verbose {
|
||||
fmt.Printf("Enabling support for %s\n", sqlPackages[i].packageName)
|
||||
}
|
||||
sqlPackages[i].enable = true
|
||||
existOne = true
|
||||
}
|
||||
}
|
||||
if !existOne {
|
||||
fmt.Printf("No packages in %v include a supported database driver", pkgs)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
s := ssautil.CreateProgram(p, 0)
|
||||
s.Build()
|
||||
|
||||
qms := FindQueryMethods(p.Package("database/sql").Pkg, s)
|
||||
qms := make([]*QueryMethod, 0)
|
||||
|
||||
for i := range sqlPackages {
|
||||
if sqlPackages[i].enable {
|
||||
qms = append(qms, FindQueryMethods(sqlPackages[i], p.Package(sqlPackages[i].packageName).Pkg, s)...)
|
||||
}
|
||||
}
|
||||
|
||||
if verbose {
|
||||
fmt.Println("database/sql functions that accept queries:")
|
||||
fmt.Println("database driver functions that accept queries:")
|
||||
for _, m := range qms {
|
||||
fmt.Printf("- %s (param %d)\n", m.Func, m.Param)
|
||||
}
|
||||
@ -75,6 +121,7 @@ func main() {
|
||||
}
|
||||
|
||||
bad := FindNonConstCalls(res.CallGraph, qms)
|
||||
|
||||
if len(bad) == 0 {
|
||||
if !quiet {
|
||||
fmt.Println(`You're safe from SQL injection! Yay \o/`)
|
||||
@ -82,14 +129,19 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("Found %d potentially unsafe SQL statements:\n", len(bad))
|
||||
if verbose {
|
||||
fmt.Printf("Found %d potentially unsafe SQL statements:\n", len(bad))
|
||||
}
|
||||
|
||||
for _, ci := range bad {
|
||||
pos := p.Fset.Position(ci.Pos())
|
||||
fmt.Printf("- %s\n", pos)
|
||||
}
|
||||
fmt.Println("Please ensure that all SQL queries you use are compile-time constants.")
|
||||
fmt.Println("You should always use parameterized queries or prepared statements")
|
||||
fmt.Println("instead of building queries from strings.")
|
||||
if verbose {
|
||||
fmt.Println("Please ensure that all SQL queries you use are compile-time constants.")
|
||||
fmt.Println("You should always use parameterized queries or prepared statements")
|
||||
fmt.Println("instead of building queries from strings.")
|
||||
}
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@ -104,7 +156,7 @@ type QueryMethod struct {
|
||||
|
||||
// FindQueryMethods locates all methods in the given package (assumed to be
|
||||
// package database/sql) with a string parameter named "query".
|
||||
func FindQueryMethods(sql *types.Package, ssa *ssa.Program) []*QueryMethod {
|
||||
func FindQueryMethods(sqlPackages sqlPackage, sql *types.Package, ssa *ssa.Program) []*QueryMethod {
|
||||
methods := make([]*QueryMethod, 0)
|
||||
scope := sql.Scope()
|
||||
for _, name := range scope.Names() {
|
||||
@ -122,7 +174,7 @@ func FindQueryMethods(sql *types.Package, ssa *ssa.Program) []*QueryMethod {
|
||||
continue
|
||||
}
|
||||
s := m.Type().(*types.Signature)
|
||||
if num, ok := FuncHasQuery(s); ok {
|
||||
if num, ok := FuncHasQuery(sqlPackages, s); ok {
|
||||
methods = append(methods, &QueryMethod{
|
||||
Func: m,
|
||||
SSA: ssa.FuncValue(m),
|
||||
@ -135,16 +187,16 @@ func FindQueryMethods(sql *types.Package, ssa *ssa.Program) []*QueryMethod {
|
||||
return methods
|
||||
}
|
||||
|
||||
var stringType types.Type = types.Typ[types.String]
|
||||
|
||||
// FuncHasQuery returns the offset of the string parameter named "query", or
|
||||
// none if no such parameter exists.
|
||||
func FuncHasQuery(s *types.Signature) (offset int, ok bool) {
|
||||
func FuncHasQuery(sqlPackages sqlPackage, s *types.Signature) (offset int, ok bool) {
|
||||
params := s.Params()
|
||||
for i := 0; i < params.Len(); i++ {
|
||||
v := params.At(i)
|
||||
if v.Name() == "query" && v.Type() == stringType {
|
||||
return i, true
|
||||
for _, paramName := range sqlPackages.paramNames {
|
||||
if v.Name() == paramName {
|
||||
return i, true
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
@ -164,6 +216,16 @@ func FindMains(p *loader.Program, s *ssa.Program) []*ssa.Package {
|
||||
return mains
|
||||
}
|
||||
|
||||
func getImports(p *loader.Program) map[string]interface{} {
|
||||
pkgs := make(map[string]interface{})
|
||||
for _, pkg := range p.AllPackages {
|
||||
if pkg.Importable {
|
||||
pkgs[pkg.Pkg.Path()] = nil
|
||||
}
|
||||
}
|
||||
return pkgs
|
||||
}
|
||||
|
||||
// FindNonConstCalls returns the set of callsites of the given set of methods
|
||||
// for which the "query" parameter is not a compile-time constant.
|
||||
func FindNonConstCalls(cg *callgraph.Graph, qms []*QueryMethod) []ssa.CallInstruction {
|
||||
@ -186,6 +248,18 @@ func FindNonConstCalls(cg *callgraph.Graph, qms []*QueryMethod) []ssa.CallInstru
|
||||
if _, ok := okFuncs[edge.Site.Parent()]; ok {
|
||||
continue
|
||||
}
|
||||
|
||||
isInternalSQLPkg := false
|
||||
for _, pkg := range sqlPackages {
|
||||
if pkg.packageName == edge.Caller.Func.Pkg.Pkg.Path() {
|
||||
isInternalSQLPkg = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if isInternalSQLPkg {
|
||||
continue
|
||||
}
|
||||
|
||||
cc := edge.Site.Common()
|
||||
args := cc.Args
|
||||
// The first parameter is occasionally the receiver.
|
||||
@ -195,7 +269,14 @@ func FindNonConstCalls(cg *callgraph.Graph, qms []*QueryMethod) []ssa.CallInstru
|
||||
panic("arg count mismatch")
|
||||
}
|
||||
v := args[m.Param]
|
||||
|
||||
if _, ok := v.(*ssa.Const); !ok {
|
||||
if inter, ok := v.(*ssa.MakeInterface); ok && types.IsInterface(v.(*ssa.MakeInterface).Type()) {
|
||||
if inter.X.Referrers() == nil || inter.X.Type() != types.Typ[types.String] {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
bad = append(bad, edge.Site)
|
||||
}
|
||||
}
|
||||
|
79
tools/vendor/github.com/tmthrgd/go-bindata/CONTRIBUTING.md
generated
vendored
Normal file
79
tools/vendor/github.com/tmthrgd/go-bindata/CONTRIBUTING.md
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
## Contribution guidelines.
|
||||
|
||||
So you wish to contribute to this project? Fantastic!
|
||||
Here are a few guidelines to help you do this in a
|
||||
streamlined fashion.
|
||||
|
||||
|
||||
## Bug reports
|
||||
|
||||
When supplying a bug report, please consider the following guidelines.
|
||||
These serve to make it easier for us to address the issue and find a solution.
|
||||
Most of these are pretty self-evident, but sometimes it is still necessary
|
||||
to reiterate them.
|
||||
|
||||
* Be clear in the way you express the problem. Use simple language and
|
||||
just enough of it to clearly define the issue. Not everyone is a native
|
||||
English speaker. And while most can handle themselves pretty well,
|
||||
it helps to stay away from more esoteric vocabulary.
|
||||
|
||||
Be patient with non-native English speakers. If their bug reports
|
||||
or comments are hard to understand, just ask for clarification.
|
||||
Do not start guessing at their meaning, as this may just lead to
|
||||
more confusion and misunderstandings.
|
||||
* Clearly define any information which is relevant to the problem.
|
||||
This includes library versions, operating system and any other
|
||||
external dependencies which may be needed.
|
||||
* Where applicable, provide a step-by-step listing of the way to
|
||||
reproduce the problem. Make sure this is the simplest possible
|
||||
way to do so. Omit any and all unneccesary steps, because they may
|
||||
just complicate our understanding of the real problem.
|
||||
If need be, create a whole new code project on your local machine,
|
||||
which specifically tries to create the problem you are running into;
|
||||
nothing more, nothing less.
|
||||
|
||||
Include this program in the bug report. It often suffices to paste
|
||||
the code in a [Gist](https://gist.github.com) or on the
|
||||
[Go playground](http://play.golang.org).
|
||||
* If possible, provide us with a listing of the steps you have already
|
||||
undertaken to solve the problem. This can save us a great deal of
|
||||
wasted time, trying out solutions you have already covered.
|
||||
|
||||
|
||||
## Pull requests
|
||||
|
||||
Bug reports are great. Supplying fixes to bugs is even better.
|
||||
When submitting a pull request, the following guidelines are
|
||||
good to keep in mind:
|
||||
|
||||
* `go fmt`: **Always** run your code through `go fmt`, before
|
||||
committing it. Code has to be readable by many different
|
||||
people. And the only way this will be as painless as possible,
|
||||
is if we all stick to the same code style.
|
||||
|
||||
Some of our projects may have automated build-servers hooked up
|
||||
to commit hooks. These will vet any submitted code and determine
|
||||
if it meets a set of properties. One of which is code formatting.
|
||||
These servers will outright deny a submission which has not been
|
||||
run through `go fmt`, even if the code itself is correct.
|
||||
|
||||
We try to maintain a zero-tolerance policy on this matter,
|
||||
because consistently formatted code makes life a great deal
|
||||
easier for everyone involved.
|
||||
* Commit log messages: When committing changes, do so often and
|
||||
clearly -- Even if you have changed only 1 character in a code
|
||||
comment. This means that commit log messages should clearly state
|
||||
exactly what the change does and why. If it fixes a known issue,
|
||||
then mention the issue number in the commit log. E.g.:
|
||||
|
||||
> Fixes return value for `foo/boo.Baz()` to be consistent with
|
||||
> the rest of the API. This addresses issue #32
|
||||
|
||||
Do not pile a lot of unrelated changes into a single commit.
|
||||
Pick and choose only those changes for a single commit, which are
|
||||
directly related. We would much rather see a hundred commits
|
||||
saying nothing but `"Runs go fmt"` in between any real fixes
|
||||
than have these style changes embedded in those real fixes.
|
||||
It creates a lot of noise when trying to review code.
|
||||
|
||||
|
54
tools/vendor/github.com/tmthrgd/go-bindata/LICENSE
generated
vendored
Normal file
54
tools/vendor/github.com/tmthrgd/go-bindata/LICENSE
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
Copyright (c) 2017, Tom Thorogood.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the Tom Thorogood nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
---- Portions of the source code are also covered by the following license: ----
|
||||
|
||||
Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
189
tools/vendor/github.com/tmthrgd/go-bindata/README.md
generated
vendored
Normal file
189
tools/vendor/github.com/tmthrgd/go-bindata/README.md
generated
vendored
Normal file
@ -0,0 +1,189 @@
|
||||
# go-bindata (forked by tmthrgd)
|
||||
|
||||
[![GoDoc](https://godoc.org/github.com/tmthrgd/go-bindata?status.svg)](https://godoc.org/github.com/tmthrgd/go-bindata)
|
||||
[![Build Status](https://travis-ci.org/tmthrgd/go-bindata.svg?branch=master)](https://travis-ci.org/tmthrgd/go-bindata)
|
||||
[![Go Report Card](https://goreportcard.com/badge/github.com/tmthrgd/go-bindata)](https://goreportcard.com/report/github.com/tmthrgd/go-bindata)
|
||||
|
||||
This is a rewrite of go-bindata that started life as fork of a fork. It
|
||||
was forked by [lestrrat](https://github.com/lestrrat/go-bindata) as
|
||||
[jteeuwen](https://github.com/jteeuwen/go-bindata) seemed to have
|
||||
abandoned the project.
|
||||
|
||||
Since that fork, go-bindata has been
|
||||
[largely rewritten](https://github.com/tmthrgd/go-bindata/compare/3adb6a8b66f07a123c3d44e8f6c7e78bbdd029c2...master)
|
||||
and has become a standalone project. While the generated code has changed,
|
||||
the generated API remains backwards compatible. The
|
||||
[package API](https://godoc.org/github.com/tmthrgd/go-bindata) is not
|
||||
backwards compatible. The CLI remains backwards compatible, but may not be
|
||||
as feature complete as the package API.
|
||||
|
||||
The suggested way of using go-bindata is from a single .go file with an
|
||||
ignore build tag (`// +build ignore`) run with
|
||||
`//go:generate go run path/to/generate.go`. (See
|
||||
[issue #2](https://github.com/tmthrgd/go-bindata/issues/2#issuecomment-290957538)
|
||||
for reference).
|
||||
|
||||
*Nota bene*: Most of the README that follows has not been updated to match
|
||||
the changes made since rewriting go-bindata and likely does not accurately
|
||||
represent the state of go-bindata.
|
||||
|
||||
## bindata
|
||||
|
||||
This package converts any file into manageable Go source code. Useful for
|
||||
embedding binary data into a go program. The file data is optionally gzip
|
||||
compressed before being converted to a raw byte slice.
|
||||
|
||||
### Installation
|
||||
|
||||
To install the library, use the following:
|
||||
|
||||
go get -u github.com/tmthrgd/go-bindata/...
|
||||
|
||||
### Accessing an asset
|
||||
|
||||
To access asset data, we use the `Asset(string) ([]byte, error)` function which
|
||||
is included in the generated output.
|
||||
|
||||
data, err := Asset("pub/style/foo.css")
|
||||
if err != nil {
|
||||
// Asset was not found.
|
||||
}
|
||||
|
||||
// use asset data
|
||||
|
||||
|
||||
### Debug vs Release builds
|
||||
|
||||
When invoking the program with the `-debug` flag, the generated code does
|
||||
not actually include the asset data. Instead, it generates function stubs
|
||||
which load the data from the original file on disk. The asset API remains
|
||||
identical between debug and release builds, so your code will not have to
|
||||
change.
|
||||
|
||||
This is useful during development when you expect the assets to change often.
|
||||
The host application using these assets uses the same API in both cases and
|
||||
will not have to care where the actual data comes from.
|
||||
|
||||
An example is a Go webserver with some embedded, static web content like
|
||||
HTML, JS and CSS files. While developing it, you do not want to rebuild the
|
||||
whole server and restart it every time you make a change to a bit of
|
||||
javascript. You just want to build and launch the server once. Then just press
|
||||
refresh in the browser to see those changes. Embedding the assets with the
|
||||
`debug` flag allows you to do just that. When you are finished developing and
|
||||
ready for deployment, just re-invoke `go-bindata` without the `-debug` flag.
|
||||
It will now embed the latest version of the assets.
|
||||
|
||||
|
||||
### Lower memory footprint
|
||||
|
||||
Using the `-nomemcopy` flag will alter the way the output file is generated.
|
||||
It will employ a hack that allows us to read the file data directly from
|
||||
the compiled program's `.rodata` section. This ensures that when we
|
||||
call our generated function, we omit unnecessary memcopies.
|
||||
|
||||
The downside of this, is that it requires dependencies on the `reflect` and
|
||||
`unsafe` packages. These may be restricted on platforms like AppEngine and
|
||||
thus prevent you from using this mode.
|
||||
|
||||
Another disadvantage is that the byte slice we create is strictly read-only.
|
||||
For most use-cases this is not a problem, but if you ever try to alter the
|
||||
returned byte slice, a runtime panic is thrown. Use this mode only on target
|
||||
platforms where memory constraints are an issue.
|
||||
|
||||
The default behaviour is to use the old code generation method. This
|
||||
prevents the two previously mentioned issues, but will employ at least one
|
||||
extra memcopy and thus increase memory requirements.
|
||||
|
||||
For instance, consider the following two examples:
|
||||
|
||||
This would be the default mode, using an extra memcopy but gives a safe
|
||||
implementation without dependencies on `reflect` and `unsafe`:
|
||||
|
||||
```go
|
||||
func myfile() []byte {
|
||||
return []byte{0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a}
|
||||
}
|
||||
```
|
||||
|
||||
Here is the same functionality, but uses the `.rodata` hack.
|
||||
The byte slice returned from this example can not be written to without
|
||||
generating a runtime error.
|
||||
|
||||
```go
|
||||
var _myfile = "\x89\x50\x4e\x47\x0d\x0a\x1a"
|
||||
|
||||
func myfile() []byte {
|
||||
var empty [0]byte
|
||||
sx := (*reflect.StringHeader)(unsafe.Pointer(&_myfile))
|
||||
b := empty[:]
|
||||
bx := (*reflect.SliceHeader)(unsafe.Pointer(&b))
|
||||
bx.Data = sx.Data
|
||||
bx.Len = len(_myfile)
|
||||
bx.Cap = bx.Len
|
||||
return b
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Optional compression
|
||||
|
||||
When the `-nocompress` flag is given, the supplied resource is *not* GZIP
|
||||
compressed before being turned into Go code. The data should still be accessed
|
||||
through a function call, so nothing changes in the usage of the generated file.
|
||||
|
||||
This feature is useful if you do not care for compression, or the supplied
|
||||
resource is already compressed. Doing it again would not add any value and may
|
||||
even increase the size of the data.
|
||||
|
||||
The default behaviour of the program is to use compression.
|
||||
|
||||
|
||||
### Path prefix stripping
|
||||
|
||||
The keys used in the `_bindata` map are the same as the input file name
|
||||
passed to `go-bindata`. This includes the path. In most cases, this is not
|
||||
desirable, as it puts potentially sensitive information in your code base.
|
||||
For this purpose, the tool supplies another command line flag `-prefix`.
|
||||
This accepts a portion of a path name, which should be stripped off from
|
||||
the map keys and function names.
|
||||
|
||||
For example, running without the `-prefix` flag, we get:
|
||||
|
||||
$ go-bindata /path/to/templates/
|
||||
|
||||
_bindata["/path/to/templates/foo.html"] = path_to_templates_foo_html
|
||||
|
||||
Running with the `-prefix` flag, we get:
|
||||
|
||||
$ go-bindata -prefix "/path/to/" /path/to/templates/
|
||||
|
||||
_bindata["templates/foo.html"] = templates_foo_html
|
||||
|
||||
|
||||
### Build tags
|
||||
|
||||
With the optional `-tags` flag, you can specify any go build tags that
|
||||
must be fulfilled for the output file to be included in a build. This
|
||||
is useful when including binary data in multiple formats, where the desired
|
||||
format is specified at build time with the appropriate tags.
|
||||
|
||||
The tags are appended to a `// +build` line in the beginning of the output file
|
||||
and must follow the build tags syntax specified by the go tool.
|
||||
|
||||
## Testing
|
||||
|
||||
To execute the test case, run the following commands:
|
||||
|
||||
go get -t -u github.com/tmthrgd/go-bindata
|
||||
go test github.com/tmthrgd/go-bindata
|
||||
|
||||
### Test corpus
|
||||
|
||||
To generate the corpus-sha256sums needed for travis, run the following commands:
|
||||
|
||||
[ -d .testcorpus ] && rm -r .testcorpus
|
||||
go test -run TestCorpus -randtests 50 -corpus .testcorpus -gencorpus .
|
||||
cd .testcorpus && sha256sum * > ../corpus-sha256sums; cd ..
|
||||
|
||||
This must be done every time the generated code changes, but can be skipped while working
|
||||
on a pull request until it is ready to merge.
|
22
tools/vendor/github.com/tmthrgd/go-bindata/base32_compat.go
generated
vendored
Normal file
22
tools/vendor/github.com/tmthrgd/go-bindata/base32_compat.go
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.9
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"encoding/base32"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// base32Enc encodes name hashes as unpadded, lowercase standard
// base32, matching the behaviour of the base32.NoPadding encoding
// used by base32_go19.go on Go 1.9 and later.
var base32Enc = base32EncodingCompat{
	base32.NewEncoding("abcdefghijklmnopqrstuvwxyz234567"),
}

// base32EncodingCompat wraps *base32.Encoding to strip padding on Go
// releases before 1.9, where Encoding.WithPadding is unavailable.
type base32EncodingCompat struct{ *base32.Encoding }

// EncodeToString encodes src and removes all trailing '=' padding,
// emulating base32.NoPadding. Base32 may pad with up to six '='
// characters (input length mod 5 determines the count), so TrimRight
// is required here: TrimSuffix would remove at most one '=' and leave
// the rest behind, diverging from the Go 1.9+ implementation.
func (enc base32EncodingCompat) EncodeToString(src []byte) string {
	return strings.TrimRight(enc.Encoding.EncodeToString(src), "=")
}
|
11
tools/vendor/github.com/tmthrgd/go-bindata/base32_go19.go
generated
vendored
Normal file
11
tools/vendor/github.com/tmthrgd/go-bindata/base32_go19.go
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
// +build go1.9
|
||||
|
||||
package bindata
|
||||
|
||||
import "encoding/base32"
|
||||
|
||||
// base32Enc encodes name hashes using lowercase standard base32
// (the RFC 4648 alphabet, lowercased) with padding disabled.
var base32Enc = base32.NewEncoding("abcdefghijklmnopqrstuvwxyz234567").WithPadding(base32.NoPadding)
|
49
tools/vendor/github.com/tmthrgd/go-bindata/buffers.go
generated
vendored
Normal file
49
tools/vendor/github.com/tmthrgd/go-bindata/buffers.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// bufPool recycles scratch buffers used by binAsset.copy, so repeated
// copies avoid allocating a fresh buffer per call.
var bufPool = &sync.Pool{
	New: func() interface{} {
		return new(bytes.Buffer)
	},
}

// copy streams the asset's contents into w through a pooled scratch
// buffer. It returns the first error encountered while opening,
// copying or closing the asset.
func (asset *binAsset) copy(w io.Writer) error {
	// Open is defined elsewhere in the package (not visible in this
	// file); it yields a read-closer over the asset data.
	rc, err := asset.Open()
	if err != nil {
		return err
	}

	// Default scratch size; enlarged below when the reader can report
	// its size via an optional Stat method.
	n := 4 * 1024
	if s, ok := rc.(interface {
		Stat() (os.FileInfo, error)
	}); ok {
		if fi, err := s.Stat(); err == nil {
			// Don't preallocate a huge buffer, just in case.
			if size := fi.Size(); size < 1e9 {
				n = int(size) + bytes.MinRead
			}
		}
	}

	buf := bufPool.Get().(*bytes.Buffer)
	buf.Grow(n)

	// Borrow the buffer's full backing array as the CopyBuffer scratch
	// slice; the buffer's logical contents are irrelevant here.
	_, err = io.CopyBuffer(w, rc, buf.Bytes()[:buf.Cap()])

	// Always close, but report the copy error in preference to any
	// close error.
	if closeErr := rc.Close(); err == nil {
		err = closeErr
	}

	bufPool.Put(buf)
	return err
}
|
51
tools/vendor/github.com/tmthrgd/go-bindata/common.go
generated
vendored
Normal file
51
tools/vendor/github.com/tmthrgd/go-bindata/common.go
generated
vendored
Normal file
@ -0,0 +1,51 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import "text/template"
|
||||
|
||||
// init registers the "common" sub-template on baseTemplate. The
// rendered output is the shared public API of every generated file:
// Asset, MustAsset and AssetNames, plus the Restore helpers when the
// Restore option is enabled.
func init() {
	template.Must(baseTemplate.New("common").Parse(`// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
func Asset(name string) ([]byte, error) {
	data, _, err := AssetAndInfo(name)
	return data, err
}

// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
func MustAsset(name string) []byte {
	a, err := Asset(name)
	if err != nil {
		panic("asset: Asset(" + name + "): " + err.Error())
	}

	return a
}

// AssetNames returns the names of the assets.
func AssetNames() []string {
	names := make([]string, 0, len(_bindata))
	for name := range _bindata {
		names = append(names, name)
	}

	return names
}

{{- if $.Restore}}

// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
	return restore.Asset(dir, name, AssetAndInfo)
}

// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
	return restore.Assets(dir, name, AssetDir, AssetAndInfo)
}
{{- end}}`))
}
|
209
tools/vendor/github.com/tmthrgd/go-bindata/config.go
generated
vendored
Normal file
209
tools/vendor/github.com/tmthrgd/go-bindata/config.go
generated
vendored
Normal file
@ -0,0 +1,209 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"hash"
|
||||
"os"
|
||||
|
||||
"github.com/tmthrgd/go-bindata/internal/identifier"
|
||||
)
|
||||
|
||||
// HashFormat specifies which format to use when hashing names.
type HashFormat int

const (
	// NameUnchanged leaves the file name unchanged.
	NameUnchanged HashFormat = iota
	// DirHash formats names like path/to/hash/name.ext.
	DirHash
	// NameHashSuffix formats names like path/to/name-hash.ext.
	NameHashSuffix
	// HashWithExt formats names like path/to/hash.ext.
	HashWithExt
)

// hashFormatNames holds the display name of each defined HashFormat,
// indexed by its constant value.
var hashFormatNames = [...]string{
	NameUnchanged:  "unchanged",
	DirHash:        "dir",
	NameHashSuffix: "namesuffix",
	HashWithExt:    "hashext",
}

// String returns the display name of the format, or "unknown" for
// values outside the defined set.
func (f HashFormat) String() string {
	if f < 0 || int(f) >= len(hashFormatNames) {
		return "unknown"
	}
	return hashFormatNames[f]
}
|
||||
|
||||
// HashEncoding specifies which encoding to use when hashing names.
type HashEncoding int

const (
	// HexHash uses hexadecimal encoding.
	HexHash HashEncoding = iota
	// Base32Hash uses unpadded, lowercase standard base32
	// encoding (see RFC 4648).
	Base32Hash
	// Base64Hash uses an unpadded URL-safe base64 encoding
	// defined in RFC 4648.
	Base64Hash
)

// hashEncodingNames maps each known HashEncoding to its display name.
var hashEncodingNames = map[HashEncoding]string{
	HexHash:    "hex",
	Base32Hash: "base32",
	Base64Hash: "base64",
}

// String returns the display name of the encoding, or "unknown" for
// values outside the defined set.
func (he HashEncoding) String() string {
	if name, ok := hashEncodingNames[he]; ok {
		return name
	}
	return "unknown"
}
||||
|
||||
// GenerateOptions defines a set of options to use
// when generating the Go code.
type GenerateOptions struct {
	// Name of the package to use.
	Package string

	// Tags specify a set of optional build tags, which should be
	// included in the generated output. The tags are appended to a
	// `// +build` line in the beginning of the output file
	// and must follow the build tags syntax specified by the go tool.
	Tags string

	// MemCopy will alter the way the output file is generated.
	//
	// If false, it will employ a hack that allows us to read the file data directly
	// from the compiled program's `.rodata` section. This ensures that when we
	// call our generated function, we omit unnecessary mem copies.
	//
	// The downside of this, is that it requires dependencies on the `reflect` and
	// `unsafe` packages. These may be restricted on platforms like AppEngine and
	// thus prevent you from using this mode.
	//
	// Another disadvantage is that the byte slice we create, is strictly read-only.
	// For most use-cases this is not a problem, but if you ever try to alter the
	// returned byte slice, a runtime panic is thrown. Use this mode only on target
	// platforms where memory constraints are an issue.
	//
	// The default behaviour is to use the old code generation method. This
	// prevents the two previously mentioned issues, but will employ at least one
	// extra memcopy and thus increase memory requirements.
	//
	// For instance, consider the following two examples:
	//
	// This would be the default mode, using an extra memcopy but gives a safe
	// implementation without dependencies on `reflect` and `unsafe`:
	//
	//	func myfile() []byte {
	//		return []byte{0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a}
	//	}
	//
	// Here is the same functionality, but uses the `.rodata` hack.
	// The byte slice returned from this example can not be written to without
	// generating a runtime error.
	//
	//	var _myfile = "\x89\x50\x4e\x47\x0d\x0a\x1a"
	//
	//	func myfile() []byte {
	//		var empty [0]byte
	//		sx := (*reflect.StringHeader)(unsafe.Pointer(&_myfile))
	//		b := empty[:]
	//		bx := (*reflect.SliceHeader)(unsafe.Pointer(&b))
	//		bx.Data = sx.Data
	//		bx.Len = len(_myfile)
	//		bx.Cap = bx.Len
	//		return b
	//	}
	MemCopy bool

	// Compress means the assets are GZIP compressed before being turned into
	// Go code. The generated function will automatically unzip the file data
	// when called. Defaults to true.
	Compress bool

	// Perform a debug build. This generates an asset file, which
	// loads the asset contents directly from disk at their original
	// location, instead of embedding the contents in the code.
	//
	// This is mostly useful if you anticipate that the assets are
	// going to change during your development cycle. You will always
	// want your code to access the latest version of the asset.
	// Only in release mode, will the assets actually be embedded
	// in the code. The default behaviour is Release mode.
	Debug bool

	// Perform a dev build, which is nearly identical to the debug option. The
	// only difference is that instead of absolute file paths in generated code,
	// it expects a variable, `rootDir`, to be set in the generated code's
	// package (the author needs to do this manually), which it then prepends to
	// an asset's name to construct the file path on disk.
	//
	// This is mainly so you can push the generated code file to a shared
	// repository.
	Dev bool

	// When true, the AssetDir API will be provided.
	AssetDir bool

	// When true, only gzip decompress the data on first use.
	DecompressOnce bool

	// Restore enables the RestoreAsset/RestoreAssets API.
	//
	// Deprecated: use github.com/tmthrgd/go-bindata/restore instead.
	Restore bool

	// When false, size, mode and modtime are not preserved from files.
	Metadata bool
	// When nonzero, use this as mode for all files.
	Mode os.FileMode
	// When nonzero, use this as unix timestamp for all files.
	ModTime int64

	// Hash is used to produce a hash of the file.
	Hash hash.Hash
	// Which of the given name hashing formats to use.
	HashFormat HashFormat
	// The length of the hash to use, defaults to 16 characters.
	HashLength uint
	// The encoding to use to encode the name hash.
	HashEncoding HashEncoding
}
|
||||
|
||||
// validate ensures the config has sane values.
|
||||
// Part of which means checking if certain file/directory paths exist.
|
||||
func (opts *GenerateOptions) validate() error {
|
||||
if len(opts.Package) == 0 {
|
||||
return errors.New("go-bindata: missing package name")
|
||||
}
|
||||
|
||||
if identifier.Identifier(opts.Package) != opts.Package {
|
||||
return errors.New("go-bindata: package name is not valid identifier")
|
||||
}
|
||||
|
||||
if opts.Metadata && (opts.Mode != 0 && opts.ModTime != 0) {
|
||||
return errors.New("go-bindata: if Metadata is true, one of Mode or ModTime must be zero")
|
||||
}
|
||||
|
||||
if opts.Mode&^os.ModePerm != 0 {
|
||||
return errors.New("go-bindata: invalid mode specified")
|
||||
}
|
||||
|
||||
if opts.Hash != nil && (opts.Debug || opts.Dev) {
|
||||
return errors.New("go-bindata: Hash is not compatible with Debug and Dev")
|
||||
}
|
||||
|
||||
if opts.Restore && !opts.AssetDir {
|
||||
return errors.New("go-bindata: Restore cannot be used without AssetDir")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
68
tools/vendor/github.com/tmthrgd/go-bindata/corpus-sha256sums
generated
vendored
Normal file
68
tools/vendor/github.com/tmthrgd/go-bindata/corpus-sha256sums
generated
vendored
Normal file
@ -0,0 +1,68 @@
|
||||
fc42e0de8a85c266577354c445ac1c1e442518a5b4c8ee9805412e43122f71fb asset-dir.go
|
||||
e75b6c1141dc6b286c9fe93d6a94ff417af042e3b619db1a7d8f58451e984e34 compress.go
|
||||
528410b942177ea42119c5f23e39085cbf9dfce8ba96d509f5345609e59ce309 copy.go
|
||||
4b6c823a2574f6c5f1cdc3437092b06297c0c6708e6e418c336a704a3bd6d3eb debug.go
|
||||
1c0b2f7a7f97f82a4d89ce82b20a851772ddc7c8865528867bf6ca1977c7ac45 decompress-once.go
|
||||
036697010d259574c5a15edb33f1e286af58a40e1ab91200b2a4b7d9fc40f187 default.go
|
||||
63cb3ba6ffc51486843abe7eb2370c07a0d05146fc34a2379b364d68a31b49ef dev.go
|
||||
b3d63994930c24602ebbe40437d07bdda9720d1cde48df43befd8a553e90504b hash-copy.go
|
||||
33f7e152f69bc9e9881d74c82e3e27a2c130d1592b96727d3467615ee700788e hash-dir.go
|
||||
3b8ff973c806b675eb75c227a0fe5d708dd2b4b8e3174b98391cbf3480877ece hash-enc-b32.go
|
||||
38395de6c76cbf57eb6fc028b3d317ba2b2a5d9a843fb970df3a6f62b09a1fce hash-enc-b64.go
|
||||
cbb867c075721ae0b7f85d00227e81f607f6310135379a5f4d98fb298909f56a hash-hashext.go
|
||||
a9af7ffafc51642ef48af1d7622ff6a88c0c0c1b831cae2509ed769b12b52097 hash-suffix.go
|
||||
362b96b80fae7b41934a33d8514d71664acb041624c981c61c172d4037502eaf hash-unchanged.go
|
||||
e0512fb269ace4db9df83c075d9f82fceb63e3551a3c25e2dc551f67dc7c72f4 metadata.go
|
||||
6cf3e7ac27ab257f89f37b9353ccb10cab8011072e9209cdf1700af82b0b1ac4 old-default.go
|
||||
bf6f8e4f11b730a5ac3b13040d2ba50785296b6f993f1e0640fdc3c3e53f4d85 package.go
|
||||
a59cbba67616477980ca99dc678a8e66270494235f84a203372841737535f824 random-#10.go
|
||||
3ef50db7180413665ea96ebb63b6656168937a106fa9dbf0120cc2c40e166b20 random-#11.go
|
||||
ac6d298a012ddf45ff33655d37b9e64c6a75ee2756038b0896b3f98a38a29c66 random-#12.go
|
||||
a65c8e35b650cd922b85804314234415adc0732d9b46a2a95be5616e725a9341 random-#13.go
|
||||
fad9e724379617bddb4131bcd93e2608f867782acae1439d9419dc356701ab0d random-#14.go
|
||||
e96ccf210c4b1b8e51942ae8674d9dd3f38f71920bc54dc3590362e6ec8b7faf random-#15.go
|
||||
d8a1df711301ba25dfb90204a953aa0722e7c821e03b90f84797dfafec418de3 random-#16.go
|
||||
9256675f720083bf23aa380a0365a35b8719591bfb2f51b7e306e74c7a34d38a random-#17.go
|
||||
f50b6362f47eeb7cfdadd4ab2f309fd2c99c76031c9e7335ff6ce06ab3c70285 random-#18.go
|
||||
cc477c7647957306931d72b521a8d6919edd41f2bccf2dd8508381bf9f5105fe random-#19.go
|
||||
5bd5a4569b63a4a3d2399c943fa5d6d813a0e1e97b36b009503549e34668fb81 random-#1.go
|
||||
8a8a0fc1a2e417cba60eb025d3ce741897e69906b9133cdc10b7ac61bbb27bf4 random-#20.go
|
||||
43d34aaad7e9a6c6192126bb941f94305bca8b61c77581662bc7ce5f2dbcbdc9 random-#21.go
|
||||
b4798deab1655ad7b5e8b5b7cd1f51b444ff4a2560107fd9787535ab1d3d7f80 random-#22.go
|
||||
6ee3706961f8d6e80ed40e8603678371524259dc30cfeb4d335a354f4ebf7f07 random-#23.go
|
||||
4d74619c90d2c9385e28ef2321964878a88395f3b6968d67c003e3cb16a822b1 random-#24.go
|
||||
34742bb4fc1edaea5a4fe679d1c0717b54445c52421e31b06fe73e8744bf0734 random-#25.go
|
||||
a61cf74fdcd8b6bbbcbf02a3c58a52be164603fc6082facf7158c132bf5824aa random-#26.go
|
||||
2d78d2006d49fcdd45d7b8f6b4318cd799f194067bf3fc635b33178161c7d63c random-#27.go
|
||||
d348b22e3fdc3c98da7a7564483b08742cef36f54f3e30500ecf69ccd0b97777 random-#28.go
|
||||
ebedf92840958164911c44045dfce0d3c7ed18973a3893bcfb6f9239e13b31b8 random-#29.go
|
||||
c5ca8a9518e5f0926d48a621d595f48d76cb2891a40c0b22f3534a3a91031a4f random-#2.go
|
||||
a0c77fa7246dd63e5f5dc4663d52cf6abf0e425a86726ebb96973f5b709f97c6 random-#30.go
|
||||
d8d8ef43d718981ead2725cd15a17ef6fc33956dbca65bc9fd5ef5cf6b8da066 random-#31.go
|
||||
254b340abe80d09fd7c6ba21bd7050b705462d7b6d640789489a3dfb3c1620de random-#32.go
|
||||
d5dc83c6b344fd3374438c59fe1d2e42e6f5c15fcc11aeb2248f6a7f6882b596 random-#33.go
|
||||
073ecfa891403d7a87d170fa6c9394ee2a2fff6d8fe621d68b3da0fdad2438e4 random-#34.go
|
||||
7a293b4be49a2813643a74d32c91cc0a1cf1010a760d5d6ae40e3a94da70d70d random-#35.go
|
||||
02fac0bed12bce3cf4ffb00376079f5b816634bc1b59a451643c1e8c0e860837 random-#36.go
|
||||
201e6fab72a37d75665107b09dee6fb84722d4265fc8400dc855f6d5a449b27d random-#37.go
|
||||
38d1db1022acb1e92725e6bb8caf8bcfbd5dea0ae85f8bd903c6c16993d51ee5 random-#38.go
|
||||
a7ba47ad58d95821fede9cf11339b35e19c4173eb35131a459ed431dbb02a573 random-#39.go
|
||||
42ad847d45c941ca674e660a581867c07f1f568242cac008589cdee8500474e2 random-#3.go
|
||||
c38ad926d43afa044e784fc41a1f1576a1207713c390c1cc9d5f5c800ad7c056 random-#40.go
|
||||
e300d3ccfbc656eefebd30e6b341a081163aea6b8e1e2d2bbad8ee9b5b82a1b5 random-#41.go
|
||||
fc7d235f1f12d9d2d96be00810e66572a63adb86ff7cba17e77bc45018ade66a random-#42.go
|
||||
56415a962fdd0a3453526620ad8dff8b90b5731f3b5f8247c0d33e35ae343002 random-#43.go
|
||||
4ca790becea62b89cb687f81819f09f27274a2f64f6ca0ab919b07577d3ea12f random-#44.go
|
||||
154ba30b7bd61dac5234e5616e2b5097722001234119442bcde4e4aa5db0a43a random-#45.go
|
||||
86bbb3ad9cbba89cb1262677d490dd3a87ca091892010381fffdd2af8ad74761 random-#46.go
|
||||
0d6b42a9702911ef1593047197dd19d0c6402a9b4542e53697281109b8eca566 random-#47.go
|
||||
6384db969ed03ae0704657fbf3ad5b25a169f2166ee8cd643daa9d887af59aae random-#48.go
|
||||
bf3ba0bc573208fdcc6a48067d4fbfb965da54f6b641752e192197a2db480120 random-#49.go
|
||||
af77fcf2ac300945c9c726d0f1de13366522f4f6cb44190d036a86c140eb54d1 random-#4.go
|
||||
f672c3e1c8b3449695167bfad4b68dd7aff36b4d0148a1d82e429c3eb24be99c random-#50.go
|
||||
7a9900d444ae8ec5e241a58dd2cca600d1dc2d1101d8f23e3dc65fcd2c2e560f random-#5.go
|
||||
dd19e896cdb36b1e697f5e2e9e8ebf313c3ee2bd92412097f9ec1742635f11cf random-#6.go
|
||||
f458f51ae48736aaa14fd0720322114c303ebab93d2dc6dddc33a1fb759407f1 random-#7.go
|
||||
eb785847c9dbdd927b98e71376d00aca87e887a380b67ce8448c39b0caff5d98 random-#8.go
|
||||
a27d2578f5aefa2086c3a296f1b1af989e126cce226a4a04172a37a7048f39eb random-#9.go
|
||||
b1278bab5b1741bef1f32893397b8b1c61fa59182d95ca0c236c8995e24d9aa7 tags.go
|
7
tools/vendor/github.com/tmthrgd/go-bindata/corpus_test_travis.sh
generated
vendored
Executable file
7
tools/vendor/github.com/tmthrgd/go-bindata/corpus_test_travis.sh
generated
vendored
Executable file
@ -0,0 +1,7 @@
|
||||
#!/bin/bash
# Regenerates the test corpus and verifies it against the committed
# corpus-sha256sums file. The whole check is skipped for pull-request
# builds (i.e. whenever TRAVIS_PULL_REQUEST is not "false").
set -ev

if [ "${TRAVIS_PULL_REQUEST}" = "false" ]; then
	# Generate 50 randomized corpus entries into .travis-corpus ...
	go test -v -race -run TestCorpus -randtests 50 -corpus .travis-corpus -gencorpus .
	# ... then require every generated file to match its recorded hash.
	cd .travis-corpus && sha256sum -c --quiet --strict ../corpus-sha256sums
fi
|
84
tools/vendor/github.com/tmthrgd/go-bindata/debug.go
generated
vendored
Normal file
84
tools/vendor/github.com/tmthrgd/go-bindata/debug.go
generated
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import "text/template"
|
||||
|
||||
// init registers the "debug" sub-template (plus its "bindata-debug"
// and "bindata-dev" helpers) on baseTemplate. The rendered output
// loads assets from disk at runtime via ioutil.ReadFile instead of
// embedding them: _bindata maps asset names to file paths — absolute
// paths in debug builds, rootDir-relative names in dev builds.
func init() {
	template.Must(template.Must(template.Must(baseTemplate.New("debug").Funcs(template.FuncMap{
		"format": formatTemplate,
	}).Parse(`import (
	"io/ioutil"
	"os"
	"path/filepath"
{{- if $.AssetDir}}
	"strings"
{{- end}}
{{- if $.Restore}}

	"github.com/tmthrgd/go-bindata/restore"
{{- end}}
)

// AssetAndInfo loads and returns the asset and asset info for the
// given name. It returns an error if the asset could not be found
// or could not be loaded.
func AssetAndInfo(name string) ([]byte, os.FileInfo, error) {
	path, ok := _bindata[filepath.ToSlash(name)]
	if !ok {
		return nil, nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
	}
{{- if $.Dev}}

	path = filepath.Join(rootDir, path)
{{- end}}

	data, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, nil, err
	}

	fi, err := os.Stat(path)
	if err != nil {
		return nil, nil, err
	}

	return data, fi, nil
}

// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
func AssetInfo(name string) (os.FileInfo, error) {
	path, ok := _bindata[filepath.ToSlash(name)]
	if !ok {
		return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
	}
{{- if $.Dev}}

	path = filepath.Join(rootDir, path)
{{- end}}
	return os.Stat(path)
}

// _bindata is a table, mapping each file to its path.
{{if $.Dev -}}
{{format "bindata-dev" $}}
{{- else -}}
{{format "bindata-debug" $}}
{{- end}}`)).New("bindata-debug").Parse(`
var _bindata = map[string]string{
{{range .Assets -}}
	{{printf "%q" .Name}}: {{printf "%q" .AbsolutePath}},
{{end -}}
}`)).New("bindata-dev").Parse(`
var _bindata = map[string]string{
{{range .Assets -}}
	{{printf "%q" .Name}}: {{printf "%q" .Name}},
{{end -}}
}`))
}
|
128
tools/vendor/github.com/tmthrgd/go-bindata/doc.go
generated
vendored
Normal file
128
tools/vendor/github.com/tmthrgd/go-bindata/doc.go
generated
vendored
Normal file
@ -0,0 +1,128 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
Package bindata converts any file into manageable Go source code. Useful for
|
||||
embedding binary data into a go program. The file data is optionally gzip
|
||||
compressed before being converted to a raw byte slice.
|
||||
|
||||
The following paragraphs cover some of the customization options
|
||||
which can be specified in the Config struct, which must be passed into
|
||||
the Translate() call.
|
||||
|
||||
|
||||
Debug vs Release builds
|
||||
|
||||
When used with the `Debug` option, the generated code does not actually include
|
||||
the asset data. Instead, it generates function stubs which load the data from
|
||||
the original file on disk. The asset API remains identical between debug and
|
||||
release builds, so your code will not have to change.
|
||||
|
||||
This is useful during development when you expect the assets to change often.
|
||||
The host application using these assets uses the same API in both cases and
|
||||
will not have to care where the actual data comes from.
|
||||
|
||||
An example is a Go webserver with some embedded, static web content like
|
||||
HTML, JS and CSS files. While developing it, you do not want to rebuild the
|
||||
whole server and restart it every time you make a change to a bit of
|
||||
javascript. You just want to build and launch the server once. Then just press
|
||||
refresh in the browser to see those changes. Embedding the assets with the
|
||||
`debug` flag allows you to do just that. When you are finished developing and
|
||||
ready for deployment, just re-invoke `go-bindata` without the `-debug` flag.
|
||||
It will now embed the latest version of the assets.
|
||||
|
||||
|
||||
Lower memory footprint
|
||||
|
||||
The `MemCopy` option will alter the way the output file is generated.
|
||||
If false, it will employ a hack that allows us to read the file data directly
|
||||
from the compiled program's `.rodata` section. This ensures that when we
call our generated function, we omit unnecessary memcopies.
|
||||
|
||||
The downside of this, is that it requires dependencies on the `reflect` and
|
||||
`unsafe` packages. These may be restricted on platforms like AppEngine and
|
||||
thus prevent you from using this mode.
|
||||
|
||||
Another disadvantage is that the byte slice we create, is strictly read-only.
|
||||
For most use-cases this is not a problem, but if you ever try to alter the
|
||||
returned byte slice, a runtime panic is thrown. Use this mode only on target
|
||||
platforms where memory constraints are an issue.
|
||||
|
||||
The default behaviour is to use the old code generation method. This
|
||||
prevents the two previously mentioned issues, but will employ at least one
|
||||
extra memcopy and thus increase memory requirements.
|
||||
|
||||
For instance, consider the following two examples:
|
||||
|
||||
This would be the default mode, using an extra memcopy but gives a safe
|
||||
implementation without dependencies on `reflect` and `unsafe`:
|
||||
|
||||
func myfile() []byte {
|
||||
return []byte{0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a}
|
||||
}
|
||||
|
||||
Here is the same functionality, but uses the `.rodata` hack.
|
||||
The byte slice returned from this example can not be written to without
|
||||
generating a runtime error.
|
||||
|
||||
var _myfile = "\x89\x50\x4e\x47\x0d\x0a\x1a"
|
||||
|
||||
func myfile() []byte {
|
||||
var empty [0]byte
|
||||
sx := (*reflect.StringHeader)(unsafe.Pointer(&_myfile))
|
||||
b := empty[:]
|
||||
bx := (*reflect.SliceHeader)(unsafe.Pointer(&b))
|
||||
bx.Data = sx.Data
|
||||
bx.Len = len(_myfile)
|
||||
bx.Cap = bx.Len
|
||||
return b
|
||||
}
|
||||
|
||||
|
||||
Optional compression
|
||||
|
||||
The Compress option indicates that the supplied assets are GZIP compressed before
|
||||
being turned into Go code. The data should still be accessed through a function
|
||||
call, so nothing changes in the API.
|
||||
|
||||
This feature is useful if you do not care for compression, or the supplied
|
||||
resource is already compressed. Doing it again would not add any value and may
|
||||
even increase the size of the data.
|
||||
|
||||
The default behaviour of the program is to use compression.
|
||||
|
||||
|
||||
Path prefix stripping
|
||||
|
||||
The keys used in the `_bindata` map are the same as the input file name
|
||||
passed to `go-bindata`. This includes the path. In most cases, this is not
|
||||
desirable, as it puts potentially sensitive information in your code base.
|
||||
For this purpose, the tool supplies another command line flag `-prefix`.
|
||||
This accepts a portion of a path name, which should be stripped off from
|
||||
the map keys and function names.
|
||||
|
||||
For example, running without the `-prefix` flag, we get:
|
||||
|
||||
$ go-bindata /path/to/templates/
|
||||
|
||||
_bindata["/path/to/templates/foo.html"] = path_to_templates_foo_html
|
||||
|
||||
Running with the `-prefix` flag, we get:
|
||||
|
||||
$ go-bindata -prefix "/path/to/" /path/to/templates/
|
||||
|
||||
_bindata["templates/foo.html"] = templates_foo_html
|
||||
|
||||
|
||||
Build tags
|
||||
|
||||
With the optional Tags field, you can specify any go build tags that
|
||||
must be fulfilled for the output file to be included in a build. This
|
||||
is useful when including binary data in multiple formats, where the desired
|
||||
format is specified at build time with the appropriate tags.
|
||||
|
||||
The tags are appended to a `// +build` line in the beginning of the output file
|
||||
and must follow the build tags syntax specified by the go tool.
|
||||
*/
|
||||
package bindata
|
130
tools/vendor/github.com/tmthrgd/go-bindata/files.go
generated
vendored
Normal file
130
tools/vendor/github.com/tmthrgd/go-bindata/files.go
generated
vendored
Normal file
@ -0,0 +1,130 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// File represents a single asset file.
|
||||
type File interface {
|
||||
// Name returns the name by which asset is referenced.
|
||||
Name() string
|
||||
// Path returns the relative path to the file.
|
||||
Path() string
|
||||
// AbsolutePath returns the absolute path to the file.
|
||||
AbsolutePath() string
|
||||
|
||||
// Open returns an io.ReadCloser for reading the file.
|
||||
Open() (io.ReadCloser, error)
|
||||
// Stat returns an os.FileInfo interface representing the file.
|
||||
Stat() (os.FileInfo, error)
|
||||
}
|
||||
|
||||
// Files represents a collection of asset files.
|
||||
type Files []File
|
||||
|
||||
type osFile struct {
|
||||
name string
|
||||
path string
|
||||
}
|
||||
|
||||
func (f *osFile) Name() string {
|
||||
return f.name
|
||||
}
|
||||
|
||||
func (f *osFile) Path() string {
|
||||
return f.path
|
||||
}
|
||||
|
||||
func (f *osFile) AbsolutePath() string {
|
||||
path, err := filepath.Abs(f.path)
|
||||
if err != nil {
|
||||
return f.path
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
func (f *osFile) Open() (io.ReadCloser, error) {
|
||||
return os.Open(f.path)
|
||||
}
|
||||
|
||||
func (f *osFile) Stat() (os.FileInfo, error) {
|
||||
return os.Stat(f.path)
|
||||
}
|
||||
|
||||
// FindFilesOptions defines a set of options to use
|
||||
// when searching for files.
|
||||
type FindFilesOptions struct {
|
||||
// Prefix defines a path prefix which should be stripped from all
|
||||
// file names when generating the keys in the table of contents.
|
||||
// For example, running without the `-prefix` flag, we get:
|
||||
//
|
||||
// $ go-bindata /path/to/templates
|
||||
// go_bindata["/path/to/templates/foo.html"] = _path_to_templates_foo_html
|
||||
//
|
||||
// Running with the `-prefix` flag, we get:
|
||||
//
|
||||
// $ go-bindata -prefix "/path/to/" /path/to/templates/foo.html
|
||||
// go_bindata["templates/foo.html"] = templates_foo_html
|
||||
Prefix string
|
||||
|
||||
// Recursive defines whether subdirectories of Path
|
||||
// should be recursively included in the conversion.
|
||||
Recursive bool
|
||||
|
||||
// Ignores any filenames matching the regex pattern specified, e.g.
|
||||
// path/to/file.ext will ignore only that file, or \\.gitignore
|
||||
// will match any .gitignore file.
|
||||
//
|
||||
// This parameter can be provided multiple times.
|
||||
Ignore []*regexp.Regexp
|
||||
}
|
||||
|
||||
// FindFiles adds all files inside a directory to the
|
||||
// generated output.
|
||||
func FindFiles(path string, opts *FindFilesOptions) (files Files, err error) {
|
||||
if opts == nil {
|
||||
opts = new(FindFilesOptions)
|
||||
}
|
||||
|
||||
if err = filepath.Walk(path, func(assetPath string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if info.IsDir() {
|
||||
if !opts.Recursive && assetPath != path {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, re := range opts.Ignore {
|
||||
if re.MatchString(assetPath) {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
name := strings.TrimPrefix(filepath.ToSlash(
|
||||
strings.TrimPrefix(assetPath, opts.Prefix)), "/")
|
||||
if name == "" {
|
||||
panic("should be impossible")
|
||||
}
|
||||
|
||||
files = append(files, &osFile{name, assetPath})
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return
|
||||
}
|
47
tools/vendor/github.com/tmthrgd/go-bindata/format.go
generated
vendored
Normal file
47
tools/vendor/github.com/tmthrgd/go-bindata/format.go
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"go/parser"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
var printerConfig = printer.Config{
|
||||
Mode: printer.UseSpaces | printer.TabIndent,
|
||||
Tabwidth: 8,
|
||||
}
|
||||
|
||||
func formatTemplate(name string, data interface{}) (string, error) {
|
||||
buf := bufPool.Get().(*bytes.Buffer)
|
||||
defer func() {
|
||||
buf.Reset()
|
||||
bufPool.Put(buf)
|
||||
}()
|
||||
|
||||
buf.WriteString("package main;")
|
||||
|
||||
if err := baseTemplate.ExecuteTemplate(buf, name, data); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
fset := token.NewFileSet()
|
||||
|
||||
f, err := parser.ParseFile(fset, "", buf, parser.ParseComments)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
buf.Reset()
|
||||
|
||||
if err = printerConfig.Fprint(buf, fset, f); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
out := string(bytes.TrimSpace(buf.Bytes()[len("package main\n"):]))
|
||||
return out, nil
|
||||
}
|
76
tools/vendor/github.com/tmthrgd/go-bindata/generate.go
generated
vendored
Normal file
76
tools/vendor/github.com/tmthrgd/go-bindata/generate.go
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"io"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
// binAsset holds information about a single asset to be processed.
|
||||
type binAsset struct {
|
||||
File
|
||||
|
||||
opts *GenerateOptions
|
||||
Hash []byte // Generated hash of file.
|
||||
mangledName string
|
||||
}
|
||||
|
||||
// Generate writes the generated Go code to w.
|
||||
func (f Files) Generate(w io.Writer, opts *GenerateOptions) error {
|
||||
if opts == nil {
|
||||
opts = &GenerateOptions{Package: "main"}
|
||||
}
|
||||
|
||||
err := opts.validate()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
assets := make([]binAsset, 0, len(f))
|
||||
for i, file := range f {
|
||||
asset := binAsset{
|
||||
File: file,
|
||||
|
||||
opts: opts,
|
||||
}
|
||||
|
||||
if opts.Hash != nil {
|
||||
if i != 0 {
|
||||
opts.Hash.Reset()
|
||||
}
|
||||
|
||||
if err = asset.copy(opts.Hash); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
asset.Hash = opts.Hash.Sum(nil)
|
||||
}
|
||||
|
||||
assets = append(assets, asset)
|
||||
}
|
||||
|
||||
return baseTemplate.Execute(w, struct {
|
||||
*GenerateOptions
|
||||
Assets []binAsset
|
||||
}{opts, assets})
|
||||
}
|
||||
|
||||
var baseTemplate = template.Must(template.New("base").Parse(`
|
||||
{{- template "header" .}}
|
||||
|
||||
{{if or $.Debug $.Dev -}}
|
||||
{{- template "debug" . -}}
|
||||
{{- else -}}
|
||||
{{- template "release" . -}}
|
||||
{{- end}}
|
||||
|
||||
{{template "common" . -}}
|
||||
|
||||
{{- if $.AssetDir}}
|
||||
|
||||
{{template "tree" . -}}
|
||||
{{- end}}
|
||||
`))
|
44
tools/vendor/github.com/tmthrgd/go-bindata/go-bindata/appendRegexValue.go
generated
vendored
Normal file
44
tools/vendor/github.com/tmthrgd/go-bindata/go-bindata/appendRegexValue.go
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
type appendRegexValue []*regexp.Regexp
|
||||
|
||||
func (ar *appendRegexValue) String() string {
|
||||
if ar == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
||||
for i, r := range *ar {
|
||||
if i != 0 {
|
||||
buf.WriteString(", ")
|
||||
}
|
||||
|
||||
buf.WriteString(r.String())
|
||||
}
|
||||
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func (ar *appendRegexValue) Set(value string) error {
|
||||
r, err := regexp.Compile(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if *ar == nil {
|
||||
*ar = make([]*regexp.Regexp, 0, 1)
|
||||
}
|
||||
|
||||
*ar = append(*ar, r)
|
||||
return nil
|
||||
}
|
178
tools/vendor/github.com/tmthrgd/go-bindata/go-bindata/main.go
generated
vendored
Normal file
178
tools/vendor/github.com/tmthrgd/go-bindata/go-bindata/main.go
generated
vendored
Normal file
@ -0,0 +1,178 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
|
||||
"github.com/tmthrgd/go-bindata"
|
||||
"github.com/tmthrgd/go-bindata/internal/identifier"
|
||||
)
|
||||
|
||||
func must(err error) {
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "go-bindata: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func main() {
|
||||
genOpts, findOpts, output := parseArgs()
|
||||
|
||||
var all bindata.Files
|
||||
|
||||
for i := 0; i < flag.NArg(); i++ {
|
||||
var path string
|
||||
path, findOpts.Recursive = parseInput(flag.Arg(i))
|
||||
|
||||
files, err := bindata.FindFiles(path, findOpts)
|
||||
must(err)
|
||||
|
||||
all = append(all, files...)
|
||||
}
|
||||
|
||||
f, err := os.OpenFile(output, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
|
||||
must(err)
|
||||
|
||||
defer f.Close()
|
||||
|
||||
must(all.Generate(f, genOpts))
|
||||
}
|
||||
|
||||
// parseArgs create s a new, filled configuration instance
|
||||
// by reading and parsing command line options.
|
||||
//
|
||||
// This function exits the program with an error, if
|
||||
// any of the command line options are incorrect.
|
||||
func parseArgs() (genOpts *bindata.GenerateOptions, findOpts *bindata.FindFilesOptions, output string) {
|
||||
flag.Usage = func() {
|
||||
fmt.Printf("Usage: %s [options] <input directories>\n\n", os.Args[0])
|
||||
flag.PrintDefaults()
|
||||
}
|
||||
|
||||
var version bool
|
||||
flag.BoolVar(&version, "version", false, "Displays version information.")
|
||||
|
||||
flag.StringVar(&output, "o", "./bindata.go", "Optional name of the output file to be generated.")
|
||||
|
||||
genOpts = &bindata.GenerateOptions{
|
||||
Package: "main",
|
||||
MemCopy: true,
|
||||
Compress: true,
|
||||
Metadata: true,
|
||||
Restore: true,
|
||||
AssetDir: true,
|
||||
DecompressOnce: true,
|
||||
}
|
||||
findOpts = new(bindata.FindFilesOptions)
|
||||
|
||||
var noMemCopy, noCompress, noMetadata bool
|
||||
var mode uint
|
||||
flag.BoolVar(&genOpts.Debug, "debug", genOpts.Debug, "Do not embed the assets, but provide the embedding API. Contents will still be loaded from disk.")
|
||||
flag.BoolVar(&genOpts.Dev, "dev", genOpts.Dev, "Similar to debug, but does not emit absolute paths. Expects a rootDir variable to already exist in the generated code's package.")
|
||||
flag.StringVar(&genOpts.Tags, "tags", genOpts.Tags, "Optional set of build tags to include.")
|
||||
flag.StringVar(&findOpts.Prefix, "prefix", "", "Optional path prefix to strip off asset names.")
|
||||
flag.StringVar(&genOpts.Package, "pkg", genOpts.Package, "Package name to use in the generated code.")
|
||||
flag.BoolVar(&noMemCopy, "nomemcopy", !genOpts.MemCopy, "Use a .rodata hack to get rid of unnecessary memcopies. Refer to the documentation to see what implications this carries.")
|
||||
flag.BoolVar(&noCompress, "nocompress", !genOpts.Compress, "Assets will *not* be GZIP compressed when this flag is specified.")
|
||||
flag.BoolVar(&noMetadata, "nometadata", !genOpts.Metadata, "Assets will not preserve size, mode, and modtime info.")
|
||||
flag.UintVar(&mode, "mode", uint(genOpts.Mode), "Optional file mode override for all files.")
|
||||
flag.Int64Var(&genOpts.ModTime, "modtime", genOpts.ModTime, "Optional modification unix timestamp override for all files.")
|
||||
flag.Var((*appendRegexValue)(&findOpts.Ignore), "ignore", "Regex pattern to ignore")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if version {
|
||||
fmt.Fprintf(os.Stderr, "go-bindata (Go runtime %s).\n", runtime.Version())
|
||||
io.WriteString(os.Stderr, "Copyright (c) 2010-2013, Jim Teeuwen.\n")
|
||||
io.WriteString(os.Stderr, "Copyright (c) 2017, Tom Thorogood.\n")
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// Make sure we have input paths.
|
||||
if flag.NArg() == 0 {
|
||||
io.WriteString(os.Stderr, "Missing <input dir>\n\n")
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if output == "" {
|
||||
var err error
|
||||
output, err = filepath.Abs("bindata.go")
|
||||
must(err)
|
||||
}
|
||||
|
||||
genOpts.MemCopy = !noMemCopy
|
||||
genOpts.Compress = !noCompress
|
||||
genOpts.Metadata = !noMetadata && (genOpts.Mode == 0 || genOpts.ModTime == 0)
|
||||
|
||||
genOpts.Mode = os.FileMode(mode)
|
||||
|
||||
var pkgSet, outputSet bool
|
||||
flag.Visit(func(f *flag.Flag) {
|
||||
switch f.Name {
|
||||
case "pkg":
|
||||
pkgSet = true
|
||||
case "o":
|
||||
outputSet = true
|
||||
}
|
||||
})
|
||||
|
||||
// Change pkg to containing directory of output. If output flag is set and package flag is not.
|
||||
if outputSet && !pkgSet {
|
||||
pkg := identifier.Identifier(filepath.Base(filepath.Dir(output)))
|
||||
if pkg != "" {
|
||||
genOpts.Package = pkg
|
||||
}
|
||||
}
|
||||
|
||||
if !genOpts.MemCopy && genOpts.Compress {
|
||||
io.WriteString(os.Stderr, "The use of -nomemcopy without -nocompress is deprecated.\n")
|
||||
}
|
||||
|
||||
must(validateOutput(output))
|
||||
return
|
||||
}
|
||||
|
||||
func validateOutput(output string) error {
|
||||
stat, err := os.Lstat(output)
|
||||
if err == nil {
|
||||
if stat.IsDir() {
|
||||
return errors.New("output path is a directory")
|
||||
}
|
||||
|
||||
return nil
|
||||
} else if !os.IsNotExist(err) {
|
||||
return err
|
||||
}
|
||||
|
||||
// File does not exist. This is fine, just make
|
||||
// sure the directory it is to be in exists.
|
||||
if dir, _ := filepath.Split(output); dir != "" {
|
||||
return os.MkdirAll(dir, 0744)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseInput determines whether the given path has a recursive indicator and
|
||||
// returns a new path with the recursive indicator chopped off if it does.
|
||||
//
|
||||
// ex:
|
||||
// /path/to/foo/... -> (/path/to/foo, true)
|
||||
// /path/to/bar -> (/path/to/bar, false)
|
||||
func parseInput(input string) (path string, recursive bool) {
|
||||
return filepath.Clean(strings.TrimSuffix(input, "/...")),
|
||||
strings.HasSuffix(input, "/...")
|
||||
}
|
73
tools/vendor/github.com/tmthrgd/go-bindata/header.go
generated
vendored
Normal file
73
tools/vendor/github.com/tmthrgd/go-bindata/header.go
generated
vendored
Normal file
@ -0,0 +1,73 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"text/template"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
func init() {
|
||||
template.Must(baseTemplate.New("header").Funcs(template.FuncMap{
|
||||
"trimright": func(s string) string {
|
||||
return strings.TrimRightFunc(s, unicode.IsSpace)
|
||||
},
|
||||
"toslash": filepath.ToSlash,
|
||||
}).Parse(`{{- /* This makes e.g. Github ignore diffs in generated files. */ -}}
|
||||
// Code generated by go-bindata. DO NOT EDIT.
|
||||
{{if $.Dev -}}
|
||||
// debug: dev
|
||||
{{else if $.Debug -}}
|
||||
// debug: true
|
||||
{{end -}}
|
||||
{{- if $.MemCopy -}}
|
||||
// memcopy: true
|
||||
{{end -}}
|
||||
{{- if $.Compress -}}
|
||||
// compress: true
|
||||
{{end -}}
|
||||
{{- if and $.Compress $.DecompressOnce -}}
|
||||
// decompress: once
|
||||
{{end -}}
|
||||
{{- if $.Metadata -}}
|
||||
// metadata: true
|
||||
{{end -}}
|
||||
{{- if $.Mode -}}
|
||||
// mode: {{printf "%04o" $.Mode}}
|
||||
{{end -}}
|
||||
{{- if $.ModTime -}}
|
||||
// modtime: {{$.ModTime}}
|
||||
{{end -}}
|
||||
{{- if $.AssetDir -}}
|
||||
// asset-dir: true
|
||||
{{end -}}
|
||||
{{- if $.Restore -}}
|
||||
// restore: true
|
||||
{{end -}}
|
||||
{{- if $.Hash -}}
|
||||
{{- if $.HashFormat -}}
|
||||
// hash-format: {{$.HashFormat}}
|
||||
{{else -}}
|
||||
// hash-format: unchanged
|
||||
{{end -}}
|
||||
{{- if and $.HashFormat $.HashLength (ne $.HashLength 16) -}}
|
||||
// hash-length: {{$.HashLength}}
|
||||
{{end -}}
|
||||
{{- if and $.HashFormat $.HashEncoding -}}
|
||||
// hash-encoding: {{$.HashEncoding}}
|
||||
{{end -}}
|
||||
{{- end -}}
|
||||
// sources:
|
||||
{{range .Assets -}}
|
||||
// {{toslash (trimright .Path)}}
|
||||
{{end}}
|
||||
{{if $.Tags -}} // +build {{$.Tags}}
|
||||
|
||||
{{end -}}
|
||||
|
||||
package {{$.Package}}`))
|
||||
}
|
31
tools/vendor/github.com/tmthrgd/go-bindata/internal/identifier/identifier.go
generated
vendored
Normal file
31
tools/vendor/github.com/tmthrgd/go-bindata/internal/identifier/identifier.go
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package identifier
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Identifier removes all characters from a string that are not valid in
|
||||
// an identifier according to the Go Programming Language Specification.
|
||||
//
|
||||
// The logic in the switch statement was taken from go/source package:
|
||||
// https://github.com/golang/go/blob/a1a688fa0012f7ce3a37e9ac0070461fe8e3f28e/src/go/scanner/scanner.go#L257-#L271
|
||||
func Identifier(val string) string {
|
||||
return strings.TrimLeftFunc(strings.Map(func(ch rune) rune {
|
||||
switch {
|
||||
case 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' ||
|
||||
ch >= utf8.RuneSelf && unicode.IsLetter(ch):
|
||||
return ch
|
||||
case '0' <= ch && ch <= '9' ||
|
||||
ch >= utf8.RuneSelf && unicode.IsDigit(ch):
|
||||
return ch
|
||||
default:
|
||||
return -1
|
||||
}
|
||||
}, val), unicode.IsDigit)
|
||||
}
|
61
tools/vendor/github.com/tmthrgd/go-bindata/name.go
generated
vendored
Normal file
61
tools/vendor/github.com/tmthrgd/go-bindata/name.go
generated
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Name applies name hashing if required. It returns the original
|
||||
// name for NoHash and NameUnchanged and returns the mangledName
|
||||
// otherwise.
|
||||
func (asset *binAsset) Name() string {
|
||||
if asset.Hash == nil || asset.opts.HashFormat == NameUnchanged {
|
||||
return asset.File.Name()
|
||||
} else if asset.mangledName != "" {
|
||||
return asset.mangledName
|
||||
}
|
||||
|
||||
var enc string
|
||||
switch asset.opts.HashEncoding {
|
||||
case HexHash:
|
||||
enc = hex.EncodeToString(asset.Hash)
|
||||
case Base32Hash:
|
||||
enc = base32Enc.EncodeToString(asset.Hash)
|
||||
case Base64Hash:
|
||||
enc = base64.RawURLEncoding.EncodeToString(asset.Hash)
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
l := asset.opts.HashLength
|
||||
if l == 0 {
|
||||
l = 16
|
||||
}
|
||||
|
||||
if l < uint(len(enc)) {
|
||||
enc = enc[:l]
|
||||
}
|
||||
|
||||
dir, file := path.Split(asset.File.Name())
|
||||
ext := path.Ext(file)
|
||||
|
||||
switch asset.opts.HashFormat {
|
||||
case DirHash:
|
||||
asset.mangledName = path.Join(dir, enc, file)
|
||||
case NameHashSuffix:
|
||||
file = strings.TrimSuffix(file, ext)
|
||||
asset.mangledName = path.Join(dir, file+"-"+enc+ext)
|
||||
case HashWithExt:
|
||||
asset.mangledName = path.Join(dir, enc+ext)
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
return asset.mangledName
|
||||
}
|
328
tools/vendor/github.com/tmthrgd/go-bindata/release.go
generated
vendored
Normal file
328
tools/vendor/github.com/tmthrgd/go-bindata/release.go
generated
vendored
Normal file
@ -0,0 +1,328 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/flate"
|
||||
"io"
|
||||
"path"
|
||||
"strings"
|
||||
"sync"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
var flatePool sync.Pool
|
||||
|
||||
func writeWrappedString(write func(io.Writer) error, indent string, wrapAt int) (string, error) {
|
||||
buf := bufPool.Get().(*bytes.Buffer)
|
||||
defer func() {
|
||||
buf.Reset()
|
||||
bufPool.Put(buf)
|
||||
}()
|
||||
|
||||
buf.WriteString("(\"\" +\n")
|
||||
buf.WriteString(indent)
|
||||
buf.WriteByte('"')
|
||||
|
||||
if err := write(&stringWriter{
|
||||
Writer: buf,
|
||||
Indent: indent,
|
||||
WrapAt: wrapAt,
|
||||
}); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
buf.WriteString("\")")
|
||||
|
||||
s := buf.String()
|
||||
|
||||
if strings.IndexByte(s[1:], '(') == -1 {
|
||||
s = s[1 : len(s)-1]
|
||||
}
|
||||
|
||||
return s, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
template.Must(template.Must(baseTemplate.New("release").Funcs(template.FuncMap{
|
||||
"base": path.Base,
|
||||
"wrap": func(data []byte, indent string, wrapAt int) (string, error) {
|
||||
return writeWrappedString(func(w io.Writer) error {
|
||||
_, err := w.Write(data)
|
||||
return err
|
||||
}, indent, wrapAt)
|
||||
},
|
||||
"read": func(asset binAsset, indent string, wrapAt int) (string, error) {
|
||||
return writeWrappedString(asset.copy, indent, wrapAt)
|
||||
},
|
||||
"flate": func(asset binAsset, indent string, wrapAt int) (out string, err error) {
|
||||
return writeWrappedString(func(w io.Writer) error {
|
||||
fw, _ := flatePool.Get().(*flate.Writer)
|
||||
if fw != nil {
|
||||
fw.Reset(w)
|
||||
} else if fw, err = flate.NewWriter(w, flate.BestCompression); err != nil {
|
||||
return err
|
||||
}
|
||||
defer flatePool.Put(fw)
|
||||
|
||||
if err := asset.copy(fw); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return fw.Close()
|
||||
}, indent, wrapAt)
|
||||
},
|
||||
"format": formatTemplate,
|
||||
}).Parse(`
|
||||
{{- $unsafeRead := and (not $.Compress) (not $.MemCopy) -}}
|
||||
import (
|
||||
{{- if $.Compress}}
|
||||
"bytes"
|
||||
"compress/flate"
|
||||
"io"
|
||||
{{- end}}
|
||||
"os"
|
||||
"path/filepath"
|
||||
{{- if $unsafeRead}}
|
||||
"reflect"
|
||||
{{- end}}
|
||||
{{- if or $.Compress $.AssetDir}}
|
||||
"strings"
|
||||
{{- end}}
|
||||
{{- if and $.Compress $.DecompressOnce}}
|
||||
"sync"
|
||||
{{- end}}
|
||||
"time"
|
||||
{{- if $unsafeRead}}
|
||||
"unsafe"
|
||||
{{- end}}
|
||||
{{- if $.Restore}}
|
||||
|
||||
"github.com/tmthrgd/go-bindata/restore"
|
||||
{{- end}}
|
||||
)
|
||||
|
||||
{{if $unsafeRead -}}
|
||||
func bindataRead(data string) []byte {
|
||||
var empty [0]byte
|
||||
sx := (*reflect.StringHeader)(unsafe.Pointer(&data))
|
||||
b := empty[:]
|
||||
bx := (*reflect.SliceHeader)(unsafe.Pointer(&b))
|
||||
bx.Data = sx.Data
|
||||
bx.Len = len(data)
|
||||
bx.Cap = bx.Len
|
||||
return b
|
||||
}
|
||||
|
||||
{{end -}}
|
||||
|
||||
type asset struct {
|
||||
name string
|
||||
{{- if and $.Hash $.HashFormat}}
|
||||
orig string
|
||||
{{- end}}
|
||||
data string
|
||||
{{- if $.Compress}}
|
||||
size int64
|
||||
{{- end -}}
|
||||
{{- if and $.Metadata (not $.Mode)}}
|
||||
mode os.FileMode
|
||||
{{- end -}}
|
||||
{{- if and $.Metadata (not $.ModTime)}}
|
||||
time time.Time
|
||||
{{- end -}}
|
||||
{{- if $.Hash}}
|
||||
hash string
|
||||
{{- end}}
|
||||
{{- if and $.Compress $.DecompressOnce}}
|
||||
|
||||
once sync.Once
|
||||
bytes []byte
|
||||
err error
|
||||
{{- end}}
|
||||
}
|
||||
|
||||
func (a *asset) Name() string {
|
||||
return a.name
|
||||
}
|
||||
|
||||
func (a *asset) Size() int64 {
|
||||
{{- if $.Compress}}
|
||||
return a.size
|
||||
{{- else}}
|
||||
return int64(len(a.data))
|
||||
{{- end}}
|
||||
}
|
||||
|
||||
func (a *asset) Mode() os.FileMode {
|
||||
{{- if $.Mode}}
|
||||
return {{printf "%04o" $.Mode}}
|
||||
{{- else if $.Metadata}}
|
||||
return a.mode
|
||||
{{- else}}
|
||||
return 0
|
||||
{{- end}}
|
||||
}
|
||||
|
||||
func (a *asset) ModTime() time.Time {
|
||||
{{- if $.ModTime}}
|
||||
return time.Unix({{$.ModTime}}, 0)
|
||||
{{- else if $.Metadata}}
|
||||
return a.time
|
||||
{{- else}}
|
||||
return time.Time{}
|
||||
{{- end}}
|
||||
}
|
||||
|
||||
func (*asset) IsDir() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (*asset) Sys() interface{} {
|
||||
return nil
|
||||
}
|
||||
|
||||
{{- if $.Hash}}
|
||||
|
||||
func (a *asset) OriginalName() string {
|
||||
{{- if $.HashFormat}}
|
||||
return a.orig
|
||||
{{- else}}
|
||||
return a.name
|
||||
{{- end}}
|
||||
}
|
||||
|
||||
func (a *asset) FileHash() []byte {
|
||||
{{- if $unsafeRead}}
|
||||
return bindataRead(a.hash)
|
||||
{{- else}}
|
||||
return []byte(a.hash)
|
||||
{{- end}}
|
||||
}
|
||||
|
||||
type FileInfo interface {
|
||||
os.FileInfo
|
||||
|
||||
OriginalName() string
|
||||
FileHash() []byte
|
||||
}
|
||||
{{- end}}
|
||||
|
||||
// _bindata is a table, holding each asset generator, mapped to its name.
|
||||
var _bindata = map[string]*asset{
|
||||
{{range $.Assets}} {{printf "%q" .Name}}: &asset{
|
||||
name: {{printf "%q" (base .Name)}},
|
||||
{{- if and $.Hash $.HashFormat}}
|
||||
orig: {{printf "%q" .File.Name}},
|
||||
{{- end}}
|
||||
data: {{if $.Compress -}}
|
||||
{{flate . "\t\t\t" 24}}
|
||||
{{- else -}}
|
||||
{{read . "\t\t\t" 24}}
|
||||
{{- end}},
|
||||
|
||||
{{- if or $.Metadata $.Compress -}}
|
||||
{{- $info := .Stat -}}
|
||||
|
||||
{{- if $.Compress}}
|
||||
size: {{$info.Size}},
|
||||
{{- end -}}
|
||||
|
||||
{{- if and $.Metadata (not $.Mode)}}
|
||||
mode: {{printf "%04o" $info.Mode}},
|
||||
{{- end -}}
|
||||
|
||||
{{- if and $.Metadata (not $.ModTime)}}
|
||||
{{$mod := $info.ModTime -}}
|
||||
time: time.Unix({{$mod.Unix}}, {{$mod.Nanosecond}}),
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
|
||||
{{- if $.Hash}}
|
||||
hash: {{wrap .Hash "\t\t\t" 24}},
|
||||
{{- end}}
|
||||
},
|
||||
{{end -}}
|
||||
}
|
||||
|
||||
// AssetAndInfo loads and returns the asset and asset info for the
|
||||
// given name. It returns an error if the asset could not be found
|
||||
// or could not be loaded.
|
||||
func AssetAndInfo(name string) ([]byte, os.FileInfo, error) {
|
||||
a, ok := _bindata[filepath.ToSlash(name)]
|
||||
if !ok {
|
||||
return nil, nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
|
||||
}
|
||||
{{if and $.Compress $.DecompressOnce}}
|
||||
a.once.Do(func() {
|
||||
fr := flate.NewReader(strings.NewReader(a.data))
|
||||
|
||||
var buf bytes.Buffer
|
||||
if _, a.err = io.Copy(&buf, fr); a.err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if a.err = fr.Close(); a.err == nil {
|
||||
a.bytes = buf.Bytes()
|
||||
}
|
||||
})
|
||||
if a.err != nil {
|
||||
return nil, nil, &os.PathError{Op: "read", Path: name, Err: a.err}
|
||||
}
|
||||
|
||||
return a.bytes, a, nil
|
||||
{{- else if $.Compress}}
|
||||
fr := flate.NewReader(strings.NewReader(a.data))
|
||||
|
||||
var buf bytes.Buffer
|
||||
if _, err := io.Copy(&buf, fr); err != nil {
|
||||
return nil, nil, &os.PathError{Op: "read", Path: name, Err: err}
|
||||
}
|
||||
|
||||
if err := fr.Close(); err != nil {
|
||||
return nil, nil, &os.PathError{Op: "read", Path: name, Err: err}
|
||||
}
|
||||
|
||||
return buf.Bytes(), a, nil
|
||||
{{- else if $unsafeRead}}
|
||||
return bindataRead(a.data), a, nil
|
||||
{{- else}}
|
||||
return []byte(a.data), a, nil
|
||||
{{- end}}
|
||||
}
|
||||
|
||||
// AssetInfo loads and returns the asset info for the given name.
|
||||
// It returns an error if the asset could not be found or
|
||||
// could not be loaded.
|
||||
func AssetInfo(name string) (os.FileInfo, error) {
|
||||
a, ok := _bindata[filepath.ToSlash(name)]
|
||||
if !ok {
|
||||
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
|
||||
}
|
||||
|
||||
return a, nil
|
||||
}
|
||||
|
||||
{{- if and $.Hash $.HashFormat}}
|
||||
|
||||
{{format "hashnames" $}}
|
||||
|
||||
// AssetName returns the hashed name associated with an asset of a
|
||||
// given name.
|
||||
func AssetName(name string) (string, error) {
|
||||
if name, ok := _hashNames[filepath.ToSlash(name)]; ok {
|
||||
return name, nil
|
||||
}
|
||||
|
||||
return "", &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
|
||||
}
|
||||
{{- end}}`)).New("hashnames").Parse(`
|
||||
var _hashNames = map[string]string{
|
||||
{{range .Assets -}}
|
||||
{{printf "%q" .File.Name}}: {{printf "%q" .Name}},
|
||||
{{end -}}
|
||||
}`))
|
||||
}
|
71
tools/vendor/github.com/tmthrgd/go-bindata/stringwriter.go
generated
vendored
Normal file
71
tools/vendor/github.com/tmthrgd/go-bindata/stringwriter.go
generated
vendored
Normal file
@ -0,0 +1,71 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import "io"
|
||||
|
||||
// Fragments emitted around each line of a generated string literal:
// the opening quote, the ordinary continuation suffix, and the
// parenthesised continuation suffix used to break up very long
// concatenation chains (see the issue-18078 note in Write).
var (
	stringWriterLinePrefix       = []byte(`"`)
	stringWriterLineSuffix       = []byte("\" +\n")
	stringWriterParensLineSuffix = []byte("\") + (\"\" +\n")
)
|
||||
|
||||
// stringWriter writes every byte it receives as a two-digit
// hexadecimal escape (\xNN) inside a Go string literal, wrapping the
// literal across multiple source lines.
type stringWriter struct {
	io.Writer // destination for the generated source text

	Indent string // written at the start of each continuation line
	WrapAt int    // escapes per line before wrapping; 0 disables wrapping

	c, l int // c: total escapes written; l: continuation lines emitted
}
|
||||
|
||||
func (w *stringWriter) Write(p []byte) (n int, err error) {
|
||||
buf := [4]byte{'\\', 'x', 0, 0}
|
||||
|
||||
for _, b := range p {
|
||||
const lowerHex = "0123456789abcdef"
|
||||
buf[2] = lowerHex[b/16]
|
||||
buf[3] = lowerHex[b%16]
|
||||
|
||||
if _, err = w.Writer.Write(buf[:]); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
n++
|
||||
w.c++
|
||||
|
||||
if w.WrapAt == 0 || w.c%w.WrapAt != 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
w.l++
|
||||
|
||||
suffix := stringWriterLineSuffix
|
||||
if w.l%500 == 0 {
|
||||
// As per https://golang.org/issue/18078, the compiler has trouble
|
||||
// compiling the concatenation of many strings, s0 + s1 + s2 + ... + sN,
|
||||
// for large N. We insert redundant, explicit parentheses to work around
|
||||
// that, lowering the N at any given step: (s0 + s1 + ... + s499) + (s500 +
|
||||
// ... + s1999) + etc + (etc + ... + sN).
|
||||
//
|
||||
// This fix was taken from the fix applied to x/text in
|
||||
// https://github.com/golang/text/commit/5c6cf4f9a2.
|
||||
|
||||
suffix = stringWriterParensLineSuffix
|
||||
}
|
||||
|
||||
if _, err = w.Writer.Write(suffix); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if _, err = io.WriteString(w.Writer, w.Indent); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if _, err = w.Writer.Write(stringWriterLinePrefix); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
98
tools/vendor/github.com/tmthrgd/go-bindata/tree.go
generated
vendored
Normal file
98
tools/vendor/github.com/tmthrgd/go-bindata/tree.go
generated
vendored
Normal file
@ -0,0 +1,98 @@
|
||||
// Copyright 2017 Tom Thorogood. All rights reserved.
|
||||
// Use of this source code is governed by a Modified
|
||||
// BSD License that can be found in the LICENSE file.
|
||||
|
||||
package bindata
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
// assetTree is a node in the directory tree built from the asset
// table of contents; it backs the generated AssetDir function.
type assetTree struct {
	Asset    binAsset              // set by the tree builder on leaf (file) nodes
	Children map[string]*assetTree // child nodes keyed by path segment
	Depth    int                   // distance from the root node (root is 0)
}
|
||||
|
||||
func newAssetTree() *assetTree {
|
||||
return &assetTree{
|
||||
Children: make(map[string]*assetTree),
|
||||
}
|
||||
}
|
||||
|
||||
func (node *assetTree) child(name string) *assetTree {
|
||||
rv, ok := node.Children[name]
|
||||
if !ok {
|
||||
rv = newAssetTree()
|
||||
rv.Depth = node.Depth + 1
|
||||
node.Children[name] = rv
|
||||
}
|
||||
|
||||
return rv
|
||||
}
|
||||
|
||||
// init registers the "tree" template, which emits the generated
// AssetDir function and the _bintree lookup table into the output
// file, together with the recursive "bintree" template used to format
// the nested table literal.
//
// NOTE: the template bodies below are runtime string literals that
// become generated Go source — their text must not be edited casually.
func init() {
	template.Must(template.Must(baseTemplate.New("tree").Funcs(template.FuncMap{
		// tree folds the flat table of contents into a nested assetTree,
		// creating one node per slash-separated path segment and storing
		// the asset itself on the leaf node.
		"tree": func(toc []binAsset) *assetTree {
			tree := newAssetTree()
			for _, asset := range toc {
				node := tree
				for _, name := range strings.Split(asset.Name(), "/") {
					node = node.child(name)
				}

				node.Asset = asset
			}

			return tree
		},
		"format": formatTemplate,
	}).Parse(`// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
//     data/
//       foo.txt
//       img/
//         a.png
//         b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
func AssetDir(name string) ([]string, error) {
	node := _bintree

	if name != "" {
		var ok bool
		for _, p := range strings.Split(filepath.ToSlash(name), "/") {
			if node, ok = node[p]; !ok {
				return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
			}
		}
	}

	if len(node) == 0 {
		return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
	}

	rv := make([]string, 0, len(node))
	for name := range node {
		rv = append(rv, name)
	}

	return rv, nil
}

type bintree map[string]bintree

{{format "bintree" (tree .Assets)}}`)).New("bintree").Parse(`
{{- if not .Depth -}}
var _bintree = {{end -}}
bintree{
{{range $k, $v := .Children -}}
	{{printf "%q" $k}}: {{template "bintree" $v}},
{{end -}}
}`))
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user