mirror of https://github.com/prometheus-community/postgres_exporter (synced 2025-05-01 07:28:12 +00:00)
Add self-contained gometalinter build tooling.
This commit is contained in: parent 0de0311c22, commit e2b6c973a1
.gitignore (vendored): 2 changes

@@ -8,4 +8,6 @@ postgres_exporter_integration_test
*.iml
cover.out
cover.*.out
.coverage
*.prom

@@ -3,7 +3,7 @@ services:
- docker
language: go
go:
- '1.7.5'
- '1.8'
# Make sure we have p2 and the postgres client.
before_install:
- go get -v github.com/mattn/goveralls

Makefile: 47 changes

@@ -1,10 +1,21 @@
GO_SRC := $(shell find -type f -name '*.go' ! -path '*/vendor/*')
COVERDIR = .coverage
TOOLDIR = tools

GO_SRC := $(shell find . -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' )
GO_DIRS := $(shell find . -type d -name '*.go' ! -path '*/vendor/*' ! -path 'tools/*' )
GO_PKGS := $(shell go list ./... | grep -v '/vendor/')

CONTAINER_NAME ?= wrouesnel/postgres_exporter:latest
VERSION ?= $(shell git describe --dirty)

all: vet test postgres_exporter
CONCURRENT_LINTERS ?= $(shell cat /proc/cpuinfo | grep processor | wc -l)
LINTER_DEADLINE ?= 30s

export PATH := $(TOOLDIR)/bin:$(PATH)
SHELL := env PATH=$(PATH) /bin/bash

all: style lint test postgres_exporter

# Cross compilation (e.g. if you are on a Mac)
cross: docker-build docker

@@ -21,19 +32,23 @@ postgres_exporter_integration_test: $(GO_SRC)
docker: postgres_exporter
	docker build -t $(CONTAINER_NAME) .

vet:
	go vet
style: tools
	gometalinter --disable-all --enable=gofmt --vendor

# Check code conforms to go fmt
style:
	! gofmt -s -l $(GO_SRC) 2>&1 | read 2>/dev/null
lint: tools
	@echo Using $(CONCURRENT_LINTERS) processes
	gometalinter -j $(CONCURRENT_LINTERS) --deadline=$(LINTER_DEADLINE) --disable=gotype --disable=gocyclo $(GO_DIRS)

# Format the code
fmt:
fmt: tools
	gofmt -s -w $(GO_SRC)

test:
	go test -v -covermode count -coverprofile=cover.test.out
test: tools
	@rm -rf $(COVERDIR)
	@mkdir -p $(COVERDIR)
	for pkg in $(GO_PKGS) ; do \
		go test -v -covermode count -coverprofile=$(COVERDIR)/$(echo $$pkg | tr '/' '-').out $(pkg) ; \
	done
	gocovmerge $(shell find $(COVERDIR) -name '*.out') > cover.out

test-integration: postgres_exporter postgres_exporter_integration_test
	tests/test-smoke "$(shell pwd)/postgres_exporter" "$(shell pwd)/postgres_exporter_integration_test_script $(shell pwd)/postgres_exporter_integration_test $(shell pwd)/cover.integration.out"

@@ -45,11 +60,17 @@ docker-build:
	-v $(shell pwd):/real_src \
	-e SHELL_UID=$(shell id -u) -e SHELL_GID=$(shell id -g) \
	-w /go/src/github.com/wrouesnel/postgres_exporter \
	golang:1.7-wheezy \
	golang:1.8-wheezy \
	/bin/bash -c "make >&2 && chown $$SHELL_UID:$$SHELL_GID ./postgres_exporter"
	docker build -t $(CONTAINER_NAME) .

push:
	docker push $(CONTAINER_NAME)

.PHONY: docker-build docker test vet push cross
tools:
	$(MAKE) -C $(TOOLDIR)

clean:
	rm -f postgres_exporter postgres_exporter_integration_test

.PHONY: tools docker-build docker lint fmt test vet push cross clean

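The rewritten test target above writes one coverage profile per package into $(COVERDIR) (.coverage by default) and merges them into cover.out with gocovmerge. To illustrate what those per-package profiles contain, here is a small hypothetical helper, not part of the commit, that tallies statement coverage across the generated files using the golang.org/x/tools/cover parser; the .coverage path simply mirrors the COVERDIR default.

package main

import (
	"fmt"
	"log"
	"path/filepath"

	"golang.org/x/tools/cover"
)

func main() {
	// The Makefile's test target leaves one profile per package in .coverage/.
	files, err := filepath.Glob(".coverage/*.out")
	if err != nil {
		log.Fatal(err)
	}

	var covered, total int
	for _, f := range files {
		profiles, err := cover.ParseProfiles(f)
		if err != nil {
			log.Fatal(err)
		}
		for _, p := range profiles {
			for _, b := range p.Blocks {
				total += b.NumStmt
				if b.Count > 0 {
					covered += b.NumStmt
				}
			}
		}
	}

	if total > 0 {
		fmt.Printf("statement coverage: %.1f%% (%d/%d statements)\n",
			100*float64(covered)/float64(total), covered, total)
	}
}
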
@@ -1,12 +0,0 @@
#!/bin/bash
# Concatenate a list of coverage reports
# Usage: concatenate_coverage <out> [[test] ...]

output_file=$1
shift
cat $1 > $output_file
shift

for f in $@ ; do
	tail -n +2 $f >> $output_file
done

@@ -12,7 +12,7 @@ type PgSettingSuite struct{}
var _ = Suite(&PgSettingSuite{})

var fixtures = []fixture{
	fixture{
	{
		p: pgSetting{
			name: "seconds_fixture_metric",
			setting: "5",
@@ -28,7 +28,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_seconds_fixture_metric_seconds\", help: \"Foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
		v: 5,
	},
	fixture{
	{
		p: pgSetting{
			name: "milliseconds_fixture_metric",
			setting: "5000",
@@ -44,7 +44,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_milliseconds_fixture_metric_seconds\", help: \"Foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
		v: 5,
	},
	fixture{
	{
		p: pgSetting{
			name: "eight_kb_fixture_metric",
			setting: "17",
@@ -60,7 +60,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_eight_kb_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
		v: 139264,
	},
	fixture{
	{
		p: pgSetting{
			name: "16_mb_real_fixture_metric",
			setting: "3.0",
@@ -76,7 +76,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_16_mb_real_fixture_metric_bytes\", help: \"Foo foo foo [Units converted to bytes.]\", constLabels: {}, variableLabels: []}",
		v: 5.0331648e+07,
	},
	fixture{
	{
		p: pgSetting{
			name: "bool_on_fixture_metric",
			setting: "on",
@@ -92,7 +92,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_bool_on_fixture_metric\", help: \"Foo foo foo\", constLabels: {}, variableLabels: []}",
		v: 1,
	},
	fixture{
	{
		p: pgSetting{
			name: "bool_off_fixture_metric",
			setting: "off",
@@ -108,7 +108,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_bool_off_fixture_metric\", help: \"Foo foo foo\", constLabels: {}, variableLabels: []}",
		v: 0,
	},
	fixture{
	{
		p: pgSetting{
			name: "special_minus_one_value",
			setting: "-1",
@@ -124,7 +124,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_special_minus_one_value_seconds\", help: \"foo foo foo [Units converted to seconds.]\", constLabels: {}, variableLabels: []}",
		v: -1,
	},
	fixture{
	{
		p: pgSetting{
			name: "rds.rds_superuser_reserved_connections",
			setting: "2",
@@ -140,7 +140,7 @@ var fixtures = []fixture{
		d: "Desc{fqName: \"pg_settings_rds_rds_superuser_reserved_connections\", help: \"Sets the number of connection slots reserved for rds_superusers.\", constLabels: {}, variableLabels: []}",
		v: 2,
	},
	fixture{
	{
		p: pgSetting{
			name: "unknown_unit",
			setting: "10",

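The hunks above all make the same mechanical change: the redundant fixture element type is dropped from each entry of the fixtures slice literal, which is the simplification gofmt -s performs and which the new gometalinter-driven style target presumably enforces. A minimal sketch of the before and after shape, with the structs abbreviated to the fields visible in these hunks:

package main

import "fmt"

// Abbreviated stand-ins for the test types; the real pgSetting and fixture
// structs in the test file carry more fields than these hunks show.
type pgSetting struct {
	name    string
	setting string
}

type fixture struct {
	p pgSetting
	v float64
}

// Before: the element type is repeated for every entry of the slice literal.
var verbose = []fixture{
	fixture{p: pgSetting{name: "seconds_fixture_metric", setting: "5"}, v: 5},
}

// After: gofmt -s drops the redundant element type inside the slice literal.
var simplified = []fixture{
	{p: pgSetting{name: "seconds_fixture_metric", setting: "5"}, v: 5},
}

func main() {
	// Both forms build exactly the same value.
	fmt.Println(verbose[0] == simplified[0]) // true
}
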
tools/.gitignore (vendored, new file): 4 lines

@@ -0,0 +1,4 @@
pkg
bin
tools.deps
metatools.deps

tools/Makefile (new file): 64 lines

@@ -0,0 +1,64 @@
# Makefile to build the tools used in the build system.
# If recreating from scratch, you will need a local install of govendor
# and to run govendor init in this folder before running govendor fetch.

# Ensure we use local bin dir
export PATH := bin:$(PATH)
SHELL := env PATH=$(PATH) /bin/bash

THIS_FILE := $(lastword $(MAKEFILE_LIST))

# This function is used to get the linters used by metalinter
get_metalinters := gometalinter --help | grep -oP ' \w+ \(.+\)' | tr -s ' ' | cut -d' ' -f3 | grep -oP '[^()]+'

TOOL_SRCS := github.com/kardianos/govendor \
	github.com/wadey/gocovmerge \
	github.com/mattn/goveralls \
	github.com/alecthomas/gometalinter

METATOOL_SRCS :=

GO_SRC := $(shell find $(SOURCEDIR) -name '*.go')

GO := GOPATH=$(shell pwd) go

DEFAULT: all

tools.deps: $(GO_SRC)
	# Generate build patterns for static tools
	for pkg in $(TOOL_SRCS); do \
		echo -e "bin/$$(basename $$pkg): $$GO_SRC\n\t\$$(GO) install -v $$pkg" ; \
	done > tools.deps

metatools.deps: tools.deps bin/gometalinter $(GO_SRC)
	# Generate build patterns for metalinters tools
	echo -e "METATOOL_SRCS+=$(shell $(get_metalinters))" > metatools.deps
	for pkg in $(shell $(get_metalinters)) ; do \
		echo -e "bin/$$(basename $$pkg): $$GO_SRC\n\t\$$(GO) install -v $$pkg" ; \
	done >> metatools.deps

include tools.deps
include metatools.deps

update:
	# Fetch govendor, then rebuild govendor.
	govendor fetch github.com/kardianos/govendor
	$(GO) install -v github.com/kardianos/govendor
	# Fetch gometalinter and rebuild gometalinter.
	govendor fetch github.com/alecthomas/gometalinter
	$(GO) install -v github.com/alecthomas/gometalinter
	$(MAKE) -f $(THIS_FILE) update-phase-2

update-phase-2:
	# Fetch the new metalinter list.
	for pkg in $(TOOL_SRCS) $$($(get_metalinters)); do \
		govendor fetch -v $$pkg ; \
	done

clean:
	rm -rf bin pkg tools.deps metatools.deps

all: $(addprefix bin/,$(notdir $(TOOL_SRCS) $(METATOOL_SRCS) ))

# TOOL_SRCS is included here since we'll never really have these files.
.PHONY: all update clean $(TOOL_SRCS) $(METATOOL_SRCS)

tools/README.md (new file): 9 lines

Vendored versions of the build tooling.

gocovmerge is used to merge coverage reports for uploading to a service like
coveralls, and gometalinter conveniently incorporates multiple Go linters.

By vendoring both, we gain a self-contained build system.

Run `make all` to build, and `make update` to update.

tools/vendor/github.com/Bowery/prompt/CONTRIBUTORS.md (generated, vendored, new file): 7 lines

- [Larz Conwell](https://github.com/larzconwell)
- [Steve Kaliski](https://github.com/sjkaliski)
- [NHOrus](https://github.com/NHOrus)
- [Attila Fülöp](https://github.com/AttilaFueloep)
- [Gereon Frey](https://github.com/gfrey)
- [Aaron Bieber](https://github.com/qbit)
- [Ricky Medina](https://github.com/r-medina)

tools/vendor/github.com/Bowery/prompt/LICENSE (generated, vendored, new file): 21 lines

The MIT License (MIT)

Copyright (c) 2013-2015 Bowery, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

tools/vendor/github.com/Bowery/prompt/README.md (generated, vendored, new file): 38 lines

# Prompt

[](https://circleci.com/gh/Bowery/prompt/tree/master)
[](https://godoc.org/github.com/Bowery/prompt)

Prompt is a cross platform line-editing prompting library. Read the GoDoc page
for more info and for API details.

## Features
- Keyboard shortcuts in prompts
- History support
- Secure password prompt
- Custom prompt support
- Fallback prompt for unsupported terminals
- ANSI conversion for Windows

## Todo
- Multi-line prompt as a Terminal option
- Make refresh less jittery on Windows ([possible reason](https://github.com/Bowery/prompt/blob/master/output_windows.go#L108))
- Multi-byte character support on Windows
- `AnsiWriter` should execute the equivalent ANSI escape code functionality on Windows
- Support for more ANSI escape codes on Windows.
- More keyboard shortcuts from Readlines shortcut list

## Contributing

Make sure Go is setup and running the latest release version, and make sure your `GOPATH` is setup properly.

Follow the guidelines [here](https://guides.github.com/activities/contributing-to-open-source/#contributing).

Please be sure to `gofmt` any code before doing commits. You can simply run `gofmt -w .` to format all the code in the directory.

Lastly don't forget to add your name to [`CONTRIBUTORS.md`](https://github.com/Bowery/prompt/blob/master/CONTRIBUTORS.md)

## License

Prompt is MIT licensed, details can be found [here](https://raw.githubusercontent.com/Bowery/prompt/master/LICENSE).

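The library's entry points are the package-level wrappers in prompt.go (Basic, BasicDefault, Ask, Custom, Password), added later in this commit. A minimal usage sketch, not part of the commit, assuming the import path matches the vendored package:

package main

import (
	"fmt"
	"log"

	"github.com/Bowery/prompt"
)

func main() {
	// Basic reads one line; the second argument makes a non-empty answer required.
	name, err := prompt.Basic("Name: ", true)
	if err != nil {
		log.Fatal(err)
	}

	// Ask poses a yes/no question and reports the answer as a bool.
	ok, err := prompt.Ask("Overwrite existing config")
	if err != nil {
		log.Fatal(err)
	}

	// Password reads a line without echoing it back to the terminal.
	secret, err := prompt.Password("Password: ")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(name, ok, len(secret))
}
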
tools/vendor/github.com/Bowery/prompt/ansi_unix.go (generated, vendored, new file): 39 lines
tools/vendor/github.com/Bowery/prompt/ansi_windows.go (generated, vendored, new file): 510 lines
tools/vendor/github.com/Bowery/prompt/buffer.go (generated, vendored, new file): 152 lines
tools/vendor/github.com/Bowery/prompt/buffer_unix.go (generated, vendored, new file): 76 lines
tools/vendor/github.com/Bowery/prompt/buffer_windows.go (generated, vendored, new file): 150 lines
tools/vendor/github.com/Bowery/prompt/ioctl_bsd.go (generated, vendored, new file): 15 lines
tools/vendor/github.com/Bowery/prompt/ioctl_linux.go (generated, vendored, new file): 13 lines
tools/vendor/github.com/Bowery/prompt/ioctl_solaris.go (generated, vendored, new file): 41 lines
tools/vendor/github.com/Bowery/prompt/ioctl_unix.go (generated, vendored, new file): 62 lines
tools/vendor/github.com/Bowery/prompt/keys.go (generated, vendored, new file): 41 lines
tools/vendor/github.com/Bowery/prompt/keys_unix.go (generated, vendored, new file): 13 lines
tools/vendor/github.com/Bowery/prompt/keys_windows.go (generated, vendored, new file): 34 lines
tools/vendor/github.com/Bowery/prompt/prompt.go (generated, vendored, new file): 85 lines
tools/vendor/github.com/Bowery/prompt/term.go (generated, vendored, new file): 471 lines

96
tools/vendor/github.com/Bowery/prompt/term_unix.go
generated
vendored
Normal file
96
tools/vendor/github.com/Bowery/prompt/term_unix.go
generated
vendored
Normal file
@ -0,0 +1,96 @@
|
||||
// +build linux darwin freebsd openbsd netbsd dragonfly solaris
|
||||
|
||||
// Copyright 2013-2015 Bowery, Inc.
|
||||
|
||||
package prompt
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"golang.org/x/sys/unix"
|
||||
)
|
||||
|
||||
// List of unsupported $TERM values.
|
||||
var unsupported = []string{"", "dumb", "cons25"}
|
||||
|
||||
// supportsEditing checks if the terminal supports ansi escapes.
|
||||
func supportsEditing() bool {
|
||||
term := os.Getenv("TERM")
|
||||
|
||||
for _, t := range unsupported {
|
||||
if t == term {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// isNotTerminal checks if an error is related to the input not being a terminal.
|
||||
func isNotTerminal(err error) bool {
|
||||
return err == unix.ENOTTY
|
||||
}
|
||||
|
||||
// terminal contains the private fields for a Unix terminal.
|
||||
type terminal struct {
|
||||
supportsEditing bool
|
||||
fd uintptr
|
||||
origMode unix.Termios
|
||||
}
|
||||
|
||||
// newTerminal creates a terminal and sets it to raw input mode.
|
||||
func newTerminal(in *os.File) (*terminal, error) {
|
||||
term := &terminal{fd: in.Fd()}
|
||||
|
||||
if !supportsEditing() {
|
||||
return term, nil
|
||||
}
|
||||
|
||||
t, err := getTermios(term.fd)
|
||||
if err != nil {
|
||||
if IsNotTerminal(err) {
|
||||
return term, nil
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
term.origMode = *t
|
||||
mode := term.origMode
|
||||
term.supportsEditing = true
|
||||
|
||||
// Set new mode flags, for reference see cfmakeraw(3).
|
||||
mode.Iflag &^= (unix.BRKINT | unix.IGNBRK | unix.ICRNL |
|
||||
unix.INLCR | unix.IGNCR | unix.ISTRIP | unix.IXON |
|
||||
unix.PARMRK)
|
||||
|
||||
mode.Oflag &^= unix.OPOST
|
||||
|
||||
mode.Lflag &^= (unix.ECHO | unix.ECHONL | unix.ICANON |
|
||||
unix.ISIG | unix.IEXTEN)
|
||||
|
||||
mode.Cflag &^= (unix.CSIZE | unix.PARENB)
|
||||
mode.Cflag |= unix.CS8
|
||||
|
||||
// Set controls; min num of bytes, and timeouts.
|
||||
mode.Cc[unix.VMIN] = 1
|
||||
mode.Cc[unix.VTIME] = 0
|
||||
|
||||
err = setTermios(term.fd, true, &mode)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return term, nil
|
||||
}
|
||||
|
||||
// Close disables the terminal's raw input.
|
||||
func (term *terminal) Close() error {
|
||||
if term.supportsEditing {
|
||||
err := setTermios(term.fd, false, &term.origMode)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
116
tools/vendor/github.com/Bowery/prompt/term_windows.go
generated
vendored
Normal file
116
tools/vendor/github.com/Bowery/prompt/term_windows.go
generated
vendored
Normal file
@ -0,0 +1,116 @@
|
||||
// Copyright 2013-2015 Bowery, Inc.
|
||||
|
||||
package prompt
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Flags to control the terminal's mode.
|
||||
const (
|
||||
echoInputFlag = 0x0004
|
||||
insertModeFlag = 0x0020
|
||||
lineInputFlag = 0x0002
|
||||
mouseInputFlag = 0x0010
|
||||
processedInputFlag = 0x0001
|
||||
windowInputFlag = 0x0008
|
||||
)
|
||||
|
||||
// Error number returned for an invalid handle.
|
||||
const errnoInvalidHandle = 0x6
|
||||
|
||||
var (
|
||||
kernel = syscall.NewLazyDLL("kernel32.dll")
|
||||
getConsoleScreenBufferInfo = kernel.NewProc("GetConsoleScreenBufferInfo")
|
||||
setConsoleMode = kernel.NewProc("SetConsoleMode")
|
||||
)
|
||||
|
||||
// consoleScreenBufferInfo contains various fields for the terminal.
|
||||
type consoleScreenBufferInfo struct {
|
||||
size coord
|
||||
cursorPosition coord
|
||||
attributes uint16
|
||||
window smallRect
|
||||
maximumWindowSize coord
|
||||
}
|
||||
|
||||
// coord contains coords for positioning.
|
||||
type coord struct {
|
||||
x int16
|
||||
y int16
|
||||
}
|
||||
|
||||
// smallRect contains positions for the window edges.
|
||||
type smallRect struct {
|
||||
left int16
|
||||
top int16
|
||||
right int16
|
||||
bottom int16
|
||||
}
|
||||
|
||||
// terminalSize retrieves the cols/rows for the terminal connected to out.
|
||||
func terminalSize(out *os.File) (int, int, error) {
|
||||
csbi := new(consoleScreenBufferInfo)
|
||||
|
||||
ret, _, err := getConsoleScreenBufferInfo.Call(out.Fd(), uintptr(unsafe.Pointer(csbi)))
|
||||
if ret == 0 {
|
||||
return 0, 0, err
|
||||
}
|
||||
|
||||
// Results are always off by one.
|
||||
cols := csbi.window.right - csbi.window.left + 1
|
||||
rows := csbi.window.bottom - csbi.window.top + 1
|
||||
|
||||
return int(cols), int(rows), nil
|
||||
}
|
||||
|
||||
// isNotTerminal checks if an error is related to the input not being a terminal.
|
||||
func isNotTerminal(err error) bool {
|
||||
errno, ok := err.(syscall.Errno)
|
||||
|
||||
return ok && errno == errnoInvalidHandle
|
||||
}
|
||||
|
||||
// terminal contains the private fields for a Windows terminal.
|
||||
type terminal struct {
|
||||
supportsEditing bool
|
||||
fd uintptr
|
||||
origMode uint32
|
||||
}
|
||||
|
||||
// newTerminal creates a terminal and sets it to raw input mode.
|
||||
func newTerminal(in *os.File) (*terminal, error) {
|
||||
term := &terminal{fd: in.Fd()}
|
||||
|
||||
err := syscall.GetConsoleMode(syscall.Handle(term.fd), &term.origMode)
|
||||
if err != nil {
|
||||
return term, nil
|
||||
}
|
||||
mode := term.origMode
|
||||
term.supportsEditing = true
|
||||
|
||||
// Set new mode flags.
|
||||
mode &^= (echoInputFlag | insertModeFlag | lineInputFlag | mouseInputFlag |
|
||||
processedInputFlag | windowInputFlag)
|
||||
|
||||
ret, _, err := setConsoleMode.Call(term.fd, uintptr(mode))
|
||||
if ret == 0 {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return term, nil
|
||||
}
|
||||
|
||||
// Close disables the terminal's raw input.
|
||||
func (term *terminal) Close() error {
|
||||
if term.supportsEditing {
|
||||
ret, _, err := setConsoleMode.Call(term.fd, uintptr(term.origMode))
|
||||
if ret == 0 {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
154
tools/vendor/github.com/GoASTScanner/gas/LICENSE.txt
generated
vendored
Normal file
154
tools/vendor/github.com/GoASTScanner/gas/LICENSE.txt
generated
vendored
Normal file
@ -0,0 +1,154 @@
|
||||
Apache License
|
||||
|
||||
Version 2.0, January 2004
|
||||
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and
|
||||
distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright
|
||||
owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities
|
||||
that control, are controlled by, or are under common control with that entity.
|
||||
For the purposes of this definition, "control" means (i) the power, direct or
|
||||
indirect, to cause the direction or management of such entity, whether by
|
||||
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising
|
||||
permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including
|
||||
but not limited to software source code, documentation source, and configuration
|
||||
files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or
|
||||
translation of a Source form, including but not limited to compiled object code,
|
||||
generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made
|
||||
available under the License, as indicated by a copyright notice that is included
|
||||
in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that
|
||||
is based on (or derived from) the Work and for which the editorial revisions,
|
||||
annotations, elaborations, or other modifications represent, as a whole, an
|
||||
original work of authorship. For the purposes of this License, Derivative Works
|
||||
shall not include works that remain separable from, or merely link (or bind by
|
||||
name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version
|
||||
of the Work and any modifications or additions to that Work or Derivative Works
|
||||
thereof, that is intentionally submitted to Licensor for inclusion in the Work
|
||||
by the copyright owner or by an individual or Legal Entity authorized to submit
|
||||
on behalf of the copyright owner. For the purposes of this definition,
|
||||
"submitted" means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems, and
|
||||
issue tracking systems that are managed by, or on behalf of, the Licensor for
|
||||
the purpose of discussing and improving the Work, but excluding communication
|
||||
that is conspicuously marked or otherwise designated in writing by the copyright
|
||||
owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
|
||||
of whom a Contribution has been received by Licensor and subsequently
|
||||
incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this
|
||||
License, each Contributor hereby grants to You a perpetual, worldwide,
|
||||
non-exclusive, no-charge, royalty-free, irrevocable copyright license to
|
||||
reproduce, prepare Derivative Works of, publicly display, publicly perform,
|
||||
sublicense, and distribute the Work and such Derivative Works in Source or
|
||||
Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this License,
|
||||
each Contributor hereby grants to You a perpetual, worldwide, non-exclusive,
|
||||
no-charge, royalty-free, irrevocable (except as stated in this section) patent
|
||||
license to make, have made, use, offer to sell, sell, import, and otherwise
|
||||
transfer the Work, where such license applies only to those patent claims
|
||||
licensable by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s) with the Work
|
||||
to which such Contribution(s) was submitted. If You institute patent litigation
|
||||
against any entity (including a cross-claim or counterclaim in a lawsuit)
|
||||
alleging that the Work or a Contribution incorporated within the Work
|
||||
constitutes direct or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate as of the date
|
||||
such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the Work or
|
||||
Derivative Works thereof in any medium, with or without modifications, and in
|
||||
Source or Object form, provided that You meet the following conditions:
|
||||
|
||||
You must give any other recipients of the Work or Derivative Works a copy of
|
||||
this License; and You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and You must retain, in the Source form of
|
||||
any Derivative Works that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work, excluding those notices
|
||||
that do not pertain to any part of the Derivative Works; and If the Work
|
||||
includes a "NOTICE" text file as part of its distribution, then any Derivative
|
||||
Works that You distribute must include a readable copy of the attribution
|
||||
notices contained within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one of the following
|
||||
places: within a NOTICE text file distributed as part of the Derivative Works;
|
||||
within the Source form or documentation, if provided along with the Derivative
|
||||
Works; or, within a display generated by the Derivative Works, if and wherever
|
||||
such third-party notices normally appear. The contents of the NOTICE file are
|
||||
for informational purposes only and do not modify the License. You may add Your
|
||||
own attribution notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided that such
|
||||
additional attribution notices cannot be construed as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and may provide
|
||||
additional or different license terms and conditions for use, reproduction, or
|
||||
distribution of Your modifications, or for any such Derivative Works as a whole,
|
||||
provided Your use, reproduction, and distribution of the Work otherwise complies
|
||||
with the conditions stated in this License. 5. Submission of Contributions.
|
||||
Unless You explicitly state otherwise, any Contribution intentionally submitted
|
||||
for inclusion in the Work by You to the Licensor shall be under the terms and
|
||||
conditions of this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify the terms of
|
||||
any separate license agreement you may have executed with Licensor regarding
|
||||
such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade names,
|
||||
trademarks, service marks, or product names of the Licensor, except as required
|
||||
for reasonable and customary use in describing the origin of the Work and
|
||||
reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in
|
||||
writing, Licensor provides the Work (and each Contributor provides its
|
||||
Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied, including, without limitation, any warranties
|
||||
or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any risks
|
||||
associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory, whether in
|
||||
tort (including negligence), contract, or otherwise, unless required by
|
||||
applicable law (such as deliberate and grossly negligent acts) or agreed to in
|
||||
writing, shall any Contributor be liable to You for damages, including any
|
||||
direct, indirect, special, incidental, or consequential damages of any character
|
||||
arising as a result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill, work stoppage,
|
||||
computer failure or malfunction, or any and all other commercial damages or
|
||||
losses), even if such Contributor has been advised of the possibility of such
|
||||
damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing the Work or
|
||||
Derivative Works thereof, You may choose to offer, and charge a fee for,
|
||||
acceptance of support, warranty, indemnity, or other liability obligations
|
||||
and/or rights consistent with this License. However, in accepting such
|
||||
obligations, You may act only on Your own behalf and on Your sole
|
||||
responsibility, not on behalf of any other Contributor, and only if You agree to
|
||||
indemnify, defend, and hold each Contributor harmless for any liability incurred
|
||||
by, or claims asserted against, such Contributor by reason of your accepting any
|
||||
such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
114
tools/vendor/github.com/GoASTScanner/gas/README.md
generated
vendored
Normal file
114
tools/vendor/github.com/GoASTScanner/gas/README.md
generated
vendored
Normal file
@ -0,0 +1,114 @@
|
||||
|
||||
|
||||
## GAS - Go AST Scanner
|
||||
|
||||
Inspects source code for security problems by scanning the Go AST.
|
||||
|
||||
### License
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License [here](http://www.apache.org/licenses/LICENSE-2.0).
|
||||
|
||||
### Project status
|
||||
|
||||
[](https://travis-ci.org/GoASTScanner/gas)
|
||||
[](https://godoc.org/github.com/GoASTScanner/gas)
|
||||
|
||||
Gas is still in alpha and accepting feedback from early adopters. We do
|
||||
not consider it production ready at this time.
|
||||
|
||||
### Usage
|
||||
|
||||
Gas can be configured to only run a subset of rules, to exclude certain file
|
||||
paths, and produce reports in different formats. By default all rules will be
|
||||
run against the supplied input files. To recursively scan from the current
|
||||
directory you can supply './...' as the input argument.
|
||||
|
||||
#### Selecting rules
|
||||
|
||||
By default Gas will run all rules against the supplied file paths. It is however possible to select a subset of rules to run via the '-include=' flag,
|
||||
or to specify a set of rules to explicitly exclude using the '-exclude=' flag.
|
||||
|
||||
##### Available rules
|
||||
|
||||
- G101: Look for hardcoded credentials
|
||||
- G102: Bind to all interfaces
|
||||
- G103: Audit the use of unsafe block
|
||||
- G104: Audit errors not checked
|
||||
- G105: Audit the use of math/big.Int.Exp
|
||||
- G201: SQL query construction using format string
|
||||
- G202: SQL query construction using string concatenation
|
||||
- G203: Use of unescaped data in HTML templates
|
||||
- G204: Audit use of command execution
|
||||
- G301: Poor file permissions used when creating a directory
|
||||
- G302: Poor file permissions used with chmod
|
||||
- G303: Creating tempfile using a predictable path
|
||||
- G401: Detect the usage of DES, RC4, or MD5
|
||||
- G402: Look for bad TLS connection settings
|
||||
- G403: Ensure minimum RSA key length of 2048 bits
|
||||
- G404: Insecure random number source (rand)
|
||||
- G501: Import blacklist: crypto/md5
|
||||
- G502: Import blacklist: crypto/des
|
||||
- G503: Import blacklist: crypto/rc4
|
||||
- G504: Import blacklist: net/http/cgi
|
||||
|
||||
|
||||
```
|
||||
# Run a specific set of rules
|
||||
$ gas -include=G101,G203,G401 ./...
|
||||
|
||||
# Run everything except for rule G303
|
||||
$ gas -exclude=G303 ./...
|
||||
```
|
||||
|
||||
#### Excluding files:
|
||||
|
||||
Gas can be told to ignore paths that match a supplied pattern using the 'skip' command-line option. This is
|
||||
accomplished via [go-glob](https://github.com/ryanuber/go-glob). Multiple patterns can be specified as follows:
|
||||
|
||||
```
|
||||
$ gas -skip=tests* -skip=*_example.go ./...
|
||||
```
|
||||
|
||||
#### Annotating code
|
||||
|
||||
As with all automated detection tools, there will be cases of false positives. In cases where Gas reports a failure that has been manually verified as being safe, it is possible to annotate the code with a '#nosec' comment.
|
||||
|
||||
The annotation causes Gas to stop processing any further nodes within the
|
||||
AST so it can apply to a whole block or, more granularly, to a single expression.
|
||||
|
||||
```go
|
||||
|
||||
import "md5" // #nosec
|
||||
|
||||
|
||||
func main(){
|
||||
|
||||
/* #nosec */
|
||||
if x > y {
|
||||
h := md5.New() // this will also be ignored
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
In some cases you may also want to revisit places where #nosec annotations
|
||||
have been used. To run the scanner and ignore any #nosec annotations you
|
||||
can do the following:
|
||||
|
||||
```
|
||||
$ gas -nosec=true ./...
|
||||
```
|
||||
|
||||
### Output formats
|
||||
|
||||
Gas currently supports text, json and csv output formats. By default
|
||||
results will be reported to stdout, but can also be written to an output
|
||||
file. The output format is controlled by the '-fmt' flag, and the output file is controlled by the '-out' flag as follows:
|
||||
|
||||
```
|
||||
# Write output in json format to results.json
|
||||
$ gas -fmt=json -out=results.json *.go
|
||||
```
|
235
tools/vendor/github.com/GoASTScanner/gas/core/analyzer.go
generated
vendored
Normal file
235
tools/vendor/github.com/GoASTScanner/gas/core/analyzer.go
generated
vendored
Normal file
@ -0,0 +1,235 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package core holds the central scanning logic used by GAS
|
||||
package core
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/importer"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ImportInfo is used to track aliased and initialization only imports.
|
||||
type ImportInfo struct {
|
||||
Imported map[string]string
|
||||
Aliased map[string]string
|
||||
InitOnly map[string]bool
|
||||
}
|
||||
|
||||
func NewImportInfo() *ImportInfo {
|
||||
return &ImportInfo{
|
||||
make(map[string]string),
|
||||
make(map[string]string),
|
||||
make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
// The Context is populated with data parsed from the source code as it is scanned.
|
||||
// It is passed through to all rule functions as they are called. Rules may use
|
||||
// this data in conjunction with the encountered AST node.
|
||||
type Context struct {
|
||||
FileSet *token.FileSet
|
||||
Comments ast.CommentMap
|
||||
Info *types.Info
|
||||
Pkg *types.Package
|
||||
Root *ast.File
|
||||
Config map[string]interface{}
|
||||
Imports *ImportInfo
|
||||
}
|
||||
|
||||
// The Rule interface used by all rules supported by GAS.
|
||||
type Rule interface {
|
||||
Match(ast.Node, *Context) (*Issue, error)
|
||||
}
|
||||
|
||||
// A RuleSet maps lists of rules to the type of AST node they should be run on.
|
||||
// The analyzer will only invoke rules contained in the list associated with the
|
||||
// type of AST node it is currently visiting.
|
||||
type RuleSet map[reflect.Type][]Rule
|
||||
|
||||
// Metrics used when reporting information about a scanning run.
|
||||
type Metrics struct {
|
||||
NumFiles int `json:"files"`
|
||||
NumLines int `json:"lines"`
|
||||
NumNosec int `json:"nosec"`
|
||||
NumFound int `json:"found"`
|
||||
}
|
||||
|
||||
// The Analyzer object is the main object of GAS. It has methods to traverse an AST
|
||||
// and invoke the correct checking rules on each node as required.
|
||||
type Analyzer struct {
|
||||
ignoreNosec bool
|
||||
ruleset RuleSet
|
||||
context *Context
|
||||
logger *log.Logger
|
||||
Issues []*Issue `json:"issues"`
|
||||
Stats *Metrics `json:"metrics"`
|
||||
}
|
||||
|
||||
// NewAnalyzer builds a new analyzer.
|
||||
func NewAnalyzer(conf map[string]interface{}, logger *log.Logger) Analyzer {
|
||||
if logger == nil {
|
||||
logger = log.New(os.Stdout, "[gas]", 0)
|
||||
}
|
||||
a := Analyzer{
|
||||
ignoreNosec: conf["ignoreNosec"].(bool),
|
||||
ruleset: make(RuleSet),
|
||||
context: &Context{nil, nil, nil, nil, nil, nil, nil},
|
||||
logger: logger,
|
||||
Issues: make([]*Issue, 0, 16),
|
||||
Stats: &Metrics{0, 0, 0, 0},
|
||||
}
|
||||
|
||||
// TODO(tkelsey): use the inc/exc lists
|
||||
|
||||
return a
|
||||
}
|
||||
|
||||
func (gas *Analyzer) process(filename string, source interface{}) error {
|
||||
mode := parser.ParseComments
|
||||
gas.context.FileSet = token.NewFileSet()
|
||||
root, err := parser.ParseFile(gas.context.FileSet, filename, source, mode)
|
||||
if err == nil {
|
||||
gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, root, root.Comments)
|
||||
gas.context.Root = root
|
||||
|
||||
// here we get type info
|
||||
gas.context.Info = &types.Info{
|
||||
Types: make(map[ast.Expr]types.TypeAndValue),
|
||||
Defs: make(map[*ast.Ident]types.Object),
|
||||
Uses: make(map[*ast.Ident]types.Object),
|
||||
Selections: make(map[*ast.SelectorExpr]*types.Selection),
|
||||
Scopes: make(map[ast.Node]*types.Scope),
|
||||
Implicits: make(map[ast.Node]types.Object),
|
||||
}
|
||||
|
||||
conf := types.Config{Importer: importer.Default()}
|
||||
gas.context.Pkg, err = conf.Check("pkg", gas.context.FileSet, []*ast.File{root}, gas.context.Info)
|
||||
if err != nil {
|
||||
// TODO(gm) Type checker not currently considering all files within a package
|
||||
// see: issue #113
|
||||
gas.logger.Printf(`Error during type checking: "%s"`, err)
|
||||
err = nil
|
||||
}
|
||||
|
||||
gas.context.Imports = NewImportInfo()
|
||||
for _, pkg := range gas.context.Pkg.Imports() {
|
||||
gas.context.Imports.Imported[pkg.Path()] = pkg.Name()
|
||||
}
|
||||
ast.Walk(gas, root)
|
||||
gas.Stats.NumFiles++
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// AddRule adds a rule into a rule set list mapped to the given AST node's type.
|
||||
// The node is only needed for its type and is not otherwise used.
|
||||
func (gas *Analyzer) AddRule(r Rule, nodes []ast.Node) {
|
||||
for _, n := range nodes {
|
||||
t := reflect.TypeOf(n)
|
||||
if val, ok := gas.ruleset[t]; ok {
|
||||
gas.ruleset[t] = append(val, r)
|
||||
} else {
|
||||
gas.ruleset[t] = []Rule{r}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Process reads in a source file, converts it to an AST and traverses it.
|
||||
// Rule methods added with AddRule will be invoked as necessary.
|
||||
func (gas *Analyzer) Process(filename string) error {
|
||||
err := gas.process(filename, nil)
|
||||
fun := func(f *token.File) bool {
|
||||
gas.Stats.NumLines += f.LineCount()
|
||||
return true
|
||||
}
|
||||
gas.context.FileSet.Iterate(fun)
|
||||
return err
|
||||
}
|
||||
|
||||
// ProcessSource will convert a source code string into an AST and traverse it.
|
||||
// Rule methods added with AddRule will be invoked as necessary. The string is
|
||||
// identified by the filename given but no file IO will be done.
|
||||
func (gas *Analyzer) ProcessSource(filename string, source string) error {
|
||||
err := gas.process(filename, source)
|
||||
fun := func(f *token.File) bool {
|
||||
gas.Stats.NumLines += f.LineCount()
|
||||
return true
|
||||
}
|
||||
gas.context.FileSet.Iterate(fun)
|
||||
return err
|
||||
}
|
||||
|
||||
// ignore a node (and sub-tree) if it is tagged with a "#nosec" comment
|
||||
func (gas *Analyzer) ignore(n ast.Node) bool {
|
||||
if groups, ok := gas.context.Comments[n]; ok && !gas.ignoreNosec {
|
||||
for _, group := range groups {
|
||||
if strings.Contains(group.Text(), "#nosec") {
|
||||
gas.Stats.NumNosec++
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Visit runs the GAS visitor logic over an AST created by parsing go code.
|
||||
// Rule methods added with AddRule will be invoked as necessary.
|
||||
func (gas *Analyzer) Visit(n ast.Node) ast.Visitor {
|
||||
if !gas.ignore(n) {
|
||||
|
||||
// Track aliased and initialization imports
|
||||
if imported, ok := n.(*ast.ImportSpec); ok {
|
||||
path := strings.Trim(imported.Path.Value, `"`)
|
||||
if imported.Name != nil {
|
||||
if imported.Name.Name == "_" {
|
||||
// Initialization import
|
||||
gas.context.Imports.InitOnly[path] = true
|
||||
} else {
|
||||
// Aliased import
|
||||
gas.context.Imports.Aliased[path] = imported.Name.Name
|
||||
}
|
||||
}
|
||||
// unsafe is not included in Package.Imports()
|
||||
if path == "unsafe" {
|
||||
gas.context.Imports.Imported[path] = path
|
||||
}
|
||||
}
|
||||
|
||||
if val, ok := gas.ruleset[reflect.TypeOf(n)]; ok {
|
||||
for _, rule := range val {
|
||||
ret, err := rule.Match(n, gas.context)
|
||||
if err != nil {
|
||||
file, line := GetLocation(n, gas.context)
|
||||
file = path.Base(file)
|
||||
gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
|
||||
}
|
||||
if ret != nil {
|
||||
gas.Issues = append(gas.Issues, ret)
|
||||
gas.Stats.NumFound++
|
||||
}
|
||||
}
|
||||
}
|
||||
return gas
|
||||
}
|
||||
return nil
|
||||
}
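
Taken together, the Analyzer above is driven by constructing it with NewAnalyzer, registering rules with AddRule, and calling Process or ProcessSource; findings accumulate in the Issues slice. The following is a minimal, hypothetical sketch of that flow from an outside package: the noSprintf rule and the scanned file name are assumptions made for this example, while the types and helpers it calls (Rule, Context, Issue, MatchCallByPackage, NewIssue) are the ones defined in this vendored package.

```go
// Sketch only: drives the gas core.Analyzer with a single illustrative rule.
package main

import (
	"go/ast"
	"log"

	gas "github.com/GoASTScanner/gas/core"
)

// noSprintf flags any call to fmt.Sprintf, purely to demonstrate the Rule API.
type noSprintf struct{}

func (r noSprintf) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
	if call, matched := gas.MatchCallByPackage(n, ctx, "fmt", "Sprintf"); matched {
		return gas.NewIssue(ctx, call, "fmt.Sprintf call (demo rule)", gas.Low, gas.High), nil
	}
	return nil, nil
}

func main() {
	// "ignoreNosec" is required by NewAnalyzer's configuration map.
	analyzer := gas.NewAnalyzer(map[string]interface{}{"ignoreNosec": false}, nil)

	// Only *ast.CallExpr nodes are passed to this rule's Match method.
	analyzer.AddRule(noSprintf{}, []ast.Node{(*ast.CallExpr)(nil)})

	// "main.go" is a placeholder target file for the example.
	if err := analyzer.Process("main.go"); err != nil {
		log.Fatal(err)
	}
	for _, issue := range analyzer.Issues {
		log.Printf("%s:%d %s", issue.File, issue.Line, issue.What)
	}
}
```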
|
73
tools/vendor/github.com/GoASTScanner/gas/core/call_list.go
generated
vendored
Normal file
73
tools/vendor/github.com/GoASTScanner/gas/core/call_list.go
generated
vendored
Normal file
@ -0,0 +1,73 @@
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
)
|
||||
|
||||
type set map[string]bool
|
||||
|
||||
/// CallList is used to check for usage of specific packages
|
||||
/// and functions.
|
||||
type CallList map[string]set
|
||||
|
||||
/// NewCallList creates a new empty CallList
|
||||
func NewCallList() CallList {
|
||||
return make(CallList)
|
||||
}
|
||||
|
||||
/// AddAll will add several calls to the call list at once
|
||||
func (c CallList) AddAll(selector string, idents ...string) {
|
||||
for _, ident := range idents {
|
||||
c.Add(selector, ident)
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a selector and call to the call list
|
||||
func (c CallList) Add(selector, ident string) {
|
||||
if _, ok := c[selector]; !ok {
|
||||
c[selector] = make(set)
|
||||
}
|
||||
c[selector][ident] = true
|
||||
}
|
||||
|
||||
/// Contains returns true if the package and function are
|
||||
/// members of this call list.
|
||||
func (c CallList) Contains(selector, ident string) bool {
|
||||
if idents, ok := c[selector]; ok {
|
||||
_, found := idents[ident]
|
||||
return found
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/// ContainsCallExpr resolves the call expression name and type
|
||||
/// or package and determines if it exists within the CallList
|
||||
func (c CallList) ContainsCallExpr(n ast.Node, ctx *Context) bool {
|
||||
selector, ident, err := GetCallInfo(n, ctx)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
// Try direct resolution
|
||||
if c.Contains(selector, ident) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Also support explicit path
|
||||
if path, ok := GetImportPath(selector, ctx); ok {
|
||||
return c.Contains(path, ident)
|
||||
}
|
||||
return false
|
||||
}
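
As a sketch of how the CallList above might be consumed: the execCallRule wrapper and the os/exec entries are assumptions for illustration, while NewCallList, AddAll and ContainsCallExpr are the functions defined in this file.

```go
// Sketch only: a CallList wrapped in a rule that flags subprocess launches.
package rules

import (
	"go/ast"

	gas "github.com/GoASTScanner/gas/core"
)

type execCallRule struct {
	calls gas.CallList
}

func newExecCallRule() *execCallRule {
	calls := gas.NewCallList()
	// Flag both exec.Command and exec.CommandContext.
	calls.AddAll("os/exec", "Command", "CommandContext")
	return &execCallRule{calls: calls}
}

func (r *execCallRule) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
	// ContainsCallExpr resolves the selector, following import aliases via the
	// Context, and reports whether the call is on the list.
	if r.calls.ContainsCallExpr(n, ctx) {
		return gas.NewIssue(ctx, n, "subprocess launched via os/exec", gas.Medium, gas.High), nil
	}
	return nil, nil
}
```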
|
220
tools/vendor/github.com/GoASTScanner/gas/core/helpers.go
generated
vendored
Normal file
220
tools/vendor/github.com/GoASTScanner/gas/core/helpers.go
generated
vendored
Normal file
@ -0,0 +1,220 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// helpful "canned" matching routines ----------------------------------------
|
||||
|
||||
func selectName(n ast.Node, s reflect.Type) (string, bool) {
|
||||
t := reflect.TypeOf(&ast.SelectorExpr{})
|
||||
if node, ok := SimpleSelect(n, s, t).(*ast.SelectorExpr); ok {
|
||||
t = reflect.TypeOf(&ast.Ident{})
|
||||
if ident, ok := SimpleSelect(node.X, t).(*ast.Ident); ok {
|
||||
return strings.Join([]string{ident.Name, node.Sel.Name}, "."), ok
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// MatchCall will match an ast.CallExpr if its method name obeys the given regex.
|
||||
func MatchCall(n ast.Node, r *regexp.Regexp) *ast.CallExpr {
|
||||
t := reflect.TypeOf(&ast.CallExpr{})
|
||||
if name, ok := selectName(n, t); ok && r.MatchString(name) {
|
||||
return n.(*ast.CallExpr)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MatchCallByPackage ensures that the specified package is imported,
|
||||
// adjusts the name for any aliases, and ignores
|
||||
// initialization-only imports.
|
||||
//
|
||||
// Usage:
|
||||
// node, matched := MatchCallByPackage(n, ctx, "math/rand", "Read")
|
||||
//
|
||||
func MatchCallByPackage(n ast.Node, c *Context, pkg string, names ...string) (*ast.CallExpr, bool) {
|
||||
|
||||
importedName, found := GetImportedName(pkg, c)
|
||||
if !found {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
if callExpr, ok := n.(*ast.CallExpr); ok {
|
||||
packageName, callName, err := GetCallInfo(callExpr, c)
|
||||
if err != nil {
|
||||
return nil, false
|
||||
}
|
||||
if packageName == importedName {
|
||||
for _, name := range names {
|
||||
if callName == name {
|
||||
return callExpr, true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// MatchCallByType ensures that the node is a call expression to a
|
||||
// specific object type.
|
||||
//
|
||||
// Usage:
|
||||
// node, matched := MatchCallByType(n, ctx, "bytes.Buffer", "WriteTo", "Write")
|
||||
//
|
||||
func MatchCallByType(n ast.Node, ctx *Context, requiredType string, calls ...string) (*ast.CallExpr, bool) {
|
||||
if callExpr, ok := n.(*ast.CallExpr); ok {
|
||||
typeName, callName, err := GetCallInfo(callExpr, ctx)
|
||||
if err != nil {
|
||||
return nil, false
|
||||
}
|
||||
if typeName == requiredType {
|
||||
for _, call := range calls {
|
||||
if call == callName {
|
||||
return callExpr, true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// MatchCompLit will match an ast.CompositeLit if its string value obeys the given regex.
|
||||
func MatchCompLit(n ast.Node, r *regexp.Regexp) *ast.CompositeLit {
|
||||
t := reflect.TypeOf(&ast.CompositeLit{})
|
||||
if name, ok := selectName(n, t); ok && r.MatchString(name) {
|
||||
return n.(*ast.CompositeLit)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetInt will read and return an integer value from an ast.BasicLit
|
||||
func GetInt(n ast.Node) (int64, error) {
|
||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.INT {
|
||||
return strconv.ParseInt(node.Value, 0, 64)
|
||||
}
|
||||
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||
}
|
||||
|
||||
// GetFloat will read and return a float value from an ast.BasicLit
|
||||
func GetFloat(n ast.Node) (float64, error) {
|
||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.FLOAT {
|
||||
return strconv.ParseFloat(node.Value, 64)
|
||||
}
|
||||
return 0.0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||
}
|
||||
|
||||
// GetChar will read and return a char value from an ast.BasicLit
|
||||
func GetChar(n ast.Node) (byte, error) {
|
||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.CHAR {
|
||||
return node.Value[0], nil
|
||||
}
|
||||
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||
}
|
||||
|
||||
// GetString will read and return a string value from an ast.BasicLit
|
||||
func GetString(n ast.Node) (string, error) {
|
||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.STRING {
|
||||
return strconv.Unquote(node.Value)
|
||||
}
|
||||
return "", fmt.Errorf("Unexpected AST node type: %T", n)
|
||||
}
|
||||
|
||||
// GetCallObject returns the call expression and the associated
|
||||
// object for a given AST node. nil, nil will be returned if the
|
||||
// object cannot be resolved.
|
||||
func GetCallObject(n ast.Node, ctx *Context) (*ast.CallExpr, types.Object) {
|
||||
switch node := n.(type) {
|
||||
case *ast.CallExpr:
|
||||
switch fn := node.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
return node, ctx.Info.Uses[fn]
|
||||
case *ast.SelectorExpr:
|
||||
return node, ctx.Info.Uses[fn.Sel]
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// GetCallInfo returns the package or type and name associated with a
|
||||
// call expression.
|
||||
func GetCallInfo(n ast.Node, ctx *Context) (string, string, error) {
|
||||
switch node := n.(type) {
|
||||
case *ast.CallExpr:
|
||||
switch fn := node.Fun.(type) {
|
||||
case *ast.SelectorExpr:
|
||||
switch expr := fn.X.(type) {
|
||||
case *ast.Ident:
|
||||
if expr.Obj != nil && expr.Obj.Kind == ast.Var {
|
||||
t := ctx.Info.TypeOf(expr)
|
||||
if t != nil {
|
||||
return t.String(), fn.Sel.Name, nil
|
||||
} else {
|
||||
return "undefined", fn.Sel.Name, fmt.Errorf("missing type info")
|
||||
}
|
||||
} else {
|
||||
return expr.Name, fn.Sel.Name, nil
|
||||
}
|
||||
}
|
||||
case *ast.Ident:
|
||||
return ctx.Pkg.Name(), fn.Name, nil
|
||||
}
|
||||
}
|
||||
return "", "", fmt.Errorf("unable to determine call info")
|
||||
}
|
||||
|
||||
// GetImportedName returns the name used for the package within the
|
||||
// code. It will resolve aliases and ignore initialization-only imports.
|
||||
func GetImportedName(path string, ctx *Context) (string, bool) {
|
||||
importName, imported := ctx.Imports.Imported[path]
|
||||
if !imported {
|
||||
return "", false
|
||||
}
|
||||
|
||||
if _, initonly := ctx.Imports.InitOnly[path]; initonly {
|
||||
return "", false
|
||||
}
|
||||
|
||||
if alias, ok := ctx.Imports.Aliased[path]; ok {
|
||||
importName = alias
|
||||
}
|
||||
return importName, true
|
||||
}
|
||||
|
||||
// GetImportPath resolves the full import path of an identifier based on
|
||||
// the imports in the current context.
|
||||
func GetImportPath(name string, ctx *Context) (string, bool) {
|
||||
for path := range ctx.Imports.Imported {
|
||||
if imported, ok := GetImportedName(path, ctx); ok && imported == name {
|
||||
return path, true
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// GetLocation returns the filename and line number of an ast.Node
|
||||
func GetLocation(n ast.Node, ctx *Context) (string, int) {
|
||||
fobj := ctx.FileSet.File(n.Pos())
|
||||
return fobj.Name(), fobj.Line(n.Pos())
|
||||
}
|
108
tools/vendor/github.com/GoASTScanner/gas/core/issue.go
generated
vendored
Normal file
108
tools/vendor/github.com/GoASTScanner/gas/core/issue.go
generated
vendored
Normal file
@ -0,0 +1,108 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
package core
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"os"
|
||||
)
|
||||
|
||||
// Score type used by severity and confidence values
|
||||
type Score int
|
||||
|
||||
const (
|
||||
Low Score = iota // Low value
|
||||
Medium // Medium value
|
||||
High // High value
|
||||
)
|
||||
|
||||
// An Issue is returned by a GAS rule if it discovers an issue with the scanned code.
|
||||
type Issue struct {
|
||||
Severity Score `json:"severity"` // issue severity (how problematic it is)
|
||||
Confidence Score `json:"confidence"` // issue confidence (how sure we are we found it)
|
||||
What string `json:"details"` // Human readable explanation
|
||||
File string `json:"file"` // File name we found it in
|
||||
Code string `json:"code"` // Impacted code line
|
||||
Line int `json:"line"` // Line number in file
|
||||
}
|
||||
|
||||
// MetaData is embedded in all GAS rules. The Severity, Confidence and What message
|
||||
// will be passed through to reported issues.
|
||||
type MetaData struct {
|
||||
Severity Score
|
||||
Confidence Score
|
||||
What string
|
||||
}
|
||||
|
||||
// MarshalJSON is used to convert a Score object into a JSON representation
|
||||
func (c Score) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(c.String())
|
||||
}
|
||||
|
||||
// String converts a Score into a string
|
||||
func (c Score) String() string {
|
||||
switch c {
|
||||
case High:
|
||||
return "HIGH"
|
||||
case Medium:
|
||||
return "MEDIUM"
|
||||
case Low:
|
||||
return "LOW"
|
||||
}
|
||||
return "UNDEFINED"
|
||||
}
|
||||
|
||||
func codeSnippet(file *os.File, start int64, end int64, n ast.Node) (string, error) {
|
||||
if n == nil {
|
||||
return "", fmt.Errorf("Invalid AST node provided")
|
||||
}
|
||||
|
||||
size := (int)(end - start) // Go bug, os.File.Read should return int64 ...
|
||||
file.Seek(start, 0)
|
||||
|
||||
buf := make([]byte, size)
|
||||
if nread, err := file.Read(buf); err != nil || nread != size {
|
||||
return "", fmt.Errorf("Unable to read code")
|
||||
}
|
||||
return string(buf), nil
|
||||
}
|
||||
|
||||
// NewIssue creates a new Issue
|
||||
func NewIssue(ctx *Context, node ast.Node, desc string, severity Score, confidence Score) *Issue {
|
||||
var code string
|
||||
fobj := ctx.FileSet.File(node.Pos())
|
||||
name := fobj.Name()
|
||||
line := fobj.Line(node.Pos())
|
||||
|
||||
if file, err := os.Open(fobj.Name()); err == nil {
|
||||
defer file.Close()
|
||||
s := (int64)(fobj.Position(node.Pos()).Offset) // Go bug, should be int64
|
||||
e := (int64)(fobj.Position(node.End()).Offset) // Go bug, should be int64
|
||||
code, err = codeSnippet(file, s, e, node)
|
||||
if err != nil {
|
||||
code = err.Error()
|
||||
}
|
||||
}
|
||||
|
||||
return &Issue{
|
||||
File: name,
|
||||
Line: line,
|
||||
What: desc,
|
||||
Confidence: confidence,
|
||||
Severity: severity,
|
||||
Code: code,
|
||||
}
|
||||
}
|
81
tools/vendor/github.com/GoASTScanner/gas/core/resolve.go
generated
vendored
Normal file
81
tools/vendor/github.com/GoASTScanner/gas/core/resolve.go
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
|
||||
import "go/ast"
|
||||
|
||||
func resolveIdent(n *ast.Ident, c *Context) bool {
|
||||
if n.Obj == nil || n.Obj.Kind != ast.Var {
|
||||
return true
|
||||
}
|
||||
if node, ok := n.Obj.Decl.(ast.Node); ok {
|
||||
return TryResolve(node, c)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func resolveAssign(n *ast.AssignStmt, c *Context) bool {
|
||||
for _, arg := range n.Rhs {
|
||||
if !TryResolve(arg, c) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func resolveCompLit(n *ast.CompositeLit, c *Context) bool {
|
||||
for _, arg := range n.Elts {
|
||||
if !TryResolve(arg, c) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func resolveBinExpr(n *ast.BinaryExpr, c *Context) bool {
|
||||
return (TryResolve(n.X, c) && TryResolve(n.Y, c))
|
||||
}
|
||||
|
||||
func resolveCallExpr(n *ast.CallExpr, c *Context) bool {
|
||||
// TODO(tkelsey): next step, full function resolution
|
||||
return false
|
||||
}
|
||||
|
||||
// TryResolve will attempt, given a subtree starting at some AST node, to resolve
|
||||
// all values contained within to a known constant. It is used to check for any
|
||||
// unknown values in compound expressions.
|
||||
func TryResolve(n ast.Node, c *Context) bool {
|
||||
switch node := n.(type) {
|
||||
case *ast.BasicLit:
|
||||
return true
|
||||
|
||||
case *ast.CompositeLit:
|
||||
return resolveCompLit(node, c)
|
||||
|
||||
case *ast.Ident:
|
||||
return resolveIdent(node, c)
|
||||
|
||||
case *ast.AssignStmt:
|
||||
return resolveAssign(node, c)
|
||||
|
||||
case *ast.CallExpr:
|
||||
return resolveCallExpr(node, c)
|
||||
|
||||
case *ast.BinaryExpr:
|
||||
return resolveBinExpr(node, c)
|
||||
}
|
||||
|
||||
return false
|
||||
}
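
A hedged sketch of how TryResolve is typically combined with the call helpers: a rule that warns when an exec.Command argument cannot be reduced to known constants. The taintedExecArgs rule type and its message are assumptions for this example; TryResolve, MatchCallByPackage and NewIssue are the functions defined in this vendored package.

```go
// Sketch only: uses TryResolve to check whether call arguments are constant.
package rules

import (
	"go/ast"

	gas "github.com/GoASTScanner/gas/core"
)

type taintedExecArgs struct{}

func (r taintedExecArgs) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
	call, matched := gas.MatchCallByPackage(n, ctx, "os/exec", "Command")
	if !matched {
		return nil, nil
	}
	for _, arg := range call.Args {
		// Anything TryResolve cannot reduce to known literals is treated as
		// potentially attacker-controlled input.
		if !gas.TryResolve(arg, ctx) {
			return gas.NewIssue(ctx, call, "exec.Command argument is not a constant", gas.High, gas.Medium), nil
		}
	}
	return nil, nil
}
```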
|
404
tools/vendor/github.com/GoASTScanner/gas/core/select.go
generated
vendored
Normal file
404
tools/vendor/github.com/GoASTScanner/gas/core/select.go
generated
vendored
Normal file
@ -0,0 +1,404 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package core
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// SelectFunc is like an AST visitor, but has a richer interface. It
|
||||
// is called with the current ast.Node being visited and that node's depth in
|
||||
// the tree. The function can return true to continue traversing the tree, or
|
||||
// false to end traversal here.
|
||||
type SelectFunc func(ast.Node, int) bool
|
||||
|
||||
func walkIdentList(list []*ast.Ident, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func walkExprList(list []ast.Expr, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func walkStmtList(list []ast.Stmt, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func walkDeclList(list []ast.Decl, depth int, fun SelectFunc) {
|
||||
for _, x := range list {
|
||||
depthWalk(x, depth, fun)
|
||||
}
|
||||
}
|
||||
|
||||
func depthWalk(node ast.Node, depth int, fun SelectFunc) {
|
||||
if !fun(node, depth) {
|
||||
return
|
||||
}
|
||||
|
||||
switch n := node.(type) {
|
||||
// Comments and fields
|
||||
case *ast.Comment:
|
||||
|
||||
case *ast.CommentGroup:
|
||||
for _, c := range n.List {
|
||||
depthWalk(c, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.Field:
|
||||
if n.Doc != nil {
|
||||
depthWalk(n.Doc, depth+1, fun)
|
||||
}
|
||||
walkIdentList(n.Names, depth+1, fun)
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
if n.Tag != nil {
|
||||
depthWalk(n.Tag, depth+1, fun)
|
||||
}
|
||||
if n.Comment != nil {
|
||||
depthWalk(n.Comment, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.FieldList:
|
||||
for _, f := range n.List {
|
||||
depthWalk(f, depth+1, fun)
|
||||
}
|
||||
|
||||
// Expressions
|
||||
case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
|
||||
|
||||
case *ast.Ellipsis:
|
||||
if n.Elt != nil {
|
||||
depthWalk(n.Elt, depth+1, fun)
|
||||
}
|
||||
|
||||
case *ast.FuncLit:
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
depthWalk(n.Body, depth+1, fun)
|
||||
|
||||
case *ast.CompositeLit:
|
||||
if n.Type != nil {
|
||||
depthWalk(n.Type, depth+1, fun)
|
||||
}
|
||||
walkExprList(n.Elts, depth+1, fun)
|
||||
|
||||
case *ast.ParenExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
depthWalk(n.X, depth+1, fun)
|
||||
depthWalk(n.Sel, depth+1, fun)
|
||||
|
||||
	case *ast.IndexExpr:
		depthWalk(n.X, depth+1, fun)
		depthWalk(n.Index, depth+1, fun)

	case *ast.SliceExpr:
		depthWalk(n.X, depth+1, fun)
		if n.Low != nil {
			depthWalk(n.Low, depth+1, fun)
		}
		if n.High != nil {
			depthWalk(n.High, depth+1, fun)
		}
		if n.Max != nil {
			depthWalk(n.Max, depth+1, fun)
		}

	case *ast.TypeAssertExpr:
		depthWalk(n.X, depth+1, fun)
		if n.Type != nil {
			depthWalk(n.Type, depth+1, fun)
		}

	case *ast.CallExpr:
		depthWalk(n.Fun, depth+1, fun)
		walkExprList(n.Args, depth+1, fun)

	case *ast.StarExpr:
		depthWalk(n.X, depth+1, fun)

	case *ast.UnaryExpr:
		depthWalk(n.X, depth+1, fun)

	case *ast.BinaryExpr:
		depthWalk(n.X, depth+1, fun)
		depthWalk(n.Y, depth+1, fun)

	case *ast.KeyValueExpr:
		depthWalk(n.Key, depth+1, fun)
		depthWalk(n.Value, depth+1, fun)

	// Types
	case *ast.ArrayType:
		if n.Len != nil {
			depthWalk(n.Len, depth+1, fun)
		}
		depthWalk(n.Elt, depth+1, fun)

	case *ast.StructType:
		depthWalk(n.Fields, depth+1, fun)

	case *ast.FuncType:
		if n.Params != nil {
			depthWalk(n.Params, depth+1, fun)
		}
		if n.Results != nil {
			depthWalk(n.Results, depth+1, fun)
		}

	case *ast.InterfaceType:
		depthWalk(n.Methods, depth+1, fun)

	case *ast.MapType:
		depthWalk(n.Key, depth+1, fun)
		depthWalk(n.Value, depth+1, fun)

	case *ast.ChanType:
		depthWalk(n.Value, depth+1, fun)

	// Statements
	case *ast.BadStmt:

	case *ast.DeclStmt:
		depthWalk(n.Decl, depth+1, fun)

	case *ast.EmptyStmt:

	case *ast.LabeledStmt:
		depthWalk(n.Label, depth+1, fun)
		depthWalk(n.Stmt, depth+1, fun)

	case *ast.ExprStmt:
		depthWalk(n.X, depth+1, fun)

	case *ast.SendStmt:
		depthWalk(n.Chan, depth+1, fun)
		depthWalk(n.Value, depth+1, fun)

	case *ast.IncDecStmt:
		depthWalk(n.X, depth+1, fun)

	case *ast.AssignStmt:
		walkExprList(n.Lhs, depth+1, fun)
		walkExprList(n.Rhs, depth+1, fun)

	case *ast.GoStmt:
		depthWalk(n.Call, depth+1, fun)

	case *ast.DeferStmt:
		depthWalk(n.Call, depth+1, fun)

	case *ast.ReturnStmt:
		walkExprList(n.Results, depth+1, fun)

	case *ast.BranchStmt:
		if n.Label != nil {
			depthWalk(n.Label, depth+1, fun)
		}

	case *ast.BlockStmt:
		walkStmtList(n.List, depth+1, fun)

	case *ast.IfStmt:
		if n.Init != nil {
			depthWalk(n.Init, depth+1, fun)
		}
		depthWalk(n.Cond, depth+1, fun)
		depthWalk(n.Body, depth+1, fun)
		if n.Else != nil {
			depthWalk(n.Else, depth+1, fun)
		}

	case *ast.CaseClause:
		walkExprList(n.List, depth+1, fun)
		walkStmtList(n.Body, depth+1, fun)

	case *ast.SwitchStmt:
		if n.Init != nil {
			depthWalk(n.Init, depth+1, fun)
		}
		if n.Tag != nil {
			depthWalk(n.Tag, depth+1, fun)
		}
		depthWalk(n.Body, depth+1, fun)

	case *ast.TypeSwitchStmt:
		if n.Init != nil {
			depthWalk(n.Init, depth+1, fun)
		}
		depthWalk(n.Assign, depth+1, fun)
		depthWalk(n.Body, depth+1, fun)

	case *ast.CommClause:
		if n.Comm != nil {
			depthWalk(n.Comm, depth+1, fun)
		}
		walkStmtList(n.Body, depth+1, fun)

	case *ast.SelectStmt:
		depthWalk(n.Body, depth+1, fun)

	case *ast.ForStmt:
		if n.Init != nil {
			depthWalk(n.Init, depth+1, fun)
		}
		if n.Cond != nil {
			depthWalk(n.Cond, depth+1, fun)
		}
		if n.Post != nil {
			depthWalk(n.Post, depth+1, fun)
		}
		depthWalk(n.Body, depth+1, fun)

	case *ast.RangeStmt:
		if n.Key != nil {
			depthWalk(n.Key, depth+1, fun)
		}
		if n.Value != nil {
			depthWalk(n.Value, depth+1, fun)
		}
		depthWalk(n.X, depth+1, fun)
		depthWalk(n.Body, depth+1, fun)

	// Declarations
	case *ast.ImportSpec:
		if n.Doc != nil {
			depthWalk(n.Doc, depth+1, fun)
		}
		if n.Name != nil {
			depthWalk(n.Name, depth+1, fun)
		}
		depthWalk(n.Path, depth+1, fun)
		if n.Comment != nil {
			depthWalk(n.Comment, depth+1, fun)
		}

	case *ast.ValueSpec:
		if n.Doc != nil {
			depthWalk(n.Doc, depth+1, fun)
		}
		walkIdentList(n.Names, depth+1, fun)
		if n.Type != nil {
			depthWalk(n.Type, depth+1, fun)
		}
		walkExprList(n.Values, depth+1, fun)
		if n.Comment != nil {
			depthWalk(n.Comment, depth+1, fun)
		}

	case *ast.TypeSpec:
		if n.Doc != nil {
			depthWalk(n.Doc, depth+1, fun)
		}
		depthWalk(n.Name, depth+1, fun)
		depthWalk(n.Type, depth+1, fun)
		if n.Comment != nil {
			depthWalk(n.Comment, depth+1, fun)
		}

	case *ast.BadDecl:

	case *ast.GenDecl:
		if n.Doc != nil {
			depthWalk(n.Doc, depth+1, fun)
		}
		for _, s := range n.Specs {
			depthWalk(s, depth+1, fun)
		}

	case *ast.FuncDecl:
		if n.Doc != nil {
			depthWalk(n.Doc, depth+1, fun)
		}
		if n.Recv != nil {
			depthWalk(n.Recv, depth+1, fun)
		}
		depthWalk(n.Name, depth+1, fun)
		depthWalk(n.Type, depth+1, fun)
		if n.Body != nil {
			depthWalk(n.Body, depth+1, fun)
		}

	// Files and packages
	case *ast.File:
		if n.Doc != nil {
			depthWalk(n.Doc, depth+1, fun)
		}
		depthWalk(n.Name, depth+1, fun)
		walkDeclList(n.Decls, depth+1, fun)
		// don't walk n.Comments - they have been
		// visited already through the individual
		// nodes

	case *ast.Package:
		for _, f := range n.Files {
			depthWalk(f, depth+1, fun)
		}

	default:
		panic(fmt.Sprintf("gas.depthWalk: unexpected node type %T", n))
	}
}

type Selector interface {
	Final(ast.Node)
	Partial(ast.Node) bool
}

func Select(s Selector, n ast.Node, bits ...reflect.Type) {
	fun := func(n ast.Node, d int) bool {
		if d < len(bits) && reflect.TypeOf(n) == bits[d] {
			if d == len(bits)-1 {
				s.Final(n)
				return false
			} else if s.Partial(n) {
				return true
			}
		}
		return false
	}
	depthWalk(n, 0, fun)
}

// SimpleSelect will try to match a path through a sub-tree starting at a given AST node.
// The type of each node in the path at a given depth must match its entry in the list of
// node types given.
func SimpleSelect(n ast.Node, bits ...reflect.Type) ast.Node {
	var found ast.Node
	fun := func(n ast.Node, d int) bool {
		if found != nil {
			return false // short cut logic if we have found a match
		}

		if d < len(bits) && reflect.TypeOf(n) == bits[d] {
			if d == len(bits)-1 {
				found = n
				return false
			}
			return true
		}
		return false
	}

	depthWalk(n, 0, fun)
	return found
}
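Select and SimpleSelect above match a fixed chain of node types by depth while walking a subtree. A minimal usage sketch follows; it assumes the vendored core package is importable as gas "github.com/GoASTScanner/gas/core" (as main.go below does) and that depthWalk visits the root node at depth 0, which the rest of the file implies but is not shown in this hunk:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"reflect"

	gas "github.com/GoASTScanner/gas/core"
)

func main() {
	src := `package demo
func f() { x := g(); _ = x }
func g() int { return 0 }`

	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}

	ast.Inspect(file, func(n ast.Node) bool {
		assign, ok := n.(*ast.AssignStmt)
		if !ok {
			return true
		}
		// Match the type path AssignStmt -> CallExpr, rooted at the assignment.
		if hit := gas.SimpleSelect(assign,
			reflect.TypeOf(&ast.AssignStmt{}),
			reflect.TypeOf(&ast.CallExpr{}),
		); hit != nil {
			fmt.Println("assignment from a call at", fset.Position(hit.Pos()))
		}
		return true
	})
}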
87
tools/vendor/github.com/GoASTScanner/gas/filelist.go
generated
vendored
Normal file
@ -0,0 +1,87 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"sort"
	"strings"

	"github.com/ryanuber/go-glob"
)

// fileList uses a map for patterns to ensure each pattern only
// appears once
type fileList struct {
	patterns map[string]struct{}
}

func newFileList(paths ...string) *fileList {
	f := &fileList{
		patterns: make(map[string]struct{}),
	}
	for _, p := range paths {
		f.patterns[p] = struct{}{}
	}
	return f
}

func (f *fileList) String() string {
	ps := make([]string, 0, len(f.patterns))
	for p := range f.patterns {
		ps = append(ps, p)
	}
	sort.Strings(ps)
	return strings.Join(ps, ", ")
}

func (f *fileList) Set(path string) error {
	if path == "" {
		// don't bother adding the empty path
		return nil
	}
	f.patterns[path] = struct{}{}
	return nil
}

func (f fileList) Contains(path string) bool {
	for p := range f.patterns {
		if strings.Contains(p, glob.GLOB) {
			if glob.Glob(p, path) {
				if logger != nil {
					logger.Printf("skipping: %s\n", path)
				}
				return true
			}
		} else {
			// check if only a sub-folder of the path is excluded
			if strings.Contains(path, p) {
				if logger != nil {
					logger.Printf("skipping: %s\n", path)
				}
				return true
			}

		}
	}
	return false
}

/*
func (f fileList) Dump() {
	for k, _ := range f.paths {
		println(k)
	}
}
*/
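The fileList type above mixes two matching modes: a pattern containing '*' goes through go-glob, anything else is a plain substring test against the path. A short illustrative sketch, written as if it sat in the same package (the pattern values here are examples, not defaults from this change, and a "fmt" import is assumed):

	skip := newFileList("*_test.go", "*.pb.go")
	if err := skip.Set("vendor"); err != nil { // plain substring pattern: any path containing "vendor"
		panic(err)
	}

	fmt.Println(skip.String())                    // "*.pb.go, *_test.go, vendor"
	fmt.Println(skip.Contains("pkg/vendor/x.go")) // true  (substring match)
	fmt.Println(skip.Contains("pkg/db_test.go"))  // true  (glob match on *_test.go)
	fmt.Println(skip.Contains("pkg/handler.go"))  // false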
293
tools/vendor/github.com/GoASTScanner/gas/main.go
generated
vendored
Normal file
@ -0,0 +1,293 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"github.com/GoASTScanner/gas/output"
|
||||
)
|
||||
|
||||
type recursion bool
|
||||
|
||||
const (
|
||||
recurse recursion = true
|
||||
noRecurse recursion = false
|
||||
)
|
||||
|
||||
var (
|
||||
// #nosec flag
|
||||
flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
|
||||
|
||||
// format output
|
||||
flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")
|
||||
|
||||
// output file
|
||||
flagOutput = flag.String("out", "", "Set output file for results")
|
||||
|
||||
// config file
|
||||
flagConfig = flag.String("conf", "", "Path to optional config file")
|
||||
|
||||
// quiet
|
||||
flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
|
||||
|
||||
usageText = `
|
||||
GAS - Go AST Scanner
|
||||
|
||||
Gas analyzes Go source code to look for common programming mistakes that
|
||||
can lead to security problems.
|
||||
|
||||
USAGE:
|
||||
|
||||
# Check a single Go file
|
||||
$ gas example.go
|
||||
|
||||
# Check all files under the current directory and save results in
|
||||
# json format.
|
||||
$ gas -fmt=json -out=results.json ./...
|
||||
|
||||
# Run a specific set of rules (by default all rules will be run):
|
||||
$ gas -include=G101,G203,G401 ./...
|
||||
|
||||
# Run all rules except the provided
|
||||
$ gas -exclude=G101 ./...
|
||||
|
||||
`
|
||||
|
||||
logger *log.Logger
|
||||
)
|
||||
|
||||
func extendConfList(conf map[string]interface{}, name string, inputStr string) {
|
||||
if inputStr == "" {
|
||||
conf[name] = []string{}
|
||||
} else {
|
||||
input := strings.Split(inputStr, ",")
|
||||
if val, ok := conf[name]; ok {
|
||||
if data, ok := val.(*[]string); ok {
|
||||
conf[name] = append(*data, input...)
|
||||
} else {
|
||||
logger.Fatal("Config item must be a string list: ", name)
|
||||
}
|
||||
} else {
|
||||
conf[name] = input
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func buildConfig(incRules string, excRules string) map[string]interface{} {
|
||||
config := make(map[string]interface{})
|
||||
if flagConfig != nil && *flagConfig != "" { // parse config if we have one
|
||||
if data, err := ioutil.ReadFile(*flagConfig); err == nil {
|
||||
if err := json.Unmarshal(data, &(config)); err != nil {
|
||||
logger.Fatal("Could not parse JSON config: ", *flagConfig, ": ", err)
|
||||
}
|
||||
} else {
|
||||
logger.Fatal("Could not read config file: ", *flagConfig)
|
||||
}
|
||||
}
|
||||
|
||||
// add in CLI include and exclude data
|
||||
extendConfList(config, "include", incRules)
|
||||
extendConfList(config, "exclude", excRules)
|
||||
|
||||
// override ignoreNosec if given on CLI
|
||||
if flagIgnoreNoSec != nil {
|
||||
config["ignoreNosec"] = *flagIgnoreNoSec
|
||||
} else {
|
||||
val, ok := config["ignoreNosec"]
|
||||
if !ok {
|
||||
config["ignoreNosec"] = false
|
||||
} else if _, ok := val.(bool); !ok {
|
||||
logger.Fatal("Config value must be a bool: 'ignoreNosec'")
|
||||
}
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
// #nosec
|
||||
func usage() {
|
||||
|
||||
fmt.Fprintln(os.Stderr, usageText)
|
||||
fmt.Fprint(os.Stderr, "OPTIONS:\n\n")
|
||||
flag.PrintDefaults()
|
||||
fmt.Fprint(os.Stderr, "\n\nRULES:\n\n")
|
||||
|
||||
// sorted rule list for ease of reading
|
||||
rl := GetFullRuleList()
|
||||
keys := make([]string, 0, len(rl))
|
||||
for key := range rl {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
for _, k := range keys {
|
||||
v := rl[k]
|
||||
fmt.Fprintf(os.Stderr, "\t%s: %s\n", k, v.description)
|
||||
}
|
||||
fmt.Fprint(os.Stderr, "\n")
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
||||
// Setup usage description
|
||||
flag.Usage = usage
|
||||
|
||||
// Exclude files
|
||||
excluded := newFileList("*_test.go")
|
||||
flag.Var(excluded, "skip", "File pattern to exclude from scan. Uses simple * globs and requires full or partial match")
|
||||
|
||||
incRules := ""
|
||||
flag.StringVar(&incRules, "include", "", "Comma separated list of rules IDs to include. (see rule list)")
|
||||
|
||||
excRules := ""
|
||||
flag.StringVar(&excRules, "exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
|
||||
|
||||
// Custom commands / utilities to run instead of default analyzer
|
||||
tools := newUtils()
|
||||
flag.Var(tools, "tool", "GAS utilities to assist with rule development")
|
||||
|
||||
// Setup logging
|
||||
logger = log.New(os.Stderr, "[gas] ", log.LstdFlags)
|
||||
|
||||
// Parse command line arguments
|
||||
flag.Parse()
|
||||
|
||||
// Ensure at least one file was specified
|
||||
if flag.NArg() == 0 {
|
||||
|
||||
fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n")
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Run utils instead of analysis
|
||||
if len(tools.call) > 0 {
|
||||
tools.run(flag.Args()...)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// Setup analyzer
|
||||
config := buildConfig(incRules, excRules)
|
||||
analyzer := gas.NewAnalyzer(config, logger)
|
||||
AddRules(&analyzer, config)
|
||||
|
||||
toAnalyze := getFilesToAnalyze(flag.Args(), excluded)
|
||||
|
||||
for _, file := range toAnalyze {
|
||||
logger.Printf(`Processing "%s"...`, file)
|
||||
if err := analyzer.Process(file); err != nil {
|
||||
logger.Printf(`Failed to process: "%s"`, file)
|
||||
logger.Println(err)
|
||||
logger.Fatalf(`Halting execution.`)
|
||||
}
|
||||
}
|
||||
|
||||
issuesFound := len(analyzer.Issues) > 0
|
||||
// Exit quietly if nothing was found
|
||||
if !issuesFound && *flagQuiet {
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// Create output report
|
||||
if *flagOutput != "" {
|
||||
outfile, err := os.Create(*flagOutput)
|
||||
if err != nil {
|
||||
logger.Fatalf("Couldn't open: %s for writing. Reason - %s", *flagOutput, err)
|
||||
}
|
||||
defer outfile.Close()
|
||||
output.CreateReport(outfile, *flagFormat, &analyzer)
|
||||
} else {
|
||||
output.CreateReport(os.Stdout, *flagFormat, &analyzer)
|
||||
}
|
||||
|
||||
// Do we have an issue? If so exit 1
|
||||
if issuesFound {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// getFilesToAnalyze expands the given paths (files, directories, or "./...") into the list of files to analyze
|
||||
func getFilesToAnalyze(paths []string, excluded *fileList) []string {
|
||||
//log.Println("getFilesToAnalyze: start")
|
||||
var toAnalyze []string
|
||||
for _, relativePath := range paths {
|
||||
//log.Printf("getFilesToAnalyze: processing \"%s\"\n", path)
|
||||
// get the absolute path before doing anything else
|
||||
path, err := filepath.Abs(relativePath)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if filepath.Base(relativePath) == "..." {
|
||||
toAnalyze = append(
|
||||
toAnalyze,
|
||||
listFiles(filepath.Dir(path), recurse, excluded)...,
|
||||
)
|
||||
} else {
|
||||
var (
|
||||
finfo os.FileInfo
|
||||
err error
|
||||
)
|
||||
if finfo, err = os.Stat(path); err != nil {
|
||||
logger.Fatal(err)
|
||||
}
|
||||
if !finfo.IsDir() {
|
||||
if shouldInclude(path, excluded) {
|
||||
toAnalyze = append(toAnalyze, path)
|
||||
}
|
||||
} else {
|
||||
toAnalyze = listFiles(path, noRecurse, excluded)
|
||||
}
|
||||
}
|
||||
}
|
||||
//log.Println("getFilesToAnalyze: end")
|
||||
return toAnalyze
|
||||
}
|
||||
|
||||
// listFiles returns a list of all files found that pass the shouldInclude check.
|
||||
// If doRecursiveWalk is true, it will walk the tree rooted at absPath, otherwise it
|
||||
// will only include files directly within the dir referenced by absPath.
|
||||
func listFiles(absPath string, doRecursiveWalk recursion, excluded *fileList) []string {
|
||||
var files []string
|
||||
|
||||
walk := func(path string, info os.FileInfo, err error) error {
|
||||
if info.IsDir() && doRecursiveWalk == noRecurse {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
if shouldInclude(path, excluded) {
|
||||
files = append(files, path)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := filepath.Walk(absPath, walk); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
// shouldInclude checks if a specific path which is expected to reference
|
||||
// a regular file should be included
|
||||
func shouldInclude(path string, excluded *fileList) bool {
|
||||
return filepath.Ext(path) == ".go" && !excluded.Contains(path)
|
||||
}
|
116
tools/vendor/github.com/GoASTScanner/gas/output/formatter.go
generated
vendored
Normal file
@ -0,0 +1,116 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package output
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
htmlTemplate "html/template"
|
||||
"io"
|
||||
"strconv"
|
||||
plainTemplate "text/template"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
)
|
||||
|
||||
// The output format for reported issues
|
||||
type ReportFormat int
|
||||
|
||||
const (
|
||||
ReportText ReportFormat = iota // Plain text format
|
||||
ReportJSON // Json format
|
||||
ReportCSV // CSV format
|
||||
)
|
||||
|
||||
var text = `Results:
|
||||
{{ range $index, $issue := .Issues }}
|
||||
[{{ $issue.File }}:{{ $issue.Line }}] - {{ $issue.What }} (Confidence: {{ $issue.Confidence}}, Severity: {{ $issue.Severity }})
|
||||
> {{ $issue.Code }}
|
||||
|
||||
{{ end }}
|
||||
Summary:
|
||||
Files: {{.Stats.NumFiles}}
|
||||
Lines: {{.Stats.NumLines}}
|
||||
Nosec: {{.Stats.NumNosec}}
|
||||
Issues: {{.Stats.NumFound}}
|
||||
|
||||
`
|
||||
|
||||
func CreateReport(w io.Writer, format string, data *gas.Analyzer) error {
|
||||
var err error
|
||||
switch format {
|
||||
case "json":
|
||||
err = reportJSON(w, data)
|
||||
case "csv":
|
||||
err = reportCSV(w, data)
|
||||
case "html":
|
||||
err = reportFromHTMLTemplate(w, html, data)
|
||||
case "text":
|
||||
err = reportFromPlaintextTemplate(w, text, data)
|
||||
default:
|
||||
err = reportFromPlaintextTemplate(w, text, data)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func reportJSON(w io.Writer, data *gas.Analyzer) error {
|
||||
raw, err := json.MarshalIndent(data, "", "\t")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
_, err = w.Write(raw)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func reportCSV(w io.Writer, data *gas.Analyzer) error {
|
||||
out := csv.NewWriter(w)
|
||||
defer out.Flush()
|
||||
for _, issue := range data.Issues {
|
||||
err := out.Write([]string{
|
||||
issue.File,
|
||||
strconv.Itoa(issue.Line),
|
||||
issue.What,
|
||||
issue.Severity.String(),
|
||||
issue.Confidence.String(),
|
||||
issue.Code,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func reportFromPlaintextTemplate(w io.Writer, reportTemplate string, data *gas.Analyzer) error {
|
||||
t, e := plainTemplate.New("gas").Parse(reportTemplate)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
|
||||
return t.Execute(w, data)
|
||||
}
|
||||
|
||||
func reportFromHTMLTemplate(w io.Writer, reportTemplate string, data *gas.Analyzer) error {
|
||||
t, e := htmlTemplate.New("gas").Parse(reportTemplate)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
|
||||
return t.Execute(w, data)
|
||||
}
|
401
tools/vendor/github.com/GoASTScanner/gas/output/template.go
generated
vendored
Normal file
@ -0,0 +1,401 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package output
|
||||
|
||||
const html = `
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Go AST Scanner</title>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bulma/0.2.1/css/bulma.min.css" integrity="sha256-DRcOKg8NK1KkSkcymcGmxOtS/lAn0lHWJXRa15gMHHk=" crossorigin="anonymous"/>
|
||||
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/react/15.3.2/react.min.js" integrity="sha256-cLWs9L+cjZg8CjGHMpJqUgKKouPlmoMP/0wIdPtaPGs=" crossorigin="anonymous"></script>
|
||||
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/react/15.3.2/react-dom.min.js" integrity="sha256-JIW8lNqN2EtqC6ggNZYnAdKMJXRQfkPMvdRt+b0/Jxc=" crossorigin="anonymous"></script>
|
||||
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/babel-standalone/6.17.0/babel.min.js" integrity="sha256-1IWWLlCKFGFj/cjryvC7GDF5wRYnf9tSvNVVEj8Bm+o=" crossorigin="anonymous"></script>
|
||||
<style>
|
||||
div.issue div.tag, div.panel-block input[type="checkbox"] {
|
||||
margin-right: 0.5em;
|
||||
}
|
||||
|
||||
label.disabled {
|
||||
text-decoration: line-through;
|
||||
}
|
||||
|
||||
nav.panel select {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.break-word {
|
||||
word-wrap: break-word;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<section class="section">
|
||||
<div class="container">
|
||||
<div id="content"></div>
|
||||
</div>
|
||||
</section>
|
||||
<script>
|
||||
var data = {{ . }};
|
||||
</script>
|
||||
<script type="text/babel">
|
||||
var IssueTag = React.createClass({
|
||||
render: function() {
|
||||
var level = ""
|
||||
if (this.props.level === "HIGH") {
|
||||
level = "is-danger";
|
||||
}
|
||||
if (this.props.level === "MEDIUM") {
|
||||
level = "is-warning";
|
||||
}
|
||||
return (
|
||||
<div className={ "tag " + level }>
|
||||
{ this.props.label }: { this.props.level }
|
||||
</div>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
var Issue = React.createClass({
|
||||
render: function() {
|
||||
return (
|
||||
<div className="issue box">
|
||||
<div className="is-pulled-right">
|
||||
<IssueTag label="Severity" level={ this.props.data.severity }/>
|
||||
<IssueTag label="Confidence" level={ this.props.data.confidence }/>
|
||||
</div>
|
||||
<p>
|
||||
<strong className="break-word">
|
||||
{ this.props.data.file } (line { this.props.data.line })
|
||||
</strong>
|
||||
<br/>
|
||||
{ this.props.data.details }
|
||||
</p>
|
||||
<figure className="highlight">
|
||||
<pre>
|
||||
<code className="golang hljs">
|
||||
{ this.props.data.code }
|
||||
</code>
|
||||
</pre>
|
||||
</figure>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
var Stats = React.createClass({
|
||||
render: function() {
|
||||
return (
|
||||
<p className="help">
|
||||
Scanned { this.props.data.metrics.files.toLocaleString() } files
|
||||
with { this.props.data.metrics.lines.toLocaleString() } lines of code.
|
||||
</p>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
var Issues = React.createClass({
|
||||
render: function() {
|
||||
if (this.props.data.metrics.files === 0) {
|
||||
return (
|
||||
<div className="notification">
|
||||
No source files found. Do you even Go?
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (this.props.data.issues.length === 0) {
|
||||
return (
|
||||
<div>
|
||||
<div className="notification">
|
||||
Awesome! No issues found!
|
||||
</div>
|
||||
<Stats data={ this.props.data } />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
var issues = this.props.data.issues
|
||||
.filter(function(issue) {
|
||||
return this.props.severity.includes(issue.severity);
|
||||
}.bind(this))
|
||||
.filter(function(issue) {
|
||||
return this.props.confidence.includes(issue.confidence);
|
||||
}.bind(this))
|
||||
.filter(function(issue) {
|
||||
if (this.props.issueType) {
|
||||
return issue.details.toLowerCase().startsWith(this.props.issueType.toLowerCase());
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}.bind(this))
|
||||
.map(function(issue) {
|
||||
return (<Issue data={issue} />);
|
||||
}.bind(this));
|
||||
|
||||
if (issues.length === 0) {
|
||||
return (
|
||||
<div>
|
||||
<div className="notification">
|
||||
No issues matched given filters
|
||||
(of total { this.props.data.issues.length } issues).
|
||||
</div>
|
||||
<Stats data={ this.props.data } />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="issues">
|
||||
{ issues }
|
||||
<Stats data={ this.props.data } />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
var LevelSelector = React.createClass({
|
||||
handleChange: function(level) {
|
||||
return function(e) {
|
||||
var updated = this.props.selected
|
||||
.filter(function(item) { return item != level; });
|
||||
if (e.target.checked) {
|
||||
updated.push(level);
|
||||
}
|
||||
this.props.onChange(updated);
|
||||
}.bind(this);
|
||||
},
|
||||
render: function() {
|
||||
var highDisabled = !this.props.available.includes("HIGH");
|
||||
var mediumDisabled = !this.props.available.includes("MEDIUM");
|
||||
var lowDisabled = !this.props.available.includes("LOW");
|
||||
|
||||
return (
|
||||
<span>
|
||||
<label className={"label checkbox " + (highDisabled ? "disabled" : "") }>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={ this.props.selected.includes("HIGH") }
|
||||
disabled={ highDisabled }
|
||||
onChange={ this.handleChange("HIGH") }/>
|
||||
High
|
||||
</label>
|
||||
<label className={"label checkbox " + (mediumDisabled ? "disabled" : "") }>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={ this.props.selected.includes("MEDIUM") }
|
||||
disabled={ mediumDisabled }
|
||||
onChange={ this.handleChange("MEDIUM") }/>
|
||||
Medium
|
||||
</label>
|
||||
<label className={"label checkbox " + (lowDisabled ? "disabled" : "") }>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={ this.props.selected.includes("LOW") }
|
||||
disabled={ lowDisabled }
|
||||
onChange={ this.handleChange("LOW") }/>
|
||||
Low
|
||||
</label>
|
||||
</span>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
var Navigation = React.createClass({
|
||||
updateSeverity: function(vals) {
|
||||
this.props.onSeverity(vals);
|
||||
},
|
||||
updateConfidence: function(vals) {
|
||||
this.props.onConfidence(vals);
|
||||
},
|
||||
updateIssueType: function(e) {
|
||||
if (e.target.value == "all") {
|
||||
this.props.onIssueType(null);
|
||||
} else {
|
||||
this.props.onIssueType(e.target.value);
|
||||
}
|
||||
},
|
||||
render: function() {
|
||||
var issueTypes = this.props.allIssueTypes
|
||||
.map(function(it) {
|
||||
return (
|
||||
<option value={ it } selected={ this.props.issueType == it }>
|
||||
{ it }
|
||||
</option>
|
||||
);
|
||||
}.bind(this));
|
||||
|
||||
return (
|
||||
<nav className="panel">
|
||||
<div className="panel-heading">
|
||||
Filters
|
||||
</div>
|
||||
<div className="panel-block">
|
||||
<strong>
|
||||
Severity
|
||||
</strong>
|
||||
</div>
|
||||
<div className="panel-block">
|
||||
<LevelSelector
|
||||
selected={ this.props.severity }
|
||||
available={ this.props.allSeverities }
|
||||
onChange={ this.updateSeverity } />
|
||||
</div>
|
||||
<div className="panel-block">
|
||||
<strong>
|
||||
Confidence
|
||||
</strong>
|
||||
</div>
|
||||
<div className="panel-block">
|
||||
<LevelSelector
|
||||
selected={ this.props.confidence }
|
||||
available={ this.props.allConfidences }
|
||||
onChange={ this.updateConfidence } />
|
||||
</div>
|
||||
<div className="panel-block">
|
||||
<strong>
|
||||
Issue Type
|
||||
</strong>
|
||||
</div>
|
||||
<div className="panel-block">
|
||||
<select onChange={ this.updateIssueType }>
|
||||
<option value="all" selected={ !this.props.issueType }>
|
||||
(all)
|
||||
</option>
|
||||
{ issueTypes }
|
||||
</select>
|
||||
</div>
|
||||
</nav>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
var IssueBrowser = React.createClass({
|
||||
getInitialState: function() {
|
||||
return {};
|
||||
},
|
||||
componentWillMount: function() {
|
||||
this.updateIssues(this.props.data);
|
||||
},
|
||||
handleSeverity: function(val) {
|
||||
this.updateIssueTypes(this.props.data.issues, val, this.state.confidence);
|
||||
this.setState({severity: val});
|
||||
},
|
||||
handleConfidence: function(val) {
|
||||
this.updateIssueTypes(this.props.data.issues, this.state.severity, val);
|
||||
this.setState({confidence: val});
|
||||
},
|
||||
handleIssueType: function(val) {
|
||||
this.setState({issueType: val});
|
||||
},
|
||||
updateIssues: function(data) {
|
||||
if (!data) {
|
||||
this.setState({data: data});
|
||||
return;
|
||||
}
|
||||
|
||||
var allSeverities = data.issues
|
||||
.map(function(issue) {
|
||||
return issue.severity
|
||||
})
|
||||
.sort()
|
||||
.filter(function(item, pos, ary) {
|
||||
return !pos || item != ary[pos - 1];
|
||||
});
|
||||
|
||||
var allConfidences = data.issues
|
||||
.map(function(issue) {
|
||||
return issue.confidence
|
||||
})
|
||||
.sort()
|
||||
.filter(function(item, pos, ary) {
|
||||
return !pos || item != ary[pos - 1];
|
||||
});
|
||||
|
||||
var selectedSeverities = allSeverities;
|
||||
var selectedConfidences = allConfidences;
|
||||
|
||||
this.updateIssueTypes(data.issues, selectedSeverities, selectedConfidences);
|
||||
|
||||
this.setState({
|
||||
data: data,
|
||||
severity: selectedSeverities,
|
||||
allSeverities: allSeverities,
|
||||
confidence: selectedConfidences,
|
||||
allConfidences: allConfidences,
|
||||
issueType: null
|
||||
});
|
||||
},
|
||||
updateIssueTypes: function(issues, severities, confidences) {
|
||||
var allTypes = issues
|
||||
.filter(function(issue) {
|
||||
return severities.includes(issue.severity);
|
||||
})
|
||||
.filter(function(issue) {
|
||||
return confidences.includes(issue.confidence);
|
||||
})
|
||||
.map(function(issue) {
|
||||
return issue.details;
|
||||
})
|
||||
.sort()
|
||||
.filter(function(item, pos, ary) {
|
||||
return !pos || item != ary[pos - 1];
|
||||
});
|
||||
|
||||
if (this.state.issueType && !allTypes.includes(this.state.issueType)) {
|
||||
this.setState({issueType: null});
|
||||
}
|
||||
|
||||
this.setState({allIssueTypes: allTypes});
|
||||
},
|
||||
render: function() {
|
||||
return (
|
||||
<div className="content">
|
||||
<div className="columns">
|
||||
<div className="column is-one-quarter">
|
||||
<Navigation
|
||||
severity={ this.state.severity }
|
||||
confidence={ this.state.confidence }
|
||||
issueType={ this.state.issueType }
|
||||
allSeverities={ this.state.allSeverities }
|
||||
allConfidences={ this.state.allConfidences }
|
||||
allIssueTypes={ this.state.allIssueTypes }
|
||||
onSeverity={ this.handleSeverity }
|
||||
onConfidence={ this.handleConfidence }
|
||||
onIssueType={ this.handleIssueType }
|
||||
/>
|
||||
</div>
|
||||
<div className="column is-three-quarters">
|
||||
<Issues
|
||||
data={ this.props.data }
|
||||
severity={ this.state.severity }
|
||||
confidence={ this.state.confidence }
|
||||
issueType={ this.state.issueType }
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
ReactDOM.render(
|
||||
<IssueBrowser data={ data } />,
|
||||
document.getElementById("content")
|
||||
);
|
||||
</script>
|
||||
</body>
|
||||
</html>`
|
91
tools/vendor/github.com/GoASTScanner/gas/rulelist.go
generated
vendored
Normal file
@ -0,0 +1,91 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"go/ast"

	gas "github.com/GoASTScanner/gas/core"
	"github.com/GoASTScanner/gas/rules"
)

type RuleInfo struct {
	description string
	build       func(map[string]interface{}) (gas.Rule, []ast.Node)
}

// GetFullRuleList gets the full list of all rules available to GAS
func GetFullRuleList() map[string]RuleInfo {
	return map[string]RuleInfo{
		// misc
		"G101": RuleInfo{"Look for hardcoded credentials", rules.NewHardcodedCredentials},
		"G102": RuleInfo{"Bind to all interfaces", rules.NewBindsToAllNetworkInterfaces},
		"G103": RuleInfo{"Audit the use of unsafe block", rules.NewUsingUnsafe},
		"G104": RuleInfo{"Audit errors not checked", rules.NewNoErrorCheck},
		"G105": RuleInfo{"Audit the use of big.Exp function", rules.NewUsingBigExp},

		// injection
		"G201": RuleInfo{"SQL query construction using format string", rules.NewSqlStrFormat},
		"G202": RuleInfo{"SQL query construction using string concatenation", rules.NewSqlStrConcat},
		"G203": RuleInfo{"Use of unescaped data in HTML templates", rules.NewTemplateCheck},
		"G204": RuleInfo{"Audit use of command execution", rules.NewSubproc},

		// filesystem
		"G301": RuleInfo{"Poor file permissions used when creating a directory", rules.NewMkdirPerms},
		"G302": RuleInfo{"Poor file permissions used when creating a file or using chmod", rules.NewFilePerms},
		"G303": RuleInfo{"Creating tempfile using a predictable path", rules.NewBadTempFile},

		// crypto
		"G401": RuleInfo{"Detect the usage of DES, RC4, or MD5", rules.NewUsesWeakCryptography},
		"G402": RuleInfo{"Look for bad TLS connection settings", rules.NewIntermediateTlsCheck},
		"G403": RuleInfo{"Ensure minimum RSA key length of 2048 bits", rules.NewWeakKeyStrength},
		"G404": RuleInfo{"Insecure random number source (rand)", rules.NewWeakRandCheck},

		// blacklist
		"G501": RuleInfo{"Import blacklist: crypto/md5", rules.NewBlacklist_crypto_md5},
		"G502": RuleInfo{"Import blacklist: crypto/des", rules.NewBlacklist_crypto_des},
		"G503": RuleInfo{"Import blacklist: crypto/rc4", rules.NewBlacklist_crypto_rc4},
		"G504": RuleInfo{"Import blacklist: net/http/cgi", rules.NewBlacklist_net_http_cgi},
	}
}

func AddRules(analyzer *gas.Analyzer, conf map[string]interface{}) {
	var all map[string]RuleInfo

	inc := conf["include"].([]string)
	exc := conf["exclude"].([]string)

	// add included rules
	if len(inc) == 0 {
		all = GetFullRuleList()
	} else {
		all = map[string]RuleInfo{}
		tmp := GetFullRuleList()
		for _, v := range inc {
			if val, ok := tmp[v]; ok {
				all[v] = val
			}
		}
	}

	// remove excluded rules
	for _, v := range exc {
		delete(all, v)
	}

	for _, v := range all {
		analyzer.AddRule(v.build(conf))
	}
}
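AddRules above keeps the full rule table when no include list is given, then drops any excluded IDs. A hedged sketch of that selection step in isolation (an illustrative helper, not part of the vendored code; it assumes the surrounding package and a "sort" import):

// selectRules mirrors the include/exclude filtering in AddRules and returns
// the rule IDs that would actually be registered with the analyzer.
func selectRules(all map[string]RuleInfo, include, exclude []string) []string {
	chosen := map[string]RuleInfo{}
	if len(include) == 0 {
		for id, info := range all {
			chosen[id] = info
		}
	} else {
		for _, id := range include {
			if info, ok := all[id]; ok {
				chosen[id] = info
			}
		}
	}
	for _, id := range exclude {
		delete(chosen, id)
	}
	ids := make([]string, 0, len(chosen))
	for id := range chosen {
		ids = append(ids, id)
	}
	sort.Strings(ids)
	return ids
}

// selectRules(GetFullRuleList(), nil, []string{"G104"}) yields every rule ID
// except G104, matching `gas -exclude=G104 ./...`.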
44
tools/vendor/github.com/GoASTScanner/gas/rules/big.go
generated
vendored
Normal file
@ -0,0 +1,44 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	gas "github.com/GoASTScanner/gas/core"
	"go/ast"
)

type UsingBigExp struct {
	gas.MetaData
	pkg   string
	calls []string
}

func (r *UsingBigExp) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
	if _, matched := gas.MatchCallByType(n, c, r.pkg, r.calls...); matched {
		return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
	}
	return nil, nil
}
func NewUsingBigExp(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &UsingBigExp{
		pkg:   "*math/big.Int",
		calls: []string{"Exp"},
		MetaData: gas.MetaData{
			What:       "Use of math/big.Int.Exp function should be audited for modulus == 0",
			Severity:   gas.Low,
			Confidence: gas.High,
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
52
tools/vendor/github.com/GoASTScanner/gas/rules/bind.go
generated
vendored
Normal file
@ -0,0 +1,52 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"go/ast"
	"regexp"

	gas "github.com/GoASTScanner/gas/core"
)

// Looks for net.Listen("0.0.0.0") or net.Listen(":8080")
type BindsToAllNetworkInterfaces struct {
	gas.MetaData
	call    *regexp.Regexp
	pattern *regexp.Regexp
}

func (r *BindsToAllNetworkInterfaces) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
	if node := gas.MatchCall(n, r.call); node != nil {
		if arg, err := gas.GetString(node.Args[1]); err == nil {
			if r.pattern.MatchString(arg) {
				return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
			}
		}
	}
	return
}

func NewBindsToAllNetworkInterfaces(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &BindsToAllNetworkInterfaces{
		call:    regexp.MustCompile(`^(net|tls)\.Listen$`),
		pattern: regexp.MustCompile(`^(0.0.0.0|:).*$`),
		MetaData: gas.MetaData{
			Severity:   gas.Medium,
			Confidence: gas.High,
			What:       "Binds to all network interfaces",
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
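For context, the rule above reports the second argument of net.Listen (or tls.Listen) when it matches `^(0.0.0.0|:).*$`. Illustrative target code, not part of this change:

package demo

import "net"

func listeners() error {
	// Flagged: explicitly binds on every interface.
	if _, err := net.Listen("tcp", "0.0.0.0:8080"); err != nil {
		return err
	}
	// Flagged: an empty host part also binds on all interfaces.
	if _, err := net.Listen("tcp", ":8443"); err != nil {
		return err
	}
	// Not flagged: loopback only.
	_, err := net.Listen("tcp", "127.0.0.1:8080")
	return err
}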
79
tools/vendor/github.com/GoASTScanner/gas/rules/blacklist.go
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
)
|
||||
|
||||
type BlacklistImport struct {
|
||||
gas.MetaData
|
||||
Path string
|
||||
}
|
||||
|
||||
func (r *BlacklistImport) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
|
||||
if node, ok := n.(*ast.ImportSpec); ok {
|
||||
if r.Path == node.Path.Value && node.Name.String() != "_" {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewBlacklist_crypto_md5(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Confidence: gas.High,
|
||||
What: "Use of weak cryptographic primitive",
|
||||
},
|
||||
Path: `"crypto/md5"`,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
}
|
||||
|
||||
func NewBlacklist_crypto_des(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Confidence: gas.High,
|
||||
What: "Use of weak cryptographic primitive",
|
||||
},
|
||||
Path: `"crypto/des"`,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
}
|
||||
|
||||
func NewBlacklist_crypto_rc4(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Confidence: gas.High,
|
||||
What: "Use of weak cryptographic primitive",
|
||||
},
|
||||
Path: `"crypto/rc4"`,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
}
|
||||
|
||||
func NewBlacklist_net_http_cgi(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
return &BlacklistImport{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.High,
|
||||
Confidence: gas.High,
|
||||
What: "Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)",
|
||||
},
|
||||
Path: `"net/http/cgi"`,
|
||||
}, []ast.Node{(*ast.ImportSpec)(nil)}
|
||||
}
|
96
tools/vendor/github.com/GoASTScanner/gas/rules/errors.go
generated
vendored
Normal file
@ -0,0 +1,96 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"go/ast"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
type NoErrorCheck struct {
|
||||
gas.MetaData
|
||||
whitelist gas.CallList
|
||||
}
|
||||
|
||||
func returnsError(callExpr *ast.CallExpr, ctx *gas.Context) int {
|
||||
if tv := ctx.Info.TypeOf(callExpr); tv != nil {
|
||||
switch t := tv.(type) {
|
||||
case *types.Tuple:
|
||||
for pos := 0; pos < t.Len(); pos += 1 {
|
||||
variable := t.At(pos)
|
||||
if variable != nil && variable.Type().String() == "error" {
|
||||
return pos
|
||||
}
|
||||
}
|
||||
case *types.Named:
|
||||
if t.String() == "error" {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func (r *NoErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
switch stmt := n.(type) {
|
||||
case *ast.AssignStmt:
|
||||
for _, expr := range stmt.Rhs {
|
||||
if callExpr, ok := expr.(*ast.CallExpr); ok && !r.whitelist.ContainsCallExpr(callExpr, ctx) {
|
||||
pos := returnsError(callExpr, ctx)
|
||||
if pos < 0 || pos >= len(stmt.Lhs) {
|
||||
return nil, nil
|
||||
}
|
||||
if id, ok := stmt.Lhs[pos].(*ast.Ident); ok && id.Name == "_" {
|
||||
return gas.NewIssue(ctx, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
case *ast.ExprStmt:
|
||||
if callExpr, ok := stmt.X.(*ast.CallExpr); ok && !r.whitelist.ContainsCallExpr(callExpr, ctx) {
|
||||
pos := returnsError(callExpr, ctx)
|
||||
if pos >= 0 {
|
||||
return gas.NewIssue(ctx, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewNoErrorCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
|
||||
// TODO(gm) Come up with sensible defaults here. Or flip it to use a
|
||||
// black list instead.
|
||||
whitelist := gas.NewCallList()
|
||||
whitelist.AddAll("bytes.Buffer", "Write", "WriteByte", "WriteRune", "WriteString")
|
||||
whitelist.AddAll("fmt", "Print", "Printf", "Println")
|
||||
whitelist.Add("io.PipeWriter", "CloseWithError")
|
||||
|
||||
if configured, ok := conf["G104"]; ok {
|
||||
if whitelisted, ok := configured.(map[string][]string); ok {
|
||||
for key, val := range whitelisted {
|
||||
whitelist.AddAll(key, val...)
|
||||
}
|
||||
}
|
||||
}
|
||||
return &NoErrorCheck{
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Low,
|
||||
Confidence: gas.High,
|
||||
What: "Errors unhandled.",
|
||||
},
|
||||
whitelist: whitelist,
|
||||
}, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)}
|
||||
}
|
85
tools/vendor/github.com/GoASTScanner/gas/rules/fileperms.go
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"strconv"
|
||||
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
)
|
||||
|
||||
type FilePermissions struct {
|
||||
gas.MetaData
|
||||
mode int64
|
||||
pkg string
|
||||
calls []string
|
||||
}
|
||||
|
||||
func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMode int64) int64 {
|
||||
var mode int64 = defaultMode
|
||||
if value, ok := conf[configKey]; ok {
|
||||
switch value.(type) {
|
||||
case int64:
|
||||
mode = value.(int64)
|
||||
case string:
|
||||
if m, e := strconv.ParseInt(value.(string), 0, 64); e != nil {
|
||||
mode = defaultMode
|
||||
} else {
|
||||
mode = m
|
||||
}
|
||||
}
|
||||
}
|
||||
return mode
|
||||
}
|
||||
|
||||
func (r *FilePermissions) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
|
||||
if callexpr, matched := gas.MatchCallByPackage(n, c, r.pkg, r.calls...); matched {
|
||||
modeArg := callexpr.Args[len(callexpr.Args)-1]
|
||||
if mode, err := gas.GetInt(modeArg); err == nil && mode > r.mode {
|
||||
return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewFilePerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
mode := getConfiguredMode(conf, "G302", 0600)
|
||||
return &FilePermissions{
|
||||
mode: mode,
|
||||
pkg: "os",
|
||||
calls: []string{"OpenFile", "Chmod"},
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
What: fmt.Sprintf("Expect file permissions to be %#o or less", mode),
|
||||
},
|
||||
}, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
}
|
||||
|
||||
func NewMkdirPerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
mode := getConfiguredMode(conf, "G301", 0700)
|
||||
return &FilePermissions{
|
||||
mode: mode,
|
||||
pkg: "os",
|
||||
calls: []string{"Mkdir", "MkdirAll"},
|
||||
MetaData: gas.MetaData{
|
||||
Severity: gas.Medium,
|
||||
Confidence: gas.High,
|
||||
What: fmt.Sprintf("Expect directory permissions to be %#o or less", mode),
|
||||
},
|
||||
}, []ast.Node{(*ast.CallExpr)(nil)}
|
||||
}
|
148
tools/vendor/github.com/GoASTScanner/gas/rules/hardcoded_credentials.go
generated
vendored
Normal file
@ -0,0 +1,148 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
gas "github.com/GoASTScanner/gas/core"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"regexp"
|
||||
|
||||
"github.com/nbutton23/zxcvbn-go"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type Credentials struct {
|
||||
gas.MetaData
|
||||
pattern *regexp.Regexp
|
||||
entropyThreshold float64
|
||||
perCharThreshold float64
|
||||
truncate int
|
||||
ignoreEntropy bool
|
||||
}
|
||||
|
||||
func truncate(s string, n int) string {
|
||||
if n > len(s) {
|
||||
return s
|
||||
}
|
||||
return s[:n]
|
||||
}
|
||||
|
||||
func (r *Credentials) isHighEntropyString(str string) bool {
|
||||
s := truncate(str, r.truncate)
|
||||
info := zxcvbn.PasswordStrength(s, []string{})
|
||||
entropyPerChar := info.Entropy / float64(len(s))
|
||||
return (info.Entropy >= r.entropyThreshold ||
|
||||
(info.Entropy >= (r.entropyThreshold/2) &&
|
||||
entropyPerChar >= r.perCharThreshold))
|
||||
}
|
||||
|
||||
func (r *Credentials) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||
switch node := n.(type) {
|
||||
case *ast.AssignStmt:
|
||||
return r.matchAssign(node, ctx)
|
||||
case *ast.GenDecl:
|
||||
return r.matchGenDecl(node, ctx)
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *Credentials) matchAssign(assign *ast.AssignStmt, ctx *gas.Context) (*gas.Issue, error) {
|
||||
for _, i := range assign.Lhs {
|
||||
if ident, ok := i.(*ast.Ident); ok {
|
||||
if r.pattern.MatchString(ident.Name) {
|
||||
for _, e := range assign.Rhs {
|
||||
if val, err := gas.GetString(e); err == nil {
|
||||
if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) {
|
||||
return gas.NewIssue(ctx, assign, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *Credentials) matchGenDecl(decl *ast.GenDecl, ctx *gas.Context) (*gas.Issue, error) {
|
||||
if decl.Tok != token.CONST && decl.Tok != token.VAR {
|
||||
return nil, nil
|
||||
}
|
||||
for _, spec := range decl.Specs {
|
||||
if valueSpec, ok := spec.(*ast.ValueSpec); ok {
|
||||
for index, ident := range valueSpec.Names {
|
||||
if r.pattern.MatchString(ident.Name) && valueSpec.Values != nil {
|
||||
// const foo, bar = "same value"
|
||||
if len(valueSpec.Values) <= index {
|
||||
index = len(valueSpec.Values) - 1
|
||||
}
|
||||
if val, err := gas.GetString(valueSpec.Values[index]); err == nil {
|
||||
if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) {
|
||||
return gas.NewIssue(ctx, valueSpec, r.What, r.Severity, r.Confidence), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func NewHardcodedCredentials(conf map[string]interface{}) (gas.Rule, []ast.Node) {
|
||||
pattern := `(?i)passwd|pass|password|pwd|secret|token`
|
||||
entropyThreshold := 80.0
|
||||
perCharThreshold := 3.0
|
||||
ignoreEntropy := false
|
||||
var truncateString int = 16
|
||||
if val, ok := conf["G101"]; ok {
|
||||
conf := val.(map[string]string)
|
||||
if configPattern, ok := conf["pattern"]; ok {
|
||||
pattern = configPattern
|
||||
}
|
||||
if configIgnoreEntropy, ok := conf["ignore_entropy"]; ok {
|
||||
if parsedBool, err := strconv.ParseBool(configIgnoreEntropy); err == nil {
|
||||
ignoreEntropy = parsedBool
|
||||
}
|
||||
}
|
||||
if configEntropyThreshold, ok := conf["entropy_threshold"]; ok {
|
||||
if parsedNum, err := strconv.ParseFloat(configEntropyThreshold, 64); err == nil {
|
||||
entropyThreshold = parsedNum
|
||||
}
|
||||
}
|
||||
if configCharThreshold, ok := conf["per_char_threshold"]; ok {
|
||||
if parsedNum, err := strconv.ParseFloat(configCharThreshold, 64); err == nil {
|
||||
perCharThreshold = parsedNum
|
||||
}
|
||||
}
|
||||
if configTruncate, ok := conf["truncate"]; ok {
|
||||
if parsedInt, err := strconv.Atoi(configTruncate); err == nil {
|
||||
truncateString = parsedInt
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &Credentials{
|
||||
pattern: regexp.MustCompile(pattern),
|
||||
entropyThreshold: entropyThreshold,
|
||||
perCharThreshold: perCharThreshold,
|
||||
ignoreEntropy: ignoreEntropy,
|
||||
truncate: truncateString,
|
||||
MetaData: gas.MetaData{
|
||||
What: "Potential hardcoded credentials",
|
||||
Confidence: gas.Low,
|
||||
Severity: gas.High,
|
||||
},
|
||||
}, []ast.Node{(*ast.AssignStmt)(nil), (*ast.GenDecl)(nil)}
|
||||
}
|
49
tools/vendor/github.com/GoASTScanner/gas/rules/rand.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"go/ast"

	gas "github.com/GoASTScanner/gas/core"
)

type WeakRand struct {
	gas.MetaData
	funcNames   []string
	packagePath string
}

func (w *WeakRand) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
	for _, funcName := range w.funcNames {
		if _, matched := gas.MatchCallByPackage(n, c, w.packagePath, funcName); matched {
			return gas.NewIssue(c, n, w.What, w.Severity, w.Confidence), nil
		}
	}

	return nil, nil
}

func NewWeakRandCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &WeakRand{
		funcNames:   []string{"Read", "Int"},
		packagePath: "math/rand",
		MetaData: gas.MetaData{
			Severity:   gas.High,
			Confidence: gas.Medium,
			What:       "Use of weak random number generator (math/rand instead of crypto/rand)",
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
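To make the G404 rule above concrete, this is the kind of call it reports versus the preferred alternative (illustrative only): math/rand is deterministic and unsuitable wherever unpredictability matters.

package demo

import (
	cryptorand "crypto/rand"
	"math/rand"
)

func tokens() ([]byte, error) {
	weak := make([]byte, 32)
	rand.Read(weak) // Flagged by G404: math/rand.Read is not cryptographically secure.

	strong := make([]byte, 32)
	_, err := cryptorand.Read(strong) // Preferred source for secrets.
	return strong, err
}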
51
tools/vendor/github.com/GoASTScanner/gas/rules/rsa.go
generated
vendored
Normal file
@ -0,0 +1,51 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"fmt"
	"go/ast"
	"regexp"

	gas "github.com/GoASTScanner/gas/core"
)

type WeakKeyStrength struct {
	gas.MetaData
	pattern *regexp.Regexp
	bits    int
}

func (w *WeakKeyStrength) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
	if node := gas.MatchCall(n, w.pattern); node != nil {
		if bits, err := gas.GetInt(node.Args[1]); err == nil && bits < (int64)(w.bits) {
			return gas.NewIssue(c, n, w.What, w.Severity, w.Confidence), nil
		}
	}
	return nil, nil
}

func NewWeakKeyStrength(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	bits := 2048
	return &WeakKeyStrength{
		pattern: regexp.MustCompile(`^rsa\.GenerateKey$`),
		bits:    bits,
		MetaData: gas.MetaData{
			Severity:   gas.Medium,
			Confidence: gas.High,
			What:       fmt.Sprintf("RSA keys should be at least %d bits", bits),
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
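Editorial aside, not part of the vendored file: what the rule above would and would not flag, sketched against the standard crypto/rsa API.

```go
package main

import (
	"crypto/rand"
	"crypto/rsa"
)

func main() {
	// flagged: 1024 is below the rule's 2048-bit floor
	weak, _ := rsa.GenerateKey(rand.Reader, 1024)

	// not flagged: meets the minimum key strength
	ok, _ := rsa.GenerateKey(rand.Reader, 2048)

	_, _ = weak, ok
}
```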
99
tools/vendor/github.com/GoASTScanner/gas/rules/sql.go
generated
vendored
Normal file
99
tools/vendor/github.com/GoASTScanner/gas/rules/sql.go
generated
vendored
Normal file
@ -0,0 +1,99 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"go/ast"
	"regexp"

	gas "github.com/GoASTScanner/gas/core"
)

type SqlStatement struct {
	gas.MetaData
	pattern *regexp.Regexp
}

type SqlStrConcat struct {
	SqlStatement
}

// see if we can figure out what it is
func (s *SqlStrConcat) checkObject(n *ast.Ident) bool {
	if n.Obj != nil {
		return n.Obj.Kind != ast.Var && n.Obj.Kind != ast.Fun
	}
	return false
}

// Look for "SELECT * FROM table WHERE " + " ' OR 1=1"
func (s *SqlStrConcat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
	if node, ok := n.(*ast.BinaryExpr); ok {
		if start, ok := node.X.(*ast.BasicLit); ok {
			if str, e := gas.GetString(start); s.pattern.MatchString(str) && e == nil {
				if _, ok := node.Y.(*ast.BasicLit); ok {
					return nil, nil // string cat OK
				}
				if second, ok := node.Y.(*ast.Ident); ok && s.checkObject(second) {
					return nil, nil
				}
				return gas.NewIssue(c, n, s.What, s.Severity, s.Confidence), nil
			}
		}
	}
	return nil, nil
}

func NewSqlStrConcat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &SqlStrConcat{
		SqlStatement: SqlStatement{
			pattern: regexp.MustCompile(`(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `),
			MetaData: gas.MetaData{
				Severity:   gas.Medium,
				Confidence: gas.High,
				What:       "SQL string concatenation",
			},
		},
	}, []ast.Node{(*ast.BinaryExpr)(nil)}
}

type SqlStrFormat struct {
	SqlStatement
	call *regexp.Regexp
}

// Looks for "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)"
func (s *SqlStrFormat) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
	if node := gas.MatchCall(n, s.call); node != nil {
		if arg, e := gas.GetString(node.Args[0]); s.pattern.MatchString(arg) && e == nil {
			return gas.NewIssue(c, n, s.What, s.Severity, s.Confidence), nil
		}
	}
	return nil, nil
}

func NewSqlStrFormat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &SqlStrFormat{
		call: regexp.MustCompile(`^fmt\.Sprintf$`),
		SqlStatement: SqlStatement{
			pattern: regexp.MustCompile("(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "),
			MetaData: gas.MetaData{
				Severity:   gas.Medium,
				Confidence: gas.High,
				What:       "SQL string formatting",
			},
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
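Editorial aside, not part of the vendored file: a sketch of code the two SQL rules above would report, plus the parameterised form they steer you towards. The table and column names are placeholders.

```go
package main

import (
	"database/sql"
	"fmt"
)

func queries(db *sql.DB, name string) {
	// flagged by SqlStrConcat: SQL keyword literal concatenated with a variable
	rows, err := db.Query("SELECT * FROM users WHERE name = '" + name + "'")
	if err == nil {
		rows.Close()
	}

	// flagged by SqlStrFormat: fmt.Sprintf building a SQL string
	q := fmt.Sprintf("SELECT * FROM users WHERE name = '%s'", name)
	_ = q

	// safer: parameterised query, not matched by either rule
	_, _ = db.Query("SELECT * FROM users WHERE name = ?", name)
}
```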
56
tools/vendor/github.com/GoASTScanner/gas/rules/subproc.go
generated
vendored
Normal file
56
tools/vendor/github.com/GoASTScanner/gas/rules/subproc.go
generated
vendored
Normal file
@ -0,0 +1,56 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"go/ast"
	"regexp"
	"strings"

	gas "github.com/GoASTScanner/gas/core"
)

type Subprocess struct {
	pattern *regexp.Regexp
}

func (r *Subprocess) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
	if node := gas.MatchCall(n, r.pattern); node != nil {
		for _, arg := range node.Args {
			if !gas.TryResolve(arg, c) {
				what := "Subprocess launching with variable."
				return gas.NewIssue(c, n, what, gas.High, gas.High), nil
			}
		}

		// call with partially qualified command
		if str, err := gas.GetString(node.Args[0]); err == nil {
			if !strings.HasPrefix(str, "/") {
				what := "Subprocess launching with partial path."
				return gas.NewIssue(c, n, what, gas.Medium, gas.High), nil
			}
		}

		what := "Subprocess launching should be audited."
		return gas.NewIssue(c, n, what, gas.Low, gas.High), nil
	}
	return nil, nil
}

func NewSubproc(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &Subprocess{
		pattern: regexp.MustCompile(`^exec\.Command|syscall\.Exec$`),
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
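Editorial aside, not part of the vendored file: a sketch of the three outcomes of the subprocess rule above, using only os/exec.

```go
package main

import "os/exec"

func run(userArg string) {
	// flagged at high severity: userArg cannot be resolved to a constant
	_ = exec.Command("/bin/sh", "-c", userArg).Run()

	// flagged at medium severity: "ls" is a partially qualified command path
	_ = exec.Command("ls", "-l").Run()

	// flagged at low severity only: fully qualified path, constant arguments
	_ = exec.Command("/bin/ls", "-l").Run()
}
```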
49
tools/vendor/github.com/GoASTScanner/gas/rules/tempfiles.go
generated
vendored
Normal file
49
tools/vendor/github.com/GoASTScanner/gas/rules/tempfiles.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"go/ast"
	"regexp"

	gas "github.com/GoASTScanner/gas/core"
)

type BadTempFile struct {
	gas.MetaData
	args *regexp.Regexp
	call *regexp.Regexp
}

func (t *BadTempFile) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
	if node := gas.MatchCall(n, t.call); node != nil {
		if arg, e := gas.GetString(node.Args[0]); t.args.MatchString(arg) && e == nil {
			return gas.NewIssue(c, n, t.What, t.Severity, t.Confidence), nil
		}
	}
	return nil, nil
}

func NewBadTempFile(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &BadTempFile{
		call: regexp.MustCompile(`ioutil\.WriteFile|os\.Create`),
		args: regexp.MustCompile(`^/tmp/.*$|^/var/tmp/.*$`),
		MetaData: gas.MetaData{
			Severity:   gas.Medium,
			Confidence: gas.High,
			What:       "File creation in shared tmp directory without using ioutil.Tempfile",
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
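Editorial aside, not part of the vendored file: what the temp-file rule above matches, and the ioutil.TempFile alternative it recommends. The file name is a placeholder.

```go
package main

import "io/ioutil"

func write(data []byte) error {
	// flagged: fixed path under the shared /tmp directory matches the rule's args pattern
	if err := ioutil.WriteFile("/tmp/demo.out", data, 0600); err != nil {
		return err
	}

	// not flagged: ioutil.TempFile picks a unique name outside the matched patterns
	f, err := ioutil.TempFile("", "demo")
	if err != nil {
		return err
	}
	defer f.Close()
	_, err = f.Write(data)
	return err
}
```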
49
tools/vendor/github.com/GoASTScanner/gas/rules/templates.go
generated
vendored
Normal file
49
tools/vendor/github.com/GoASTScanner/gas/rules/templates.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"go/ast"
	"regexp"

	gas "github.com/GoASTScanner/gas/core"
)

type TemplateCheck struct {
	gas.MetaData
	call *regexp.Regexp
}

func (t *TemplateCheck) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
	if node := gas.MatchCall(n, t.call); node != nil {
		for _, arg := range node.Args {
			if _, ok := arg.(*ast.BasicLit); !ok { // basic lits are safe
				return gas.NewIssue(c, n, t.What, t.Severity, t.Confidence), nil
			}
		}
	}
	return nil, nil
}

func NewTemplateCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &TemplateCheck{
		call: regexp.MustCompile(`^template\.(HTML|JS|URL)$`),
		MetaData: gas.MetaData{
			Severity:   gas.Medium,
			Confidence: gas.Low,
			What:       "this method will not auto-escape HTML. Verify data is well formed.",
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
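Editorial aside, not part of the vendored file: the template.HTML conversion looks like a call in the AST, which is what the rule above matches when its argument is not a literal. A minimal sketch:

```go
package main

import (
	"html/template"
	"os"
)

func render(userInput string) {
	tmpl := template.Must(template.New("page").Parse(`<p>{{.}}</p>`))

	// flagged: template.HTML with a non-literal argument bypasses auto-escaping
	tmpl.Execute(os.Stdout, template.HTML(userInput))

	// not flagged: passing the raw string lets html/template escape it
	tmpl.Execute(os.Stdout, userInput)
}
```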
191
tools/vendor/github.com/GoASTScanner/gas/rules/tls.go
generated
vendored
Normal file
191
tools/vendor/github.com/GoASTScanner/gas/rules/tls.go
generated
vendored
Normal file
@ -0,0 +1,191 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"fmt"
	"go/ast"
	"reflect"
	"regexp"

	gas "github.com/GoASTScanner/gas/core"
)

type InsecureConfigTLS struct {
	MinVersion  int16
	MaxVersion  int16
	pattern     *regexp.Regexp
	goodCiphers []string
}

func stringInSlice(a string, list []string) bool {
	for _, b := range list {
		if b == a {
			return true
		}
	}
	return false
}

func (t *InsecureConfigTLS) processTlsCipherSuites(n ast.Node, c *gas.Context) *gas.Issue {
	a := reflect.TypeOf(&ast.KeyValueExpr{})
	b := reflect.TypeOf(&ast.CompositeLit{})
	if node, ok := gas.SimpleSelect(n, a, b).(*ast.CompositeLit); ok {
		for _, elt := range node.Elts {
			if ident, ok := elt.(*ast.SelectorExpr); ok {
				if !stringInSlice(ident.Sel.Name, t.goodCiphers) {
					str := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name)
					return gas.NewIssue(c, n, str, gas.High, gas.High)
				}
			}
		}
	}
	return nil
}

func (t *InsecureConfigTLS) processTlsConfVal(n *ast.KeyValueExpr, c *gas.Context) *gas.Issue {
	if ident, ok := n.Key.(*ast.Ident); ok {
		switch ident.Name {
		case "InsecureSkipVerify":
			if node, ok := n.Value.(*ast.Ident); ok {
				if node.Name != "false" {
					return gas.NewIssue(c, n, "TLS InsecureSkipVerify set true.", gas.High, gas.High)
				}
			} else {
				// TODO(tk): symbol tab look up to get the actual value
				return gas.NewIssue(c, n, "TLS InsecureSkipVerify may be true.", gas.High, gas.Low)
			}

		case "PreferServerCipherSuites":
			if node, ok := n.Value.(*ast.Ident); ok {
				if node.Name == "false" {
					return gas.NewIssue(c, n, "TLS PreferServerCipherSuites set false.", gas.Medium, gas.High)
				}
			} else {
				// TODO(tk): symbol tab look up to get the actual value
				return gas.NewIssue(c, n, "TLS PreferServerCipherSuites may be false.", gas.Medium, gas.Low)
			}

		case "MinVersion":
			if ival, ierr := gas.GetInt(n.Value); ierr == nil {
				if (int16)(ival) < t.MinVersion {
					return gas.NewIssue(c, n, "TLS MinVersion too low.", gas.High, gas.High)
				}
				// TODO(tk): symbol tab look up to get the actual value
				return gas.NewIssue(c, n, "TLS MinVersion may be too low.", gas.High, gas.Low)
			}

		case "MaxVersion":
			if ival, ierr := gas.GetInt(n.Value); ierr == nil {
				if (int16)(ival) < t.MaxVersion {
					return gas.NewIssue(c, n, "TLS MaxVersion too low.", gas.High, gas.High)
				}
				// TODO(tk): symbol tab look up to get the actual value
				return gas.NewIssue(c, n, "TLS MaxVersion may be too low.", gas.High, gas.Low)
			}

		case "CipherSuites":
			if ret := t.processTlsCipherSuites(n, c); ret != nil {
				return ret
			}

		}

	}
	return nil
}

func (t *InsecureConfigTLS) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
	if node := gas.MatchCompLit(n, t.pattern); node != nil {
		for _, elt := range node.Elts {
			if kve, ok := elt.(*ast.KeyValueExpr); ok {
				gi = t.processTlsConfVal(kve, c)
				if gi != nil {
					break
				}
			}
		}
	}
	return
}

func NewModernTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	// https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
	return &InsecureConfigTLS{
		pattern:    regexp.MustCompile(`^tls\.Config$`),
		MinVersion: 0x0303, // TLS 1.2 only
		MaxVersion: 0x0303,
		goodCiphers: []string{
			"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
			"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
			"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
			"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
		},
	}, []ast.Node{(*ast.CompositeLit)(nil)}
}

func NewIntermediateTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	// https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29
	return &InsecureConfigTLS{
		pattern:    regexp.MustCompile(`^tls\.Config$`),
		MinVersion: 0x0301, // TLS 1.2, 1.1, 1.0
		MaxVersion: 0x0303,
		goodCiphers: []string{
			"TLS_RSA_WITH_AES_128_CBC_SHA",
			"TLS_RSA_WITH_AES_256_CBC_SHA",
			"TLS_RSA_WITH_AES_128_GCM_SHA256",
			"TLS_RSA_WITH_AES_256_GCM_SHA384",
			"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
			"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
			"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_RC4_128_SHA",
			"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
			"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
			"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
			"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
		},
	}, []ast.Node{(*ast.CompositeLit)(nil)}
}

func NewCompatTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	// https://wiki.mozilla.org/Security/Server_Side_TLS#Old_compatibility_.28default.29
	return &InsecureConfigTLS{
		pattern:    regexp.MustCompile(`^tls\.Config$`),
		MinVersion: 0x0301, // TLS 1.2, 1.1, 1.0
		MaxVersion: 0x0303,
		goodCiphers: []string{
			"TLS_RSA_WITH_RC4_128_SHA",
			"TLS_RSA_WITH_3DES_EDE_CBC_SHA",
			"TLS_RSA_WITH_AES_128_CBC_SHA",
			"TLS_RSA_WITH_AES_256_CBC_SHA",
			"TLS_RSA_WITH_AES_128_GCM_SHA256",
			"TLS_RSA_WITH_AES_256_GCM_SHA384",
			"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
			"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
			"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_RC4_128_SHA",
			"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
			"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
			"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
			"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
			"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
		},
	}, []ast.Node{(*ast.CompositeLit)(nil)}
}
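Editorial aside, not part of the vendored file: a sketch of a tls.Config literal that the checks above would report, assuming the Modern profile (MinVersion floor 0x0303, i.e. TLS 1.2).

```go
package main

import (
	"crypto/tls"
	"net/http"
)

func insecureClient() *http.Client {
	cfg := &tls.Config{
		InsecureSkipVerify: true,   // flagged: certificate verification disabled
		MinVersion:         0x0301, // flagged by the Modern profile: below 0x0303 (TLS 1.2)
	}
	return &http.Client{
		Transport: &http.Transport{TLSClientConfig: cfg},
	}
}
```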
45
tools/vendor/github.com/GoASTScanner/gas/rules/unsafe.go
generated
vendored
Normal file
45
tools/vendor/github.com/GoASTScanner/gas/rules/unsafe.go
generated
vendored
Normal file
@ -0,0 +1,45 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	gas "github.com/GoASTScanner/gas/core"
	"go/ast"
)

type UsingUnsafe struct {
	gas.MetaData
	pkg   string
	calls []string
}

func (r *UsingUnsafe) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
	if _, matches := gas.MatchCallByPackage(n, c, r.pkg, r.calls...); matches {
		return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
	}
	return nil, nil
}

func NewUsingUnsafe(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	return &UsingUnsafe{
		pkg:   "unsafe",
		calls: []string{"Alignof", "Offsetof", "Sizeof", "Pointer"},
		MetaData: gas.MetaData{
			What:       "Use of unsafe calls should be audited",
			Severity:   gas.Low,
			Confidence: gas.High,
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
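Editorial aside, not part of the vendored file: a trivial sketch of the call sites the rule above audits.

```go
package main

import (
	"fmt"
	"unsafe"
)

func main() {
	x := int64(1)
	// flagged: Sizeof and Pointer are on the rule's audit list for package unsafe
	fmt.Println(unsafe.Sizeof(x), unsafe.Pointer(&x))
}
```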
53
tools/vendor/github.com/GoASTScanner/gas/rules/weakcrypto.go
generated
vendored
Normal file
53
tools/vendor/github.com/GoASTScanner/gas/rules/weakcrypto.go
generated
vendored
Normal file
@ -0,0 +1,53 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"go/ast"

	gas "github.com/GoASTScanner/gas/core"
)

type UsesWeakCryptography struct {
	gas.MetaData
	blacklist map[string][]string
}

func (r *UsesWeakCryptography) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {

	for pkg, funcs := range r.blacklist {
		if _, matched := gas.MatchCallByPackage(n, c, pkg, funcs...); matched {
			return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
		}
	}
	return nil, nil
}

// Uses des.* md5.* or rc4.*
func NewUsesWeakCryptography(conf map[string]interface{}) (gas.Rule, []ast.Node) {
	calls := make(map[string][]string)
	calls["crypto/des"] = []string{"NewCipher", "NewTripleDESCipher"}
	calls["crypto/md5"] = []string{"New", "Sum"}
	calls["crypto/rc4"] = []string{"NewCipher"}
	rule := &UsesWeakCryptography{
		blacklist: calls,
		MetaData: gas.MetaData{
			Severity:   gas.Medium,
			Confidence: gas.High,
			What:       "Use of weak cryptographic primitive",
		},
	}
	return rule, []ast.Node{(*ast.CallExpr)(nil)}
}
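Editorial aside, not part of the vendored file: a sketch of a blacklisted call from the rule above and a non-flagged alternative.

```go
package main

import (
	"crypto/md5"
	"crypto/sha256"
	"fmt"
)

func main() {
	data := []byte("payload")

	// flagged: crypto/md5 "Sum" is on the rule's blacklist
	fmt.Printf("%x\n", md5.Sum(data))

	// not flagged: SHA-256 is not blacklisted
	fmt.Printf("%x\n", sha256.Sum256(data))
}
```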
276
tools/vendor/github.com/GoASTScanner/gas/tools.go
generated
vendored
Normal file
276
tools/vendor/github.com/GoASTScanner/gas/tools.go
generated
vendored
Normal file
@ -0,0 +1,276 @@
|
||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/importer"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type command func(args ...string)
|
||||
type utilities struct {
|
||||
commands map[string]command
|
||||
call []string
|
||||
}
|
||||
|
||||
// Custom commands / utilities to run instead of default analyzer
|
||||
func newUtils() *utilities {
|
||||
utils := make(map[string]command)
|
||||
utils["ast"] = dumpAst
|
||||
utils["callobj"] = dumpCallObj
|
||||
utils["uses"] = dumpUses
|
||||
utils["types"] = dumpTypes
|
||||
utils["defs"] = dumpDefs
|
||||
utils["comments"] = dumpComments
|
||||
utils["imports"] = dumpImports
|
||||
return &utilities{utils, make([]string, 0)}
|
||||
}
|
||||
|
||||
func (u *utilities) String() string {
|
||||
i := 0
|
||||
keys := make([]string, len(u.commands))
|
||||
for k := range u.commands {
|
||||
keys[i] = k
|
||||
i++
|
||||
}
|
||||
return strings.Join(keys, ", ")
|
||||
}
|
||||
|
||||
func (u *utilities) Set(opt string) error {
|
||||
if _, ok := u.commands[opt]; !ok {
|
||||
return fmt.Errorf("valid tools are: %s", u.String())
|
||||
|
||||
}
|
||||
u.call = append(u.call, opt)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (u *utilities) run(args ...string) {
|
||||
for _, util := range u.call {
|
||||
if cmd, ok := u.commands[util]; ok {
|
||||
cmd(args...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func shouldSkip(path string) bool {
|
||||
st, e := os.Stat(path)
|
||||
if e != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Skipping: %s - %s\n", path, e)
|
||||
return true
|
||||
}
|
||||
if st.IsDir() {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Skipping: %s - directory\n", path)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func dumpAst(files ...string) {
|
||||
for _, arg := range files {
|
||||
// Ensure file exists and not a directory
|
||||
if shouldSkip(arg) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Create the AST by parsing src.
|
||||
fset := token.NewFileSet() // positions are relative to fset
|
||||
f, err := parser.ParseFile(fset, arg, nil, 0)
|
||||
if err != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Unable to parse file %s\n", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Print the AST. #nosec
|
||||
ast.Print(fset, f)
|
||||
}
|
||||
}
|
||||
|
||||
type context struct {
|
||||
fileset *token.FileSet
|
||||
comments ast.CommentMap
|
||||
info *types.Info
|
||||
pkg *types.Package
|
||||
config *types.Config
|
||||
root *ast.File
|
||||
}
|
||||
|
||||
func createContext(filename string) *context {
|
||||
fileset := token.NewFileSet()
|
||||
root, e := parser.ParseFile(fileset, filename, nil, parser.ParseComments)
|
||||
if e != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Unable to parse file: %s. Reason: %s\n", filename, e)
|
||||
return nil
|
||||
}
|
||||
comments := ast.NewCommentMap(fileset, root, root.Comments)
|
||||
info := &types.Info{
|
||||
Types: make(map[ast.Expr]types.TypeAndValue),
|
||||
Defs: make(map[*ast.Ident]types.Object),
|
||||
Uses: make(map[*ast.Ident]types.Object),
|
||||
Selections: make(map[*ast.SelectorExpr]*types.Selection),
|
||||
Scopes: make(map[ast.Node]*types.Scope),
|
||||
Implicits: make(map[ast.Node]types.Object),
|
||||
}
|
||||
config := types.Config{Importer: importer.Default()}
|
||||
pkg, e := config.Check("main.go", fileset, []*ast.File{root}, info)
|
||||
if e != nil {
|
||||
// #nosec
|
||||
fmt.Fprintf(os.Stderr, "Type check failed for file: %s. Reason: %s\n", filename, e)
|
||||
return nil
|
||||
}
|
||||
return &context{fileset, comments, info, pkg, &config, root}
|
||||
}
|
||||
|
||||
func printObject(obj types.Object) {
|
||||
fmt.Println("OBJECT")
|
||||
if obj == nil {
|
||||
fmt.Println("object is nil")
|
||||
return
|
||||
}
|
||||
fmt.Printf(" Package = %v\n", obj.Pkg())
|
||||
if obj.Pkg() != nil {
|
||||
fmt.Println(" Path = ", obj.Pkg().Path())
|
||||
fmt.Println(" Name = ", obj.Pkg().Name())
|
||||
fmt.Println(" String = ", obj.Pkg().String())
|
||||
}
|
||||
fmt.Printf(" Name = %v\n", obj.Name())
|
||||
fmt.Printf(" Type = %v\n", obj.Type())
|
||||
fmt.Printf(" Id = %v\n", obj.Id())
|
||||
}
|
||||
|
||||
func checkContext(ctx *context, file string) bool {
|
||||
// #nosec
|
||||
if ctx == nil {
|
||||
fmt.Fprintln(os.Stderr, "Failed to create context for file: ", file)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func dumpCallObj(files ...string) {
|
||||
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
ast.Inspect(context.root, func(n ast.Node) bool {
|
||||
var obj types.Object
|
||||
switch node := n.(type) {
|
||||
case *ast.Ident:
|
||||
obj = context.info.ObjectOf(node) //context.info.Uses[node]
|
||||
case *ast.SelectorExpr:
|
||||
obj = context.info.ObjectOf(node.Sel) //context.info.Uses[node.Sel]
|
||||
default:
|
||||
obj = nil
|
||||
}
|
||||
if obj != nil {
|
||||
printObject(obj)
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func dumpUses(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for ident, obj := range context.info.Uses {
|
||||
fmt.Printf("IDENT: %v, OBJECT: %v\n", ident, obj)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpTypes(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for expr, tv := range context.info.Types {
|
||||
fmt.Printf("EXPR: %v, TYPE: %v\n", expr, tv)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpDefs(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for ident, obj := range context.info.Defs {
|
||||
fmt.Printf("IDENT: %v, OBJ: %v\n", ident, obj)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpComments(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for _, group := range context.comments.Comments() {
|
||||
fmt.Println(group.Text())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func dumpImports(files ...string) {
|
||||
for _, file := range files {
|
||||
if shouldSkip(file) {
|
||||
continue
|
||||
}
|
||||
context := createContext(file)
|
||||
if !checkContext(context, file) {
|
||||
return
|
||||
}
|
||||
for _, pkg := range context.pkg.Imports() {
|
||||
fmt.Println(pkg.Path(), pkg.Name())
|
||||
for _, name := range pkg.Scope().Names() {
|
||||
fmt.Println(" => ", name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
7
tools/vendor/github.com/GoASTScanner/gas/vendor.conf
generated
vendored
Normal file
7
tools/vendor/github.com/GoASTScanner/gas/vendor.conf
generated
vendored
Normal file
@ -0,0 +1,7 @@
# package
github.com/GoAstScanner/gas

# import
github.com/GoASTScanner/gas cc52ef5
github.com/nbutton23/zxcvbn-go a22cb81
github.com/ryanuber/go-glob v0.1
27
tools/vendor/github.com/alecthomas/gocyclo/LICENSE
generated
vendored
Normal file
27
tools/vendor/github.com/alecthomas/gocyclo/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
Copyright (c) 2013 Frederik Zipp. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of the copyright owner nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
tools/vendor/github.com/alecthomas/gocyclo/README.md
generated
vendored
Normal file
31
tools/vendor/github.com/alecthomas/gocyclo/README.md
generated
vendored
Normal file
@ -0,0 +1,31 @@
Gocyclo calculates cyclomatic complexities of functions in Go source code.

The cyclomatic complexity of a function is calculated according to the
following rules:

 1 is the base complexity of a function
+1 for each 'if', 'for', 'case', '&&' or '||'

To install, run

    $ go get github.com/fzipp/gocyclo

and put the resulting binary in one of your PATH directories if
`$GOPATH/bin` isn't already in your PATH.

Usage:

    $ gocyclo [<flag> ...] <Go file or directory> ...

Examples:

    $ gocyclo .
    $ gocyclo main.go
    $ gocyclo -top 10 src/
    $ gocyclo -over 25 docker
    $ gocyclo -avg .

The output fields for each line are:

    <complexity> <package> <function> <file:row:column>

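Editorial aside, not part of the vendored README: a quick worked example of the counting rules above.

```go
package main

import "fmt"

// Complexity 4: 1 (base) + 1 (if) + 1 (&&) + 1 (for).
func describe(xs []int, verbose bool) string {
	out := ""
	if verbose && len(xs) > 0 {
		out = "values:"
	}
	for _, x := range xs {
		out += fmt.Sprintf(" %d", x)
	}
	return out
}

func main() { fmt.Println(describe([]int{1, 2, 3}, true)) }
```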
222
tools/vendor/github.com/alecthomas/gocyclo/gocyclo.go
generated
vendored
Normal file
222
tools/vendor/github.com/alecthomas/gocyclo/gocyclo.go
generated
vendored
Normal file
@ -0,0 +1,222 @@
|
||||
// Copyright 2013 Frederik Zipp. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Gocyclo calculates the cyclomatic complexities of functions and
|
||||
// methods in Go source code.
|
||||
//
|
||||
// Usage:
|
||||
// gocyclo [<flag> ...] <Go file or directory> ...
|
||||
//
|
||||
// Flags
|
||||
// -over N show functions with complexity > N only and
|
||||
// return exit code 1 if the output is non-empty
|
||||
// -top N show the top N most complex functions only
|
||||
// -avg show the average complexity
|
||||
//
|
||||
// The output fields for each line are:
|
||||
// <complexity> <package> <function> <file:row:column>
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
)
|
||||
|
||||
const usageDoc = `Calculate cyclomatic complexities of Go functions.
|
||||
usage:
|
||||
gocyclo [<flag> ...] <Go file or directory> ...
|
||||
|
||||
Flags
|
||||
-over N show functions with complexity > N only and
|
||||
return exit code 1 if the set is non-empty
|
||||
-top N show the top N most complex functions only
|
||||
-avg show the average complexity over all functions,
|
||||
not depending on whether -over or -top are set
|
||||
|
||||
The output fields for each line are:
|
||||
<complexity> <package> <function> <file:row:column>
|
||||
`
|
||||
|
||||
func usage() {
|
||||
fmt.Fprintf(os.Stderr, usageDoc)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
var (
|
||||
over = flag.Int("over", 0, "show functions with complexity > N only")
|
||||
top = flag.Int("top", -1, "show the top N most complex functions only")
|
||||
avg = flag.Bool("avg", false, "show the average complexity")
|
||||
)
|
||||
|
||||
func main() {
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
args := flag.Args()
|
||||
if len(args) == 0 {
|
||||
usage()
|
||||
}
|
||||
|
||||
stats := analyze(args)
|
||||
sort.Sort(byComplexity(stats))
|
||||
written := writeStats(os.Stdout, stats)
|
||||
|
||||
if *avg {
|
||||
showAverage(stats)
|
||||
}
|
||||
|
||||
if *over > 0 && written > 0 {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func analyze(paths []string) []stat {
|
||||
stats := make([]stat, 0)
|
||||
for _, path := range paths {
|
||||
if isDir(path) {
|
||||
stats = analyzeDir(path, stats)
|
||||
} else {
|
||||
stats = analyzeFile(path, stats)
|
||||
}
|
||||
}
|
||||
return stats
|
||||
}
|
||||
|
||||
func isDir(filename string) bool {
|
||||
fi, err := os.Stat(filename)
|
||||
return err == nil && fi.IsDir()
|
||||
}
|
||||
|
||||
func analyzeFile(fname string, stats []stat) []stat {
|
||||
fset := token.NewFileSet()
|
||||
f, err := parser.ParseFile(fset, fname, nil, 0)
|
||||
if err != nil {
|
||||
exitError(err)
|
||||
}
|
||||
return buildStats(f, fset, stats)
|
||||
}
|
||||
|
||||
func analyzeDir(dirname string, stats []stat) []stat {
|
||||
files, _ := filepath.Glob(filepath.Join(dirname, "*.go"))
|
||||
for _, file := range files {
|
||||
stats = analyzeFile(file, stats)
|
||||
}
|
||||
return stats
|
||||
}
|
||||
|
||||
func exitError(err error) {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func writeStats(w io.Writer, sortedStats []stat) int {
|
||||
for i, stat := range sortedStats {
|
||||
if i == *top {
|
||||
return i
|
||||
}
|
||||
if stat.Complexity <= *over {
|
||||
return i
|
||||
}
|
||||
fmt.Fprintln(w, stat)
|
||||
}
|
||||
return len(sortedStats)
|
||||
}
|
||||
|
||||
func showAverage(stats []stat) {
|
||||
fmt.Printf("Average: %.3g\n", average(stats))
|
||||
}
|
||||
|
||||
func average(stats []stat) float64 {
|
||||
total := 0
|
||||
for _, s := range stats {
|
||||
total += s.Complexity
|
||||
}
|
||||
return float64(total) / float64(len(stats))
|
||||
}
|
||||
|
||||
type stat struct {
|
||||
PkgName string
|
||||
FuncName string
|
||||
Complexity int
|
||||
Pos token.Position
|
||||
}
|
||||
|
||||
func (s stat) String() string {
|
||||
return fmt.Sprintf("%d %s %s %s", s.Complexity, s.PkgName, s.FuncName, s.Pos)
|
||||
}
|
||||
|
||||
type byComplexity []stat
|
||||
|
||||
func (s byComplexity) Len() int { return len(s) }
|
||||
func (s byComplexity) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
func (s byComplexity) Less(i, j int) bool {
|
||||
return s[i].Complexity >= s[j].Complexity
|
||||
}
|
||||
|
||||
func buildStats(f *ast.File, fset *token.FileSet, stats []stat) []stat {
|
||||
for _, decl := range f.Decls {
|
||||
if fn, ok := decl.(*ast.FuncDecl); ok {
|
||||
stats = append(stats, stat{
|
||||
PkgName: f.Name.Name,
|
||||
FuncName: funcName(fn),
|
||||
Complexity: complexity(fn),
|
||||
Pos: fset.Position(fn.Pos()),
|
||||
})
|
||||
}
|
||||
}
|
||||
return stats
|
||||
}
|
||||
|
||||
// funcName returns the name representation of a function or method:
|
||||
// "(Type).Name" for methods or simply "Name" for functions.
|
||||
func funcName(fn *ast.FuncDecl) string {
|
||||
if fn.Recv != nil {
|
||||
typ := fn.Recv.List[0].Type
|
||||
return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name)
|
||||
}
|
||||
return fn.Name.Name
|
||||
}
|
||||
|
||||
// recvString returns a string representation of recv of the
|
||||
// form "T", "*T", or "BADRECV" (if not a proper receiver type).
|
||||
func recvString(recv ast.Expr) string {
|
||||
switch t := recv.(type) {
|
||||
case *ast.Ident:
|
||||
return t.Name
|
||||
case *ast.StarExpr:
|
||||
return "*" + recvString(t.X)
|
||||
}
|
||||
return "BADRECV"
|
||||
}
|
||||
|
||||
// complexity calculates the cyclomatic complexity of a function.
|
||||
func complexity(fn *ast.FuncDecl) int {
|
||||
v := complexityVisitor{}
|
||||
ast.Walk(&v, fn)
|
||||
return v.Complexity
|
||||
}
|
||||
|
||||
type complexityVisitor struct {
|
||||
// Complexity is the cyclomatic complexity
|
||||
Complexity int
|
||||
}
|
||||
|
||||
// Visit implements the ast.Visitor interface.
|
||||
func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor {
|
||||
switch n := n.(type) {
|
||||
case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause:
|
||||
v.Complexity++
|
||||
case *ast.BinaryExpr:
|
||||
if n.Op == token.LAND || n.Op == token.LOR {
|
||||
v.Complexity++
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
56
tools/vendor/github.com/alecthomas/gometalinter/CONTRIBUTING.md
generated
vendored
Normal file
56
tools/vendor/github.com/alecthomas/gometalinter/CONTRIBUTING.md
generated
vendored
Normal file
@ -0,0 +1,56 @@
### Please only report errors with gometalinter itself

gometalinter relies on underlying linters to detect issues in source code.
If your issue seems to be related to an underlying linter, please report an
issue against that linter rather than gometalinter. For a full list of linters
and their repositories please see the [README](README.md).

### Do you want to upgrade a vendored linter?

Please send a PR. We use [GVT](https://github.com/FiloSottile/gvt). It should be as simple as:

```
go get github.com/FiloSottile/gvt
cd _linters
gvt update <linter>
git add <paths>
```

### Before you report an issue

Sometimes gometalinter will not report issues that you think it should. There
are three things to try in that case:

#### 1. Update to the latest build of gometalinter and all linters

    go get -u github.com/alecthomas/gometalinter
    gometalinter --install

If you're lucky, this will fix the problem.

#### 2. Analyse the debug output

If that doesn't help, the problem may be elsewhere (in no particular order):

1. Upstream linter has changed its output or semantics.
2. gometalinter is not invoking the tool correctly.
3. gometalinter regular expression matches are not correct for a linter.
4. Linter is exceeding the deadline.

To find out what's going on run in debug mode:

    gometalinter --debug

This will show all output from the linters and should indicate why it is
failing.

#### 3. Run linters manually

The output of `gometalinter --debug` should show the exact commands gometalinter
is running. Run these commands from the command line to determine if the linter
or gometaliner is at fault.

#### 4. Report an issue.

Failing all else, if the problem looks like a bug please file an issue and
include the output of `gometalinter --debug`
19
tools/vendor/github.com/alecthomas/gometalinter/COPYING
generated
vendored
Normal file
19
tools/vendor/github.com/alecthomas/gometalinter/COPYING
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright (C) 2012 Alec Thomas

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
317
tools/vendor/github.com/alecthomas/gometalinter/README.md
generated
vendored
Normal file
317
tools/vendor/github.com/alecthomas/gometalinter/README.md
generated
vendored
Normal file
@ -0,0 +1,317 @@
|
||||
# Go Meta Linter
|
||||
[](https://travis-ci.org/alecthomas/gometalinter) [](https://gitter.im/alecthomas/Lobby)
|
||||
|
||||
<!-- MarkdownTOC -->
|
||||
|
||||
- [Editor integration](#editor-integration)
|
||||
- [Supported linters](#supported-linters)
|
||||
- [Configuration file](#configuration-file)
|
||||
- [Installing](#installing)
|
||||
- [Comment directives](#comment-directives)
|
||||
- [Quickstart](#quickstart)
|
||||
- [FAQ](#faq)
|
||||
- [Exit status](#exit-status)
|
||||
- [What's the best way to use `gometalinter` in CI?](#whats-the-best-way-to-use-gometalinter-in-ci)
|
||||
- [How do I make `gometalinter` work with Go 1.5 vendoring?](#how-do-i-make-gometalinter-work-with-go-15-vendoring)
|
||||
- [Why does `gometalinter --install` install a fork of gocyclo?](#why-does-gometalinter---install-install-a-fork-of-gocyclo)
|
||||
- [Gometalinter is not working](#gometalinter-is-not-working)
|
||||
- [1. Update to the latest build of gometalinter and all linters](#1-update-to-the-latest-build-of-gometalinter-and-all-linters)
|
||||
- [2. Analyse the debug output](#2-analyse-the-debug-output)
|
||||
- [3. Report an issue.](#3-report-an-issue)
|
||||
- [Details](#details)
|
||||
- [Checkstyle XML format](#checkstyle-xml-format)
|
||||
|
||||
<!-- /MarkdownTOC -->
|
||||
|
||||
|
||||
The number of tools for statically checking Go source for errors and warnings
|
||||
is impressive.
|
||||
|
||||
This is a tool that concurrently runs a whole bunch of those linters and
|
||||
normalises their output to a standard format:
|
||||
|
||||
<file>:<line>:[<column>]: <message> (<linter>)
|
||||
|
||||
eg.
|
||||
|
||||
stutter.go:9::warning: unused global variable unusedGlobal (varcheck)
|
||||
stutter.go:12:6:warning: exported type MyStruct should have comment or be unexported (golint)
|
||||
|
||||
It is intended for use with editor/IDE integration.
|
||||
|
||||
## Editor integration
|
||||
|
||||
- [SublimeLinter plugin](https://github.com/alecthomas/SublimeLinter-contrib-gometalinter).
|
||||
- [Atom go-plus package](https://atom.io/packages/go-plus).
|
||||
- [Emacs Flycheck checker](https://github.com/favadi/flycheck-gometalinter).
|
||||
- [Go for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=lukehoban.Go).
|
||||
- Vim/Neovim
|
||||
- [Neomake](https://github.com/neomake/neomake).
|
||||
- [Syntastic](https://github.com/scrooloose/syntastic/wiki/Go:---gometalinter) `let g:syntastic_go_checkers = ['gometalinter']`.
|
||||
- [ale](https://github.com/w0rp/ale) `let g:ale_linters = {'go': ['gometalinter']}`
|
||||
- [vim-go](https://github.com/fatih/vim-go) with the `:GoMetaLinter` command.
|
||||
|
||||
## Supported linters
|
||||
|
||||
- [go vet](https://golang.org/cmd/vet/) - Reports potential errors that otherwise compile.
|
||||
- [go vet --shadow](https://golang.org/cmd/vet/#hdr-Shadowed_variables) - Reports variables that may have been unintentionally shadowed.
|
||||
- [gotype](https://golang.org/x/tools/cmd/gotype) - Syntactic and semantic analysis similar to the Go compiler.
|
||||
- [deadcode](https://github.com/tsenart/deadcode) - Finds unused code.
|
||||
- [gocyclo](https://github.com/alecthomas/gocyclo) - Computes the cyclomatic complexity of functions.
|
||||
- [golint](https://github.com/golang/lint) - Google's (mostly stylistic) linter.
|
||||
- [varcheck](https://github.com/opennota/check) - Find unused global variables and constants.
|
||||
- [structcheck](https://github.com/opennota/check) - Find unused struct fields.
|
||||
- [aligncheck](https://github.com/opennota/check) - Warn about un-optimally aligned structures.
|
||||
- [errcheck](https://github.com/kisielk/errcheck) - Check that error return values are used.
|
||||
- [dupl](https://github.com/mibk/dupl) - Reports potentially duplicated code.
|
||||
- [ineffassign](https://github.com/gordonklaus/ineffassign/blob/master/list) - Detect when assignments to *existing* variables are not used.
|
||||
- [interfacer](https://github.com/mvdan/interfacer) - Suggest narrower interfaces that can be used.
|
||||
- [unconvert](https://github.com/mdempsky/unconvert) - Detect redundant type conversions.
|
||||
- [goconst](https://github.com/jgautheron/goconst) - Finds repeated strings that could be replaced by a constant.
|
||||
- [gosimple](https://github.com/dominikh/go-tools/tree/master/cmd/gosimple) - Report simplifications in code.
|
||||
- [staticcheck](https://github.com/dominikh/go-tools/tree/master/cmd/staticcheck) - Statically detect bugs, both obvious and subtle ones.
|
||||
- [gas](https://github.com/GoASTScanner/gas) - Inspects source code for security problems by scanning the Go AST.
|
||||
|
||||
Disabled by default (enable with `--enable=<linter>`):
|
||||
|
||||
- [testify](https://github.com/stretchr/testify) - Show location of failed testify assertions.
|
||||
- [test](http://golang.org/pkg/testing/) - Show location of test failures from the stdlib testing module.
|
||||
- [gofmt -s](https://golang.org/cmd/gofmt/) - Checks if the code is properly formatted and could not be further simplified.
|
||||
- [goimports](https://godoc.org/golang.org/x/tools/cmd/goimports) - Checks missing or unreferenced package imports.
|
||||
- [lll](https://github.com/walle/lll) - Report long lines (see `--line-length=N`).
|
||||
- [misspell](https://github.com/client9/misspell) - Finds commonly misspelled English words.
|
||||
- [unparam](https://github.com/mvdan/unparam) - Find unused function parameters.
|
||||
- [unused](https://github.com/dominikh/go-tools/tree/master/cmd/unused) - Find unused variables.
|
||||
- [safesql](https://github.com/stripe/safesql) - Finds potential SQL injection vulnerabilities.
|
||||
|
||||
Additional linters can be added through the command line with `--linter=NAME:COMMAND:PATTERN` (see [below](#details)).
|
||||
|
||||
## Configuration file
|
||||
|
||||
gometalinter now supports a JSON configuration file which can be loaded via
|
||||
`--config=<file>`. The format of this file is determined by the Config struct
|
||||
in `config.go`.
|
||||
|
||||
The configuration file mostly corresponds to command-line flags, with the following exceptions:
|
||||
|
||||
- Linters defined in the configuration file will overlay existing definitions, not replace them.
|
||||
- "Enable" defines the exact set of linters that will be enabled.
|
||||
|
||||
Here is an example configuration file:
|
||||
|
||||
```json
|
||||
{
|
||||
"DisableAll": true,
|
||||
"Enable": ["deadcode", "unconvert"]
|
||||
}
|
||||
```
|
||||
|
||||
## Installing
|
||||
|
||||
There are two options for installing gometalinter.
|
||||
|
||||
1. Install a stable version, eg. `go get -u gopkg.in/alecthomas/gometalinter.v1`.
|
||||
I will generally only tag a new stable version when it has passed the Travis
|
||||
regression tests. The downside is that the binary will be called `gometalinter.v1`.
|
||||
2. Install from HEAD with: `go get -u github.com/alecthomas/gometalinter`.
|
||||
This has the downside that changes to gometalinter may break.
|
||||
|
||||
## Comment directives
|
||||
|
||||
gometalinter supports suppression of linter messages via comment directives. The
|
||||
form of the directive is:
|
||||
|
||||
```
|
||||
// nolint[: <linter>[, <linter>, ...]]
|
||||
```
|
||||
|
||||
Suppression works in the following way:
|
||||
|
||||
1. Line-level suppression
|
||||
|
||||
A comment directive suppresses any linter messages on that line.
|
||||
|
||||
eg. In this example any messages for `a := 10` will be suppressed and errcheck
|
||||
messages for `defer r.Close()` will also be suppressed.
|
||||
|
||||
```go
|
||||
a := 10 // nolint
|
||||
a = 2
|
||||
defer r.Close() // nolint: errcheck
|
||||
```
|
||||
|
||||
2. Statement-level suppression
|
||||
|
||||
A comment directive at the same indentation level as a statement it
|
||||
immediately precedes will also suppress any linter messages in that entire
|
||||
statement.
|
||||
|
||||
eg. In this example all messages for `SomeFunc()` will be suppressed.
|
||||
|
||||
```go
|
||||
// nolint
|
||||
func SomeFunc() {
|
||||
}
|
||||
```
|
||||
|
||||
Implementation details: gometalinter now performs parsing of Go source code,
|
||||
to extract linter directives and associate them with line ranges. To avoid
|
||||
unnecessary processing, parsing is on-demand: the first time a linter emits a
|
||||
message for a file, that file is parsed for directives.
|
||||
|
||||
## Quickstart
|
||||
|
||||
Install gometalinter (see above).
|
||||
|
||||
Install all known linters:
|
||||
|
||||
```
|
||||
$ gometalinter --install
|
||||
Installing:
|
||||
structcheck
|
||||
aligncheck
|
||||
deadcode
|
||||
gocyclo
|
||||
ineffassign
|
||||
dupl
|
||||
golint
|
||||
gotype
|
||||
goimports
|
||||
errcheck
|
||||
varcheck
|
||||
interfacer
|
||||
goconst
|
||||
gosimple
|
||||
staticcheck
|
||||
unparam
|
||||
unused
|
||||
misspell
|
||||
lll
|
||||
gas
|
||||
safesql
|
||||
```
|
||||
|
||||
Run it:
|
||||
|
||||
```
|
||||
$ cd example
|
||||
$ gometalinter ./...
|
||||
stutter.go:13::warning: unused struct field MyStruct.Unused (structcheck)
|
||||
stutter.go:9::warning: unused global variable unusedGlobal (varcheck)
|
||||
stutter.go:12:6:warning: exported type MyStruct should have comment or be unexported (golint)
|
||||
stutter.go:16:6:warning: exported type PublicUndocumented should have comment or be unexported (golint)
|
||||
stutter.go:8:1:warning: unusedGlobal is unused (deadcode)
|
||||
stutter.go:12:1:warning: MyStruct is unused (deadcode)
|
||||
stutter.go:16:1:warning: PublicUndocumented is unused (deadcode)
|
||||
stutter.go:20:1:warning: duplicateDefer is unused (deadcode)
|
||||
stutter.go:21:15:warning: error return value not checked (defer a.Close()) (errcheck)
|
||||
stutter.go:22:15:warning: error return value not checked (defer a.Close()) (errcheck)
|
||||
stutter.go:27:6:warning: error return value not checked (doit() // test for errcheck) (errcheck)
|
||||
stutter.go:29::error: unreachable code (vet)
|
||||
stutter.go:26::error: missing argument for Printf("%d"): format reads arg 1, have only 0 args (vet)
|
||||
```
|
||||
|
||||
|
||||
Gometalinter also supports the commonly seen `<path>/...` recursive path
|
||||
format. Note that this can be *very* slow, and you may need to increase the linter `--deadline` to allow linters to complete.
|
||||
|
||||
## FAQ
|
||||
|
||||
### Exit status
|
||||
|
||||
gometalinter sets two bits of the exit status to indicate different issues:
|
||||
|
||||
| Bit | Meaning
|
||||
|-----|----------
|
||||
| 0 | A linter generated an issue.
|
||||
| 1 | An underlying error occurred; eg. a linter failed to execute. In this situation a warning will also be displayed.
|
||||
|
||||
eg. linter only = 1, underlying only = 2, linter + underlying = 3
|
||||
|
||||
### What's the best way to use `gometalinter` in CI?
|
||||
|
||||
There are two main problems running in a CI:
|
||||
|
||||
1. <s>Linters break, causing `gometalinter --install --update` to error</s> (this is no longer an issue as all linters are vendored).
|
||||
2. `gometalinter` adds a new linter.
|
||||
|
||||
I have solved 1 by vendoring the linters.
|
||||
|
||||
For 2, the best option is to disable all linters, then explicitly enable the
|
||||
ones you want:
|
||||
|
||||
gometalinter --disable-all --enable=errcheck --enable=vet --enable=vetshadow ...
|
||||
|
||||
### How do I make `gometalinter` work with Go 1.5 vendoring?
|
||||
|
||||
`gometalinter` has a `--vendor` flag that just sets `GO15VENDOREXPERIMENT=1`, however the
|
||||
underlying tools must support it. Ensure that all of the linters are up to date and built with Go 1.5
|
||||
(`gometalinter --install --force`) then run `gometalinter --vendor .`. That should be it.
|
||||
|
||||

### Why does `gometalinter --install` install a fork of gocyclo?

I forked `gocyclo` because the upstream behaviour is to recursively check all
subdirectories even when just a single directory is specified. This made it
unusably slow when vendoring. The recursive behaviour can be achieved with
gometalinter by explicitly specifying `<path>/...`. There is a
[pull request](https://github.com/fzipp/gocyclo/pull/1) open.

### Gometalinter is not working

That's more of a statement than a question, but okay.

Sometimes gometalinter will not report issues that you think it should. There
are three things to try in that case:

#### 1. Update to the latest build of gometalinter and all linters

    go get -u github.com/alecthomas/gometalinter
    gometalinter --install

If you're lucky, this will fix the problem.

#### 2. Analyse the debug output

If that doesn't help, the problem may be elsewhere (in no particular order):

1. An upstream linter has changed its output or semantics.
2. gometalinter is not invoking the tool correctly.
3. gometalinter's regular expression matches are not correct for a linter.
4. A linter is exceeding the deadline.

To find out what's going on, run in debug mode:

    gometalinter --debug

This will show all output from the linters and should indicate why it is
failing.

#### 3. Report an issue

Failing all else, if the problem looks like a bug please file an issue and
include the output of `gometalinter --debug`.

## Details

Additional linters can be configured via the command line:

```
$ gometalinter --linter='vet:go tool vet -printfuncs=Infof,Debugf,Warningf,Errorf {path}:PATH:LINE:MESSAGE' .
stutter.go:21:15:warning: error return value not checked (defer a.Close()) (errcheck)
stutter.go:22:15:warning: error return value not checked (defer a.Close()) (errcheck)
stutter.go:27:6:warning: error return value not checked (doit() // test for errcheck) (errcheck)
stutter.go:9::warning: unused global variable unusedGlobal (varcheck)
stutter.go:13::warning: unused struct field MyStruct.Unused (structcheck)
stutter.go:12:6:warning: exported type MyStruct should have comment or be unexported (golint)
stutter.go:16:6:warning: exported type PublicUndocumented should have comment or be unexported (golint)
```
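
Settings can also be collected in a JSON file and loaded with `--config`. A
small sketch, with key names taken from the `Config` struct in the vendored
`config.go` below (the particular values are only illustrative):

```
{
  "Enable": ["golint", "vet", "errcheck"],
  "Disable": ["gocyclo"],
  "Deadline": "60s",
  "Vendor": true,
  "Linters": {
    "vet": "go tool vet -printfuncs=Infof,Debugf,Warningf,Errorf {path}:PATH:LINE:MESSAGE"
  }
}
```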

## Checkstyle XML format

`gometalinter` supports [checkstyle](http://checkstyle.sourceforge.net/)
compatible XML output format. It is triggered with the `--checkstyle` flag:

    gometalinter --checkstyle

Checkstyle format can be used to integrate gometalinter with Jenkins CI with
the help of the [Checkstyle Plugin](https://wiki.jenkins-ci.org/display/JENKINS/Checkstyle+Plugin).
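
For reference, the emitted document has roughly the following shape (a
hand-written illustration based on the vendored `checkstyle.go` later in this
change; the file, line and message are made up, and the real output is not
indented):

```
<?xml version="1.0" encoding="UTF-8"?>
<checkstyle version="5.0">
  <file name="stutter.go">
    <error column="15" line="21" message="error return value not checked (defer a.Close())" severity="warning" source="errcheck"></error>
  </file>
</checkstyle>
```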
50
tools/vendor/github.com/alecthomas/gometalinter/aggregate.go
generated
vendored
Normal file
50
tools/vendor/github.com/alecthomas/gometalinter/aggregate.go
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type (
|
||||
issueKey struct {
|
||||
path string
|
||||
line, col int
|
||||
message string
|
||||
}
|
||||
|
||||
multiIssue struct {
|
||||
*Issue
|
||||
linterNames []string
|
||||
}
|
||||
)
|
||||
|
||||
func aggregateIssues(issues chan *Issue) chan *Issue {
|
||||
out := make(chan *Issue, 1000000)
|
||||
issueMap := make(map[issueKey]*multiIssue)
|
||||
go func() {
|
||||
for issue := range issues {
|
||||
key := issueKey{
|
||||
path: issue.Path,
|
||||
line: issue.Line,
|
||||
col: issue.Col,
|
||||
message: issue.Message,
|
||||
}
|
||||
if existing, ok := issueMap[key]; ok {
|
||||
existing.linterNames = append(existing.linterNames, issue.Linter.Name)
|
||||
} else {
|
||||
issueMap[key] = &multiIssue{
|
||||
Issue: issue,
|
||||
linterNames: []string{issue.Linter.Name},
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, multi := range issueMap {
|
||||
issue := multi.Issue
|
||||
sort.Strings(multi.linterNames)
|
||||
issue.Linter.Name = strings.Join(multi.linterNames, ", ")
|
||||
out <- issue
|
||||
}
|
||||
close(out)
|
||||
}()
|
||||
return out
|
||||
}
|
66
tools/vendor/github.com/alecthomas/gometalinter/checkstyle.go
generated
vendored
Normal file
66
tools/vendor/github.com/alecthomas/gometalinter/checkstyle.go
generated
vendored
Normal file
@ -0,0 +1,66 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
|
||||
"gopkg.in/alecthomas/kingpin.v3-unstable"
|
||||
)
|
||||
|
||||
type checkstyleOutput struct {
|
||||
XMLName xml.Name `xml:"checkstyle"`
|
||||
Version string `xml:"version,attr"`
|
||||
Files []*checkstyleFile `xml:"file"`
|
||||
}
|
||||
|
||||
type checkstyleFile struct {
|
||||
Name string `xml:"name,attr"`
|
||||
Errors []*checkstyleError `xml:"error"`
|
||||
}
|
||||
|
||||
type checkstyleError struct {
|
||||
Column int `xml:"column,attr"`
|
||||
Line int `xml:"line,attr"`
|
||||
Message string `xml:"message,attr"`
|
||||
Severity string `xml:"severity,attr"`
|
||||
Source string `xml:"source,attr"`
|
||||
}
|
||||
|
||||
func outputToCheckstyle(issues chan *Issue) int {
|
||||
var lastFile *checkstyleFile
|
||||
out := checkstyleOutput{
|
||||
Version: "5.0",
|
||||
}
|
||||
status := 0
|
||||
for issue := range issues {
|
||||
if lastFile != nil && lastFile.Name != issue.Path {
|
||||
out.Files = append(out.Files, lastFile)
|
||||
lastFile = nil
|
||||
}
|
||||
if lastFile == nil {
|
||||
lastFile = &checkstyleFile{
|
||||
Name: issue.Path,
|
||||
}
|
||||
}
|
||||
|
||||
if config.Errors && issue.Severity != Error {
|
||||
continue
|
||||
}
|
||||
|
||||
lastFile.Errors = append(lastFile.Errors, &checkstyleError{
|
||||
Column: issue.Col,
|
||||
Line: issue.Line,
|
||||
Message: issue.Message,
|
||||
Severity: string(issue.Severity),
|
||||
Source: issue.Linter.Name,
|
||||
})
|
||||
status = 1
|
||||
}
|
||||
if lastFile != nil {
|
||||
out.Files = append(out.Files, lastFile)
|
||||
}
|
||||
d, err := xml.Marshal(&out)
|
||||
kingpin.FatalIfError(err, "")
|
||||
fmt.Printf("%s%s\n", xml.Header, d)
|
||||
return status
|
||||
}
|
188
tools/vendor/github.com/alecthomas/gometalinter/config.go
generated
vendored
Normal file
188
tools/vendor/github.com/alecthomas/gometalinter/config.go
generated
vendored
Normal file
@ -0,0 +1,188 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"runtime"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"gopkg.in/alecthomas/kingpin.v3-unstable"
|
||||
)
|
||||
|
||||
// Config for gometalinter. This can be loaded from a JSON file with --config.
|
||||
type Config struct { // nolint: aligncheck
|
||||
// A map of linter name to "<command>:<pattern>".
|
||||
//
|
||||
// <command> should always include {path} as the target directory to execute. Globs in <command>
|
||||
// are expanded by gometalinter (not by the shell).
|
||||
Linters map[string]string
|
||||
|
||||
// The set of linters that should be enabled.
|
||||
Enable []string
|
||||
Disable []string
|
||||
|
||||
// A map of linter name to message that is displayed. This is useful when linters display text
|
||||
// that is useful only in isolation, such as errcheck which just reports the construct.
|
||||
MessageOverride map[string]string
|
||||
Severity map[string]string
|
||||
VendoredLinters bool
|
||||
Format string
|
||||
Fast bool
|
||||
Install bool
|
||||
Update bool
|
||||
Force bool
|
||||
DownloadOnly bool
|
||||
Debug bool
|
||||
Concurrency int
|
||||
Exclude []string
|
||||
Include []string
|
||||
Skip []string
|
||||
Vendor bool
|
||||
Cyclo int
|
||||
LineLength int
|
||||
MinConfidence float64
|
||||
MinOccurrences int
|
||||
MinConstLength int
|
||||
DuplThreshold int
|
||||
Sort []string
|
||||
Test bool
|
||||
Deadline time.Duration `json:"-"`
|
||||
Errors bool
|
||||
JSON bool
|
||||
Checkstyle bool
|
||||
EnableGC bool
|
||||
Aggregate bool
|
||||
|
||||
DeadlineJSONCrutch string `json:"Deadline"`
|
||||
}
|
||||
|
||||
// Configuration defaults.
|
||||
var (
|
||||
vetRe = `^(?:vet:.*?\.go:\s+(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*))|(?:(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*))$`
|
||||
|
||||
predefinedPatterns = map[string]string{
|
||||
"PATH:LINE:COL:MESSAGE": `^(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`,
|
||||
"PATH:LINE:MESSAGE": `^(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*)$`,
|
||||
}
|
||||
formatTemplate = &template.Template{}
|
||||
installMap = map[string]string{
|
||||
"aligncheck": "github.com/opennota/check/cmd/aligncheck",
|
||||
"deadcode": "github.com/tsenart/deadcode",
|
||||
"dupl": "github.com/mibk/dupl",
|
||||
"errcheck": "github.com/kisielk/errcheck",
|
||||
"gas": "github.com/GoASTScanner/gas",
|
||||
"goconst": "github.com/jgautheron/goconst/cmd/goconst",
|
||||
"gocyclo": "github.com/alecthomas/gocyclo",
|
||||
"goimports": "golang.org/x/tools/cmd/goimports",
|
||||
"golint": "github.com/golang/lint/golint",
|
||||
"gosimple": "honnef.co/go/tools/cmd/gosimple",
|
||||
"gotype": "golang.org/x/tools/cmd/gotype",
|
||||
"ineffassign": "github.com/gordonklaus/ineffassign",
|
||||
"interfacer": "github.com/mvdan/interfacer/cmd/interfacer",
|
||||
"lll": "github.com/walle/lll/cmd/lll",
|
||||
"misspell": "github.com/client9/misspell/cmd/misspell",
|
||||
"safesql": "github.com/stripe/safesql",
|
||||
"staticcheck": "honnef.co/go/tools/cmd/staticcheck",
|
||||
"structcheck": "github.com/opennota/check/cmd/structcheck",
|
||||
"unconvert": "github.com/mdempsky/unconvert",
|
||||
"unparam": "github.com/mvdan/unparam",
|
||||
"unused": "honnef.co/go/tools/cmd/unused",
|
||||
"varcheck": "github.com/opennota/check/cmd/varcheck",
|
||||
}
|
||||
acceptsEllipsis = map[string]bool{
|
||||
"aligncheck": true,
|
||||
"errcheck": true,
|
||||
"golint": true,
|
||||
"gosimple": true,
|
||||
"interfacer": true,
|
||||
"staticcheck": true,
|
||||
"structcheck": true,
|
||||
"test": true,
|
||||
"varcheck": true,
|
||||
"unconvert": true,
|
||||
}
|
||||
slowLinters = []string{"structcheck", "varcheck", "errcheck", "aligncheck", "testify", "test", "interfacer", "unconvert", "deadcode", "safesql", "staticcheck", "unparam", "unused", "gosimple"}
|
||||
sortKeys = []string{"none", "path", "line", "column", "severity", "message", "linter"}
|
||||
|
||||
// Linter definitions.
|
||||
linterDefinitions = map[string]string{
|
||||
"aligncheck": `aligncheck {path}:^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`,
|
||||
"deadcode": `deadcode {path}:^deadcode: (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`,
|
||||
"dupl": `dupl -plumbing -threshold {duplthreshold} {path}/*.go:^(?P<path>.*?\.go):(?P<line>\d+)-\d+:\s*(?P<message>.*)$`,
|
||||
"errcheck": `errcheck -abspath {path}:PATH:LINE:COL:MESSAGE`,
|
||||
"gas": `gas -fmt=csv {path}/*.go:^(?P<path>.*?\.go),(?P<line>\d+),(?P<message>[^,]+,[^,]+,[^,]+)`,
|
||||
"goconst": `goconst -min-occurrences {min_occurrences} -min-length {min_const_length} {path}:PATH:LINE:COL:MESSAGE`,
|
||||
"gocyclo": `gocyclo -over {mincyclo} {path}:^(?P<cyclo>\d+)\s+\S+\s(?P<function>\S+)\s+(?P<path>.*?\.go):(?P<line>\d+):(\d+)$`,
|
||||
"gofmt": `gofmt -l -s {path}/*.go:^(?P<path>.*?\.go)$`,
|
||||
"goimports": `goimports -l {path}/*.go:^(?P<path>.*?\.go)$`,
|
||||
"golint": "golint -min_confidence {min_confidence} {path}:PATH:LINE:COL:MESSAGE",
|
||||
"gosimple": "gosimple {path}:PATH:LINE:COL:MESSAGE",
|
||||
"gotype": "gotype -e {tests=-a} {path}:PATH:LINE:COL:MESSAGE",
|
||||
"ineffassign": `ineffassign -n {path}:PATH:LINE:COL:MESSAGE`,
|
||||
"interfacer": `interfacer {path}:PATH:LINE:COL:MESSAGE`,
|
||||
"lll": `lll -g -l {maxlinelength} {path}/*.go:PATH:LINE:MESSAGE`,
|
||||
"misspell": "misspell -j 1 {path}/*.go:PATH:LINE:COL:MESSAGE",
|
||||
"safesql": `safesql {path}:^- (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+)$`,
|
||||
"staticcheck": "staticcheck {path}:PATH:LINE:COL:MESSAGE",
|
||||
"structcheck": `structcheck {tests=-t} {path}:^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`,
|
||||
"test": `go test {path}:^--- FAIL: .*$\s+(?P<path>.*?\.go):(?P<line>\d+): (?P<message>.*)$`,
|
||||
"testify": `go test {path}:Location:\s+(?P<path>.*?\.go):(?P<line>\d+)$\s+Error:\s+(?P<message>[^\n]+)`,
|
||||
"unconvert": "unconvert {path}:PATH:LINE:COL:MESSAGE",
|
||||
"unparam": `unparam {path}:PATH:LINE:COL:MESSAGE`,
|
||||
"unused": `unused {path}:PATH:LINE:COL:MESSAGE`,
|
||||
"varcheck": `varcheck {path}:^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`,
|
||||
"vet": `go tool vet {path}/*.go:` + vetRe,
|
||||
"vetshadow": `go tool vet --shadow {path}/*.go:` + vetRe,
|
||||
}
|
||||
|
||||
pathsArg = kingpin.Arg("path", "Directories to lint. Defaults to \".\". <path>/... will recurse.").Strings()
|
||||
|
||||
config = &Config{
|
||||
Format: "{{.Path}}:{{.Line}}:{{if .Col}}{{.Col}}{{end}}:{{.Severity}}: {{.Message}} ({{.Linter}})",
|
||||
|
||||
Severity: map[string]string{
|
||||
"gotype": "error",
|
||||
"test": "error",
|
||||
"testify": "error",
|
||||
"vet": "error",
|
||||
},
|
||||
MessageOverride: map[string]string{
|
||||
"errcheck": "error return value not checked ({message})",
|
||||
"gocyclo": "cyclomatic complexity {cyclo} of function {function}() is high (> {mincyclo})",
|
||||
"gofmt": "file is not gofmted with -s",
|
||||
"goimports": "file is not goimported",
|
||||
"safesql": "potentially unsafe SQL statement",
|
||||
"structcheck": "unused struct field {message}",
|
||||
"unparam": "parameter {message}",
|
||||
"varcheck": "unused variable or constant {message}",
|
||||
},
|
||||
Enable: []string{
|
||||
"aligncheck",
|
||||
"deadcode",
|
||||
"errcheck",
|
||||
"gas",
|
||||
"goconst",
|
||||
"gocyclo",
|
||||
"golint",
|
||||
"gosimple",
|
||||
"gotype",
|
||||
"ineffassign",
|
||||
"interfacer",
|
||||
"staticcheck",
|
||||
"structcheck",
|
||||
"unconvert",
|
||||
"varcheck",
|
||||
"vet",
|
||||
"vetshadow",
|
||||
},
|
||||
VendoredLinters: true,
|
||||
Concurrency: runtime.NumCPU(),
|
||||
Cyclo: 10,
|
||||
LineLength: 80,
|
||||
MinConfidence: 0.8,
|
||||
MinOccurrences: 3,
|
||||
MinConstLength: 3,
|
||||
DuplThreshold: 50,
|
||||
Sort: []string{"none"},
|
||||
Deadline: time.Second * 30,
|
||||
}
|
||||
)
|
164
tools/vendor/github.com/alecthomas/gometalinter/directives.go
generated
vendored
Normal file
164
tools/vendor/github.com/alecthomas/gometalinter/directives.go
generated
vendored
Normal file
@ -0,0 +1,164 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type ignoredRange struct {
|
||||
col int
|
||||
start, end int
|
||||
linters []string
|
||||
}
|
||||
|
||||
func (i *ignoredRange) matches(issue *Issue) bool {
|
||||
if issue.Line < i.start || issue.Line > i.end {
|
||||
return false
|
||||
}
|
||||
if len(i.linters) == 0 {
|
||||
return true
|
||||
}
|
||||
for _, l := range i.linters {
|
||||
if l == issue.Linter.Name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (i *ignoredRange) near(col, start, end int) bool {
|
||||
return col == i.col && i.end == start-1
|
||||
}
|
||||
|
||||
type ignoredRanges []*ignoredRange
|
||||
|
||||
func (ir ignoredRanges) Len() int { return len(ir) }
|
||||
func (ir ignoredRanges) Swap(i, j int) { ir[i], ir[j] = ir[j], ir[i] }
|
||||
func (ir ignoredRanges) Less(i, j int) bool { return ir[i].end < ir[j].end }
|
||||
|
||||
type directiveParser struct {
|
||||
paths []string
|
||||
lock sync.Mutex
|
||||
files map[string]ignoredRanges
|
||||
fset *token.FileSet
|
||||
}
|
||||
|
||||
func newDirectiveParser(paths []string) *directiveParser {
|
||||
return &directiveParser{
|
||||
paths: paths,
|
||||
files: map[string]ignoredRanges{},
|
||||
fset: token.NewFileSet(),
|
||||
}
|
||||
}
|
||||
|
||||
// IsIgnored returns true if the given linter issue is ignored by a linter directive.
|
||||
func (d *directiveParser) IsIgnored(issue *Issue) bool {
|
||||
d.lock.Lock()
|
||||
ranges, ok := d.files[issue.Path]
|
||||
if !ok {
|
||||
ranges = d.parseFile(issue.Path)
|
||||
sort.Sort(ranges)
|
||||
d.files[issue.Path] = ranges
|
||||
}
|
||||
d.lock.Unlock()
|
||||
for _, r := range ranges {
|
||||
if r.matches(issue) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Takes a set of ignoredRanges, determines if they immediately precede a statement
|
||||
// construct, and expands the range to include that construct. Why? So you can
|
||||
// precede a function or struct with //nolint
|
||||
type rangeExpander struct {
|
||||
fset *token.FileSet
|
||||
ranges ignoredRanges
|
||||
}
|
||||
|
||||
func (a *rangeExpander) Visit(node ast.Node) ast.Visitor {
|
||||
if node == nil {
|
||||
return a
|
||||
}
|
||||
startPos := a.fset.Position(node.Pos())
|
||||
start := startPos.Line
|
||||
end := a.fset.Position(node.End()).Line
|
||||
found := sort.Search(len(a.ranges), func(i int) bool {
|
||||
return a.ranges[i].end+1 >= start
|
||||
})
|
||||
if found < len(a.ranges) && a.ranges[found].near(startPos.Column, start, end) {
|
||||
r := a.ranges[found]
|
||||
if r.start > start {
|
||||
r.start = start
|
||||
}
|
||||
if r.end < end {
|
||||
r.end = end
|
||||
}
|
||||
}
|
||||
return a
|
||||
}
|
||||
|
||||
func (d *directiveParser) parseFile(path string) ignoredRanges {
|
||||
start := time.Now()
|
||||
debug("nolint: parsing %s for directives", path)
|
||||
file, err := parser.ParseFile(d.fset, path, nil, parser.ParseComments)
|
||||
if err != nil {
|
||||
debug("nolint: failed to parse %q: %s", path, err)
|
||||
return nil
|
||||
}
|
||||
ranges := extractCommentGroupRange(d.fset, file.Comments...)
|
||||
visitor := &rangeExpander{fset: d.fset, ranges: ranges}
|
||||
ast.Walk(visitor, file)
|
||||
debug("nolint: parsing %s took %s", path, time.Since(start))
|
||||
return visitor.ranges
|
||||
}
|
||||
|
||||
func extractCommentGroupRange(fset *token.FileSet, comments ...*ast.CommentGroup) (ranges ignoredRanges) {
|
||||
for _, g := range comments {
|
||||
for _, c := range g.List {
|
||||
text := strings.TrimLeft(c.Text, "/ ")
|
||||
var linters []string
|
||||
if strings.HasPrefix(text, "nolint") {
|
||||
if strings.HasPrefix(text, "nolint:") {
|
||||
for _, linter := range strings.Split(text[7:], ",") {
|
||||
linters = append(linters, strings.TrimSpace(linter))
|
||||
}
|
||||
}
|
||||
pos := fset.Position(g.Pos())
|
||||
rng := &ignoredRange{
|
||||
col: pos.Column,
|
||||
start: pos.Line,
|
||||
end: fset.Position(g.End()).Line,
|
||||
linters: linters,
|
||||
}
|
||||
ranges = append(ranges, rng)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (d *directiveParser) in(n ast.Node, issue *Issue) bool {
|
||||
start := d.fset.Position(n.Pos())
|
||||
end := d.fset.Position(n.End())
|
||||
return issue.Line >= start.Line && issue.Line <= end.Line
|
||||
}
|
||||
|
||||
func filterIssuesViaDirectives(directives *directiveParser, issues chan *Issue) chan *Issue {
|
||||
out := make(chan *Issue, 1000000)
|
||||
go func() {
|
||||
for issue := range issues {
|
||||
if !directives.IsIgnored(issue) {
|
||||
out <- issue
|
||||
}
|
||||
}
|
||||
close(out)
|
||||
}()
|
||||
return out
|
||||
}
|
908
tools/vendor/github.com/alecthomas/gometalinter/main.go
generated
vendored
Normal file
908
tools/vendor/github.com/alecthomas/gometalinter/main.go
generated
vendored
Normal file
@ -0,0 +1,908 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/user"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/google/shlex"
|
||||
"gopkg.in/alecthomas/kingpin.v3-unstable"
|
||||
)
|
||||
|
||||
// Severity of linter message.
|
||||
type Severity string
|
||||
|
||||
// Linter message severity levels.
|
||||
const ( // nolint
|
||||
Warning Severity = "warning"
|
||||
Error Severity = "error"
|
||||
)
|
||||
|
||||
var (
|
||||
// Locations to look for vendored linters.
|
||||
vendoredSearchPaths = [][]string{
|
||||
{"github.com", "alecthomas", "gometalinter", "_linters"},
|
||||
{"gopkg.in", "alecthomas", "gometalinter.v1", "_linters"},
|
||||
}
|
||||
)
|
||||
|
||||
type Linter struct {
|
||||
Name string `json:"name"`
|
||||
Command string `json:"command"`
|
||||
CompositeCommand string `json:"composite_command,omitempty"`
|
||||
Pattern string `json:"pattern"`
|
||||
InstallFrom string `json:"install_from"`
|
||||
SeverityOverride Severity `json:"severity,omitempty"`
|
||||
MessageOverride string `json:"message_override,omitempty"`
|
||||
|
||||
regex *regexp.Regexp
|
||||
}
|
||||
|
||||
func (l *Linter) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(l.Name)
|
||||
}
|
||||
|
||||
func (l *Linter) String() string {
|
||||
return l.Name
|
||||
}
|
||||
|
||||
func LinterFromName(name string) *Linter {
|
||||
s := linterDefinitions[name]
|
||||
parts := strings.SplitN(s, ":", 2)
|
||||
if len(parts) < 2 {
|
||||
kingpin.Fatalf("invalid linter: %q", name)
|
||||
}
|
||||
|
||||
pattern := parts[1]
|
||||
if p, ok := predefinedPatterns[pattern]; ok {
|
||||
pattern = p
|
||||
}
|
||||
re, err := regexp.Compile("(?m:" + pattern + ")")
|
||||
kingpin.FatalIfError(err, "invalid regex for %q", name)
|
||||
return &Linter{
|
||||
Name: name,
|
||||
Command: s[0:strings.Index(s, ":")],
|
||||
Pattern: pattern,
|
||||
InstallFrom: installMap[name],
|
||||
SeverityOverride: Severity(config.Severity[name]),
|
||||
MessageOverride: config.MessageOverride[name],
|
||||
regex: re,
|
||||
}
|
||||
}
|
||||
|
||||
type sortedIssues struct {
|
||||
issues []*Issue
|
||||
order []string
|
||||
}
|
||||
|
||||
func (s *sortedIssues) Len() int { return len(s.issues) }
|
||||
func (s *sortedIssues) Swap(i, j int) { s.issues[i], s.issues[j] = s.issues[j], s.issues[i] }
|
||||
|
||||
// nolint: gocyclo
|
||||
func (s *sortedIssues) Less(i, j int) bool {
|
||||
l, r := s.issues[i], s.issues[j]
|
||||
for _, key := range s.order {
|
||||
switch key {
|
||||
case "path":
|
||||
if l.Path > r.Path {
|
||||
return false
|
||||
}
|
||||
case "line":
|
||||
if l.Line > r.Line {
|
||||
return false
|
||||
}
|
||||
case "column":
|
||||
if l.Col > r.Col {
|
||||
return false
|
||||
}
|
||||
case "severity":
|
||||
if l.Severity > r.Severity {
|
||||
return false
|
||||
}
|
||||
case "message":
|
||||
if l.Message > r.Message {
|
||||
return false
|
||||
}
|
||||
case "linter":
|
||||
if l.Linter.Name > r.Linter.Name {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func init() {
|
||||
kingpin.Flag("config", "Load JSON configuration from file.").Action(loadConfig).String()
|
||||
kingpin.Flag("disable", "Disable previously enabled linters.").PlaceHolder("LINTER").Short('D').Action(disableAction).Strings()
|
||||
kingpin.Flag("enable", "Enable previously disabled linters.").PlaceHolder("LINTER").Short('E').Action(enableAction).Strings()
|
||||
kingpin.Flag("linter", "Define a linter.").PlaceHolder("NAME:COMMAND:PATTERN").StringMapVar(&config.Linters)
|
||||
kingpin.Flag("message-overrides", "Override message from linter. {message} will be expanded to the original message.").PlaceHolder("LINTER:MESSAGE").StringMapVar(&config.MessageOverride)
|
||||
kingpin.Flag("severity", "Map of linter severities.").PlaceHolder("LINTER:SEVERITY").StringMapVar(&config.Severity)
|
||||
kingpin.Flag("disable-all", "Disable all linters.").Action(disableAllAction).Bool()
|
||||
kingpin.Flag("enable-all", "Enable all linters.").Action(enableAllAction).Bool()
|
||||
kingpin.Flag("format", "Output format.").PlaceHolder(config.Format).StringVar(&config.Format)
|
||||
kingpin.Flag("vendored-linters", "Use vendored linters (recommended).").BoolVar(&config.VendoredLinters)
|
||||
kingpin.Flag("fast", "Only run fast linters.").BoolVar(&config.Fast)
|
||||
kingpin.Flag("install", "Attempt to install all known linters.").Short('i').BoolVar(&config.Install)
|
||||
kingpin.Flag("update", "Pass -u to go tool when installing.").Short('u').BoolVar(&config.Update)
|
||||
kingpin.Flag("force", "Pass -f to go tool when installing.").Short('f').BoolVar(&config.Force)
|
||||
kingpin.Flag("download-only", "Pass -d to go tool when installing.").BoolVar(&config.DownloadOnly)
|
||||
kingpin.Flag("debug", "Display messages for failed linters, etc.").Short('d').BoolVar(&config.Debug)
|
||||
kingpin.Flag("concurrency", "Number of concurrent linters to run.").PlaceHolder(fmt.Sprintf("%d", runtime.NumCPU())).Short('j').IntVar(&config.Concurrency)
|
||||
kingpin.Flag("exclude", "Exclude messages matching these regular expressions.").Short('e').PlaceHolder("REGEXP").StringsVar(&config.Exclude)
|
||||
kingpin.Flag("include", "Include messages matching these regular expressions.").Short('I').PlaceHolder("REGEXP").StringsVar(&config.Include)
|
||||
kingpin.Flag("skip", "Skip directories with this name when expanding '...'.").Short('s').PlaceHolder("DIR...").StringsVar(&config.Skip)
|
||||
kingpin.Flag("vendor", "Enable vendoring support (skips 'vendor' directories and sets GO15VENDOREXPERIMENT=1).").BoolVar(&config.Vendor)
|
||||
kingpin.Flag("cyclo-over", "Report functions with cyclomatic complexity over N (using gocyclo).").PlaceHolder("10").IntVar(&config.Cyclo)
|
||||
kingpin.Flag("line-length", "Report lines longer than N (using lll).").PlaceHolder("80").IntVar(&config.LineLength)
|
||||
kingpin.Flag("min-confidence", "Minimum confidence interval to pass to golint.").PlaceHolder(".80").FloatVar(&config.MinConfidence)
|
||||
kingpin.Flag("min-occurrences", "Minimum occurrences to pass to goconst.").PlaceHolder("3").IntVar(&config.MinOccurrences)
|
||||
kingpin.Flag("min-const-length", "Minimumum constant length.").PlaceHolder("3").IntVar(&config.MinConstLength)
|
||||
kingpin.Flag("dupl-threshold", "Minimum token sequence as a clone for dupl.").PlaceHolder("50").IntVar(&config.DuplThreshold)
|
||||
kingpin.Flag("sort", fmt.Sprintf("Sort output by any of %s.", strings.Join(sortKeys, ", "))).PlaceHolder("none").EnumsVar(&config.Sort, sortKeys...)
|
||||
kingpin.Flag("tests", "Include test files for linters that support this option").Short('t').BoolVar(&config.Test)
|
||||
kingpin.Flag("deadline", "Cancel linters if they have not completed within this duration.").PlaceHolder("30s").DurationVar(&config.Deadline)
|
||||
kingpin.Flag("errors", "Only show errors.").BoolVar(&config.Errors)
|
||||
kingpin.Flag("json", "Generate structured JSON rather than standard line-based output.").BoolVar(&config.JSON)
|
||||
kingpin.Flag("checkstyle", "Generate checkstyle XML rather than standard line-based output.").BoolVar(&config.Checkstyle)
|
||||
kingpin.Flag("enable-gc", "Enable GC for linters (useful on large repositories).").BoolVar(&config.EnableGC)
|
||||
kingpin.Flag("aggregate", "Aggregate issues reported by several linters.").BoolVar(&config.Aggregate)
|
||||
kingpin.CommandLine.GetFlag("help").Short('h')
|
||||
}
|
||||
|
||||
func loadConfig(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
r, err := os.Open(*element.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer r.Close() // nolint: errcheck
|
||||
err = json.NewDecoder(r).Decode(config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if config.DeadlineJSONCrutch != "" {
|
||||
config.Deadline, err = time.ParseDuration(config.DeadlineJSONCrutch)
|
||||
}
|
||||
for _, disable := range config.Disable {
|
||||
for i, enable := range config.Enable {
|
||||
if enable == disable {
|
||||
config.Enable = append(config.Enable[:i], config.Enable[i+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func disableAction(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
out := []string{}
|
||||
for _, linter := range config.Enable {
|
||||
if linter != *element.Value {
|
||||
out = append(out, linter)
|
||||
}
|
||||
}
|
||||
config.Enable = out
|
||||
return nil
|
||||
}
|
||||
|
||||
func enableAction(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
config.Enable = append(config.Enable, *element.Value)
|
||||
return nil
|
||||
}
|
||||
|
||||
func disableAllAction(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
config.Enable = []string{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func enableAllAction(app *kingpin.Application, element *kingpin.ParseElement, ctx *kingpin.ParseContext) error {
|
||||
for linter := range linterDefinitions {
|
||||
config.Enable = append(config.Enable, linter)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Issue struct {
|
||||
Linter *Linter `json:"linter"`
|
||||
Severity Severity `json:"severity"`
|
||||
Path string `json:"path"`
|
||||
Line int `json:"line"`
|
||||
Col int `json:"col"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
func (i *Issue) String() string {
|
||||
buf := new(bytes.Buffer)
|
||||
err := formatTemplate.Execute(buf, i)
|
||||
kingpin.FatalIfError(err, "Invalid output format")
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func debug(format string, args ...interface{}) {
|
||||
if config.Debug {
|
||||
fmt.Fprintf(os.Stderr, "DEBUG: "+format+"\n", args...)
|
||||
}
|
||||
}
|
||||
|
||||
func warning(format string, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, "WARNING: "+format+"\n", args...)
|
||||
}
|
||||
|
||||
func formatLinters() string {
|
||||
w := bytes.NewBuffer(nil)
|
||||
for name := range linterDefinitions {
|
||||
linter := LinterFromName(name)
|
||||
install := "(" + linter.InstallFrom + ")"
|
||||
if install == "()" {
|
||||
install = ""
|
||||
}
|
||||
fmt.Fprintf(w, " %s %s\n %s\n %s\n", name, install, linter.Command, linter.Pattern)
|
||||
}
|
||||
return w.String()
|
||||
}
|
||||
|
||||
func formatSeverity() string {
|
||||
w := bytes.NewBuffer(nil)
|
||||
for name, severity := range config.Severity {
|
||||
fmt.Fprintf(w, " %s -> %s\n", name, severity)
|
||||
}
|
||||
return w.String()
|
||||
}
|
||||
|
||||
type Vars map[string]string
|
||||
|
||||
func (v Vars) Copy() Vars {
|
||||
out := Vars{}
|
||||
for k, v := range v {
|
||||
out[k] = v
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (v Vars) Replace(s string) string {
|
||||
for k, v := range v {
|
||||
prefix := regexp.MustCompile(fmt.Sprintf("{%s=([^}]*)}", k))
|
||||
if v != "" {
|
||||
s = prefix.ReplaceAllString(s, "$1")
|
||||
} else {
|
||||
s = prefix.ReplaceAllString(s, "")
|
||||
}
|
||||
s = strings.Replace(s, fmt.Sprintf("{%s}", k), v, -1)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Linters are by their very nature, short lived, so disable GC.
|
||||
// Reduced (user) linting time on kingpin from 0.97s to 0.64s.
|
||||
kingpin.CommandLine.Help = fmt.Sprintf(`Aggregate and normalise the output of a whole bunch of Go linters.
|
||||
|
||||
PlaceHolder linters:
|
||||
|
||||
%s
|
||||
|
||||
Severity override map (default is "warning"):
|
||||
|
||||
%s
|
||||
`, formatLinters(), formatSeverity())
|
||||
kingpin.Parse()
|
||||
|
||||
configureEnvironment()
|
||||
|
||||
if config.Install {
|
||||
installLinters()
|
||||
return
|
||||
}
|
||||
|
||||
include, exclude := processConfig(config)
|
||||
|
||||
start := time.Now()
|
||||
paths := expandPaths(*pathsArg, config.Skip)
|
||||
|
||||
linters := lintersFromFlags()
|
||||
status := 0
|
||||
issues, errch := runLinters(linters, paths, *pathsArg, config.Concurrency, exclude, include)
|
||||
if config.JSON {
|
||||
status |= outputToJSON(issues)
|
||||
} else if config.Checkstyle {
|
||||
status |= outputToCheckstyle(issues)
|
||||
} else {
|
||||
status |= outputToConsole(issues)
|
||||
}
|
||||
for err := range errch {
|
||||
warning("%s", err)
|
||||
status |= 2
|
||||
}
|
||||
elapsed := time.Since(start)
|
||||
debug("total elapsed time %s", elapsed)
|
||||
os.Exit(status)
|
||||
}
|
||||
|
||||
// nolint: gocyclo
|
||||
func processConfig(config *Config) (include *regexp.Regexp, exclude *regexp.Regexp) {
|
||||
// Move configured linters into linterDefinitions.
|
||||
for name, definition := range config.Linters {
|
||||
linterDefinitions[name] = definition
|
||||
}
|
||||
|
||||
tmpl, err := template.New("output").Parse(config.Format)
|
||||
kingpin.FatalIfError(err, "invalid format %q", config.Format)
|
||||
formatTemplate = tmpl
|
||||
if !config.EnableGC {
|
||||
_ = os.Setenv("GOGC", "off")
|
||||
}
|
||||
if config.VendoredLinters && config.Install && config.Update {
|
||||
warning(`Linters are now vendored by default, --update ignored. The original
|
||||
behaviour can be re-enabled with --no-vendored-linters.
|
||||
|
||||
To request an update for a vendored linter file an issue at:
|
||||
https://github.com/alecthomas/gometalinter/issues/new
|
||||
`)
|
||||
config.Update = false
|
||||
}
|
||||
// Force sorting by path if checkstyle mode is selected
|
||||
// !jsonFlag check is required to handle:
|
||||
// gometalinter --json --checkstyle --sort=severity
|
||||
if config.Checkstyle && !config.JSON {
|
||||
config.Sort = []string{"path"}
|
||||
}
|
||||
|
||||
// PlaceHolder to skipping "vendor" directory if GO15VENDOREXPERIMENT=1 is enabled.
|
||||
// TODO(alec): This will probably need to be enabled by default at a later time.
|
||||
if os.Getenv("GO15VENDOREXPERIMENT") == "1" || config.Vendor {
|
||||
if err := os.Setenv("GO15VENDOREXPERIMENT", "1"); err != nil {
|
||||
warning("setenv GO15VENDOREXPERIMENT: %s", err)
|
||||
}
|
||||
config.Skip = append(config.Skip, "vendor")
|
||||
config.Vendor = true
|
||||
}
|
||||
if len(config.Exclude) > 0 {
|
||||
exclude = regexp.MustCompile(strings.Join(config.Exclude, "|"))
|
||||
}
|
||||
|
||||
if len(config.Include) > 0 {
|
||||
include = regexp.MustCompile(strings.Join(config.Include, "|"))
|
||||
}
|
||||
|
||||
runtime.GOMAXPROCS(config.Concurrency)
|
||||
return include, exclude
|
||||
}
|
||||
|
||||
func outputToConsole(issues chan *Issue) int {
|
||||
status := 0
|
||||
for issue := range issues {
|
||||
if config.Errors && issue.Severity != Error {
|
||||
continue
|
||||
}
|
||||
fmt.Println(issue.String())
|
||||
status = 1
|
||||
}
|
||||
return status
|
||||
}
|
||||
|
||||
func outputToJSON(issues chan *Issue) int {
|
||||
fmt.Println("[")
|
||||
status := 0
|
||||
for issue := range issues {
|
||||
if config.Errors && issue.Severity != Error {
|
||||
continue
|
||||
}
|
||||
if status != 0 {
|
||||
fmt.Printf(",\n")
|
||||
}
|
||||
d, err := json.Marshal(issue)
|
||||
kingpin.FatalIfError(err, "")
|
||||
fmt.Printf(" %s", d)
|
||||
status = 1
|
||||
}
|
||||
fmt.Printf("\n]\n")
|
||||
return status
|
||||
}
|
||||
|
||||
func runLinters(linters map[string]*Linter, paths, ellipsisPaths []string, concurrency int, exclude *regexp.Regexp, include *regexp.Regexp) (chan *Issue, chan error) {
|
||||
errch := make(chan error, len(linters)*(len(paths)+len(ellipsisPaths)))
|
||||
concurrencych := make(chan bool, config.Concurrency)
|
||||
incomingIssues := make(chan *Issue, 1000000)
|
||||
directives := newDirectiveParser(paths)
|
||||
processedIssues := filterIssuesViaDirectives(directives, maybeSortIssues(maybeAggregateIssues(incomingIssues)))
|
||||
wg := &sync.WaitGroup{}
|
||||
for _, linter := range linters {
|
||||
// Recreated in each loop because it is mutated by executeLinter().
|
||||
vars := Vars{
|
||||
"duplthreshold": fmt.Sprintf("%d", config.DuplThreshold),
|
||||
"mincyclo": fmt.Sprintf("%d", config.Cyclo),
|
||||
"maxlinelength": fmt.Sprintf("%d", config.LineLength),
|
||||
"min_confidence": fmt.Sprintf("%f", config.MinConfidence),
|
||||
"min_occurrences": fmt.Sprintf("%d", config.MinOccurrences),
|
||||
"min_const_length": fmt.Sprintf("%d", config.MinConstLength),
|
||||
"tests": "",
|
||||
}
|
||||
if config.Test {
|
||||
vars["tests"] = "-t"
|
||||
}
|
||||
linterPaths := paths
|
||||
// Most linters don't exclude vendor paths when recursing, so we don't use ... paths.
|
||||
if acceptsEllipsis[linter.Name] && !config.Vendor && len(ellipsisPaths) > 0 {
|
||||
linterPaths = ellipsisPaths
|
||||
}
|
||||
for _, path := range linterPaths {
|
||||
wg.Add(1)
|
||||
deadline := time.After(config.Deadline)
|
||||
state := &linterState{
|
||||
Linter: linter,
|
||||
issues: incomingIssues,
|
||||
path: path,
|
||||
vars: vars.Copy(),
|
||||
exclude: exclude,
|
||||
include: include,
|
||||
deadline: deadline,
|
||||
}
|
||||
go func() {
|
||||
concurrencych <- true
|
||||
err := executeLinter(state)
|
||||
if err != nil {
|
||||
errch <- err
|
||||
}
|
||||
<-concurrencych
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
go func() {
|
||||
wg.Wait()
|
||||
close(incomingIssues)
|
||||
close(errch)
|
||||
}()
|
||||
return processedIssues, errch
|
||||
}
|
||||
|
||||
// nolint: gocyclo
|
||||
func expandPaths(paths, skip []string) []string {
|
||||
if len(paths) == 0 {
|
||||
paths = []string{"."}
|
||||
}
|
||||
skipMap := map[string]bool{}
|
||||
for _, name := range skip {
|
||||
skipMap[name] = true
|
||||
}
|
||||
dirs := map[string]bool{}
|
||||
for _, path := range paths {
|
||||
if strings.HasSuffix(path, "/...") {
|
||||
root := filepath.Dir(path)
|
||||
_ = filepath.Walk(root, func(p string, i os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
warning("invalid path %q: %s", p, err)
|
||||
return err
|
||||
}
|
||||
|
||||
base := filepath.Base(p)
|
||||
skip := skipMap[base] || skipMap[p] || (strings.ContainsAny(base[0:1], "_.") && base != "." && base != "..")
|
||||
if i.IsDir() {
|
||||
if skip {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
} else if !skip && strings.HasSuffix(p, ".go") {
|
||||
dirs[filepath.Clean(filepath.Dir(p))] = true
|
||||
}
|
||||
return nil
|
||||
})
|
||||
} else {
|
||||
dirs[filepath.Clean(path)] = true
|
||||
}
|
||||
}
|
||||
out := make([]string, 0, len(dirs))
|
||||
for d := range dirs {
|
||||
out = append(out, d)
|
||||
}
|
||||
sort.Strings(out)
|
||||
for _, d := range out {
|
||||
debug("linting path %s", d)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func makeInstallCommand(linters ...string) []string {
|
||||
cmd := []string{"get"}
|
||||
if config.VendoredLinters {
|
||||
cmd = []string{"install"}
|
||||
} else {
|
||||
if config.Update {
|
||||
cmd = append(cmd, "-u")
|
||||
}
|
||||
if config.Force {
|
||||
cmd = append(cmd, "-f")
|
||||
}
|
||||
if config.DownloadOnly {
|
||||
cmd = append(cmd, "-d")
|
||||
}
|
||||
}
|
||||
if config.Debug {
|
||||
cmd = append(cmd, "-v")
|
||||
}
|
||||
cmd = append(cmd, linters...)
|
||||
return cmd
|
||||
}
|
||||
|
||||
func installLintersWithOneCommand(targets []string) error {
|
||||
cmd := makeInstallCommand(targets...)
|
||||
debug("go %s", strings.Join(cmd, " "))
|
||||
c := exec.Command("go", cmd...) // nolint: gas
|
||||
c.Stdout = os.Stdout
|
||||
c.Stderr = os.Stderr
|
||||
return c.Run()
|
||||
}
|
||||
|
||||
func installLintersIndividually(targets []string) {
|
||||
failed := []string{}
|
||||
for _, target := range targets {
|
||||
cmd := makeInstallCommand(target)
|
||||
debug("go %s", strings.Join(cmd, " "))
|
||||
c := exec.Command("go", cmd...) // nolint: gas
|
||||
c.Stdout = os.Stdout
|
||||
c.Stderr = os.Stderr
|
||||
if err := c.Run(); err != nil {
|
||||
warning("failed to install %s: %s", target, err)
|
||||
failed = append(failed, target)
|
||||
}
|
||||
}
|
||||
if len(failed) > 0 {
|
||||
kingpin.Fatalf("failed to install the following linters: %s", strings.Join(failed, ", "))
|
||||
}
|
||||
}
|
||||
|
||||
func installLinters() {
|
||||
names := make([]string, 0, len(installMap))
|
||||
targets := make([]string, 0, len(installMap))
|
||||
for name, target := range installMap {
|
||||
names = append(names, name)
|
||||
targets = append(targets, target)
|
||||
}
|
||||
namesStr := strings.Join(names, "\n ")
|
||||
if config.DownloadOnly {
|
||||
fmt.Printf("Downloading:\n %s\n", namesStr)
|
||||
} else {
|
||||
fmt.Printf("Installing:\n %s\n", namesStr)
|
||||
}
|
||||
err := installLintersWithOneCommand(targets)
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
warning("failed to install one or more linters: %s (installing individually)", err)
|
||||
installLintersIndividually(targets)
|
||||
}
|
||||
|
||||
func maybeAggregateIssues(issues chan *Issue) chan *Issue {
|
||||
if !config.Aggregate {
|
||||
return issues
|
||||
}
|
||||
return aggregateIssues(issues)
|
||||
}
|
||||
|
||||
func maybeSortIssues(issues chan *Issue) chan *Issue {
|
||||
if reflect.DeepEqual([]string{"none"}, config.Sort) {
|
||||
return issues
|
||||
}
|
||||
out := make(chan *Issue, 1000000)
|
||||
sorted := &sortedIssues{
|
||||
issues: []*Issue{},
|
||||
order: config.Sort,
|
||||
}
|
||||
go func() {
|
||||
for issue := range issues {
|
||||
sorted.issues = append(sorted.issues, issue)
|
||||
}
|
||||
sort.Sort(sorted)
|
||||
for _, issue := range sorted.issues {
|
||||
out <- issue
|
||||
}
|
||||
close(out)
|
||||
}()
|
||||
return out
|
||||
}
|
||||
|
||||
type linterState struct {
|
||||
*Linter
|
||||
path string
|
||||
issues chan *Issue
|
||||
vars Vars
|
||||
exclude *regexp.Regexp
|
||||
include *regexp.Regexp
|
||||
deadline <-chan time.Time
|
||||
}
|
||||
|
||||
func (l *linterState) InterpolatedCommand() string {
|
||||
vars := l.vars.Copy()
|
||||
if l.ShouldChdir() {
|
||||
vars["path"] = "."
|
||||
} else {
|
||||
vars["path"] = l.path
|
||||
}
|
||||
return vars.Replace(l.Command)
|
||||
}
|
||||
|
||||
func (l *linterState) ShouldChdir() bool {
|
||||
return config.Vendor || !strings.HasSuffix(l.path, "/...") || !strings.Contains(l.Command, "{path}")
|
||||
}
|
||||
|
||||
func parseCommand(dir, command string) (string, []string, error) {
|
||||
args, err := shlex.Split(command)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
if len(args) == 0 {
|
||||
return "", nil, fmt.Errorf("invalid command %q", command)
|
||||
}
|
||||
exe, err := exec.LookPath(args[0])
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
out := []string{}
|
||||
for _, arg := range args[1:] {
|
||||
if strings.Contains(arg, "*") {
|
||||
pattern := filepath.Join(dir, arg)
|
||||
globbed, err := filepath.Glob(pattern)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
for i, g := range globbed {
|
||||
if strings.HasPrefix(g, dir+string(filepath.Separator)) {
|
||||
globbed[i] = g[len(dir)+1:]
|
||||
}
|
||||
}
|
||||
out = append(out, globbed...)
|
||||
} else {
|
||||
out = append(out, arg)
|
||||
}
|
||||
}
|
||||
return exe, out, nil
|
||||
}
|
||||
|
||||
func executeLinter(state *linterState) error {
|
||||
debug("linting with %s: %s (on %s)", state.Name, state.Command, state.path)
|
||||
|
||||
start := time.Now()
|
||||
command := state.InterpolatedCommand()
|
||||
exe, args, err := parseCommand(state.path, command)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
debug("executing %s %q", exe, args)
|
||||
buf := bytes.NewBuffer(nil)
|
||||
cmd := exec.Command(exe, args...) // nolint: gas
|
||||
if state.ShouldChdir() {
|
||||
cmd.Dir = state.path
|
||||
}
|
||||
cmd.Stdout = buf
|
||||
cmd.Stderr = buf
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to execute linter %s: %s", command, err)
|
||||
}
|
||||
|
||||
done := make(chan bool)
|
||||
go func() {
|
||||
err = cmd.Wait()
|
||||
done <- true
|
||||
}()
|
||||
|
||||
// Wait for process to complete or deadline to expire.
|
||||
select {
|
||||
case <-done:
|
||||
|
||||
case <-state.deadline:
|
||||
err = fmt.Errorf("deadline exceeded by linter %s on %s (try increasing --deadline)",
|
||||
state.Name, state.path)
|
||||
kerr := cmd.Process.Kill()
|
||||
if kerr != nil {
|
||||
warning("failed to kill %s: %s", state.Name, kerr)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
debug("warning: %s returned %s", command, err)
|
||||
}
|
||||
|
||||
processOutput(state, buf.Bytes())
|
||||
elapsed := time.Since(start)
|
||||
debug("%s linter took %s", state.Name, elapsed)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (l *linterState) fixPath(path string) string {
|
||||
lpath := strings.TrimSuffix(l.path, "...")
|
||||
labspath, _ := filepath.Abs(lpath)
|
||||
|
||||
if !l.ShouldChdir() {
|
||||
path = strings.TrimPrefix(path, lpath)
|
||||
}
|
||||
|
||||
if !filepath.IsAbs(path) {
|
||||
path, _ = filepath.Abs(filepath.Join(labspath, path))
|
||||
}
|
||||
if strings.HasPrefix(path, labspath) {
|
||||
return filepath.Join(lpath, strings.TrimPrefix(path, labspath))
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
func lintersFromFlags() map[string]*Linter {
|
||||
out := map[string]*Linter{}
|
||||
for _, linter := range config.Enable {
|
||||
out[linter] = LinterFromName(linter)
|
||||
}
|
||||
for _, linter := range config.Disable {
|
||||
delete(out, linter)
|
||||
}
|
||||
if config.Fast {
|
||||
for _, linter := range slowLinters {
|
||||
delete(out, linter)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// nolint: gocyclo
|
||||
func processOutput(state *linterState, out []byte) {
|
||||
re := state.regex
|
||||
all := re.FindAllSubmatchIndex(out, -1)
|
||||
debug("%s hits %d: %s", state.Name, len(all), state.Pattern)
|
||||
for _, indices := range all {
|
||||
group := [][]byte{}
|
||||
for i := 0; i < len(indices); i += 2 {
|
||||
var fragment []byte
|
||||
if indices[i] != -1 {
|
||||
fragment = out[indices[i]:indices[i+1]]
|
||||
}
|
||||
group = append(group, fragment)
|
||||
}
|
||||
|
||||
issue := &Issue{Line: 1}
|
||||
issue.Linter = LinterFromName(state.Name)
|
||||
for i, name := range re.SubexpNames() {
|
||||
if group[i] == nil {
|
||||
continue
|
||||
}
|
||||
part := string(group[i])
|
||||
if name != "" {
|
||||
state.vars[name] = part
|
||||
}
|
||||
switch name {
|
||||
case "path":
|
||||
issue.Path = state.fixPath(part)
|
||||
|
||||
case "line":
|
||||
n, err := strconv.ParseInt(part, 10, 32)
|
||||
kingpin.FatalIfError(err, "line matched invalid integer")
|
||||
issue.Line = int(n)
|
||||
|
||||
case "col":
|
||||
n, err := strconv.ParseInt(part, 10, 32)
|
||||
kingpin.FatalIfError(err, "col matched invalid integer")
|
||||
issue.Col = int(n)
|
||||
|
||||
case "message":
|
||||
issue.Message = part
|
||||
|
||||
case "":
|
||||
}
|
||||
}
|
||||
if m, ok := config.MessageOverride[state.Name]; ok {
|
||||
issue.Message = state.vars.Replace(m)
|
||||
}
|
||||
if sev, ok := config.Severity[state.Name]; ok {
|
||||
issue.Severity = Severity(sev)
|
||||
} else {
|
||||
issue.Severity = "warning"
|
||||
}
|
||||
if state.exclude != nil && state.exclude.MatchString(issue.String()) {
|
||||
continue
|
||||
}
|
||||
if state.include != nil && !state.include.MatchString(issue.String()) {
|
||||
continue
|
||||
}
|
||||
state.issues <- issue
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func findVendoredLinters() string {
|
||||
gopaths := strings.Split(getGoPath(), string(os.PathListSeparator))
|
||||
for _, home := range vendoredSearchPaths {
|
||||
for _, p := range gopaths {
|
||||
joined := append([]string{p, "src"}, home...)
|
||||
vendorRoot := filepath.Join(joined...)
|
||||
if _, err := os.Stat(vendorRoot); err == nil {
|
||||
return vendorRoot
|
||||
}
|
||||
}
|
||||
}
|
||||
return ""
|
||||
|
||||
}
|
||||
|
||||
// Go 1.8 compatible GOPATH.
|
||||
func getGoPath() string {
|
||||
path := os.Getenv("GOPATH")
|
||||
if path == "" {
|
||||
user, err := user.Current()
|
||||
kingpin.FatalIfError(err, "")
|
||||
path = filepath.Join(user.HomeDir, "go")
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
// addPath appends p to paths and returns it if:
|
||||
// 1. p is not a blank string
|
||||
// 2. p doesn't already exist in paths
|
||||
// Otherwise paths is returned unchanged.
|
||||
func addPath(p string, paths []string) []string {
|
||||
if p == "" {
|
||||
return paths
|
||||
}
|
||||
for _, path := range paths {
|
||||
if p == path {
|
||||
return paths
|
||||
}
|
||||
}
|
||||
return append(paths, p)
|
||||
}
|
||||
|
||||
// Ensure all "bin" directories from GOPATH exists in PATH, as well as GOBIN if set.
|
||||
func configureEnvironment() {
|
||||
gopaths := strings.Split(getGoPath(), string(os.PathListSeparator))
|
||||
paths := strings.Split(os.Getenv("PATH"), string(os.PathListSeparator))
|
||||
gobin := os.Getenv("GOBIN")
|
||||
|
||||
if config.VendoredLinters && config.Install {
|
||||
vendorRoot := findVendoredLinters()
|
||||
if vendorRoot == "" {
|
||||
kingpin.Fatalf("could not find vendored linters in GOPATH=%q", getGoPath())
|
||||
}
|
||||
debug("found vendored linters at %s, updating environment", vendorRoot)
|
||||
if gobin == "" {
|
||||
gobin = filepath.Join(gopaths[0], "bin")
|
||||
}
|
||||
// "go install" panics when one GOPATH element is beneath another, so we just set
|
||||
// our vendor root instead.
|
||||
gopaths = []string{vendorRoot}
|
||||
}
|
||||
|
||||
for _, p := range gopaths {
|
||||
paths = addPath(filepath.Join(p, "bin"), paths)
|
||||
}
|
||||
paths = addPath(gobin, paths)
|
||||
|
||||
path := strings.Join(paths, string(os.PathListSeparator))
|
||||
gopath := strings.Join(gopaths, string(os.PathListSeparator))
|
||||
|
||||
if err := os.Setenv("PATH", path); err != nil {
|
||||
warning("setenv PATH: %s", err)
|
||||
}
|
||||
debug("PATH=%s", os.Getenv("PATH"))
|
||||
|
||||
if err := os.Setenv("GOPATH", gopath); err != nil {
|
||||
warning("setenv GOPATH: %s", err)
|
||||
}
|
||||
debug("GOPATH=%s", os.Getenv("GOPATH"))
|
||||
|
||||
if err := os.Setenv("GOBIN", gobin); err != nil {
|
||||
warning("setenv GOBIN: %s", err)
|
||||
}
|
||||
debug("GOBIN=%s", os.Getenv("GOBIN"))
|
||||
}
|
19
tools/vendor/github.com/alecthomas/units/COPYING
generated
vendored
Normal file
19
tools/vendor/github.com/alecthomas/units/COPYING
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
Copyright (C) 2014 Alec Thomas
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
11
tools/vendor/github.com/alecthomas/units/README.md
generated
vendored
Normal file
11
tools/vendor/github.com/alecthomas/units/README.md
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
# Units - Helpful unit multipliers and functions for Go
|
||||
|
||||
The goal of this package is to have functionality similar to the [time](http://golang.org/pkg/time/) package.
|
||||
|
||||
It allows for code like this:
|
||||
|
||||
```go
|
||||
n, err := ParseBase2Bytes("1KB")
|
||||
// n == 1024
|
||||
n = units.Mebibyte * 512
|
||||
```
|
83
tools/vendor/github.com/alecthomas/units/bytes.go
generated
vendored
Normal file
83
tools/vendor/github.com/alecthomas/units/bytes.go
generated
vendored
Normal file
@ -0,0 +1,83 @@
|
||||
package units
|
||||
|
||||
// Base2Bytes is the old non-SI power-of-2 byte scale (1024 bytes in a kilobyte,
|
||||
// etc.).
|
||||
type Base2Bytes int64
|
||||
|
||||
// Base-2 byte units.
|
||||
const (
|
||||
Kibibyte Base2Bytes = 1024
|
||||
KiB = Kibibyte
|
||||
Mebibyte = Kibibyte * 1024
|
||||
MiB = Mebibyte
|
||||
Gibibyte = Mebibyte * 1024
|
||||
GiB = Gibibyte
|
||||
Tebibyte = Gibibyte * 1024
|
||||
TiB = Tebibyte
|
||||
Pebibyte = Tebibyte * 1024
|
||||
PiB = Pebibyte
|
||||
Exbibyte = Pebibyte * 1024
|
||||
EiB = Exbibyte
|
||||
)
|
||||
|
||||
var (
|
||||
bytesUnitMap = MakeUnitMap("iB", "B", 1024)
|
||||
oldBytesUnitMap = MakeUnitMap("B", "B", 1024)
|
||||
)
|
||||
|
||||
// ParseBase2Bytes supports both iB and B in base-2 multipliers. That is, KB
|
||||
// and KiB are both 1024.
|
||||
func ParseBase2Bytes(s string) (Base2Bytes, error) {
|
||||
n, err := ParseUnit(s, bytesUnitMap)
|
||||
if err != nil {
|
||||
n, err = ParseUnit(s, oldBytesUnitMap)
|
||||
}
|
||||
return Base2Bytes(n), err
|
||||
}
|
||||
|
||||
func (b Base2Bytes) String() string {
|
||||
return ToString(int64(b), 1024, "iB", "B")
|
||||
}
|
||||
|
||||
var (
|
||||
metricBytesUnitMap = MakeUnitMap("B", "B", 1000)
|
||||
)
|
||||
|
||||
// MetricBytes are SI byte units (1000 bytes in a kilobyte).
|
||||
type MetricBytes SI
|
||||
|
||||
// SI base-10 byte units.
|
||||
const (
|
||||
Kilobyte MetricBytes = 1000
|
||||
KB = Kilobyte
|
||||
Megabyte = Kilobyte * 1000
|
||||
MB = Megabyte
|
||||
Gigabyte = Megabyte * 1000
|
||||
GB = Gigabyte
|
||||
Terabyte = Gigabyte * 1000
|
||||
TB = Terabyte
|
||||
Petabyte = Terabyte * 1000
|
||||
PB = Petabyte
|
||||
Exabyte = Petabyte * 1000
|
||||
EB = Exabyte
|
||||
)
|
||||
|
||||
// ParseMetricBytes parses base-10 metric byte units. That is, KB is 1000 bytes.
|
||||
func ParseMetricBytes(s string) (MetricBytes, error) {
|
||||
n, err := ParseUnit(s, metricBytesUnitMap)
|
||||
return MetricBytes(n), err
|
||||
}
|
||||
|
||||
func (m MetricBytes) String() string {
|
||||
return ToString(int64(m), 1000, "B", "B")
|
||||
}
|
||||
|
||||
// ParseStrictBytes supports both iB and B suffixes for base 2 and metric,
|
||||
// respectively. That is, KiB represents 1024 and KB represents 1000.
|
||||
func ParseStrictBytes(s string) (int64, error) {
|
||||
n, err := ParseUnit(s, bytesUnitMap)
|
||||
if err != nil {
|
||||
n, err = ParseUnit(s, metricBytesUnitMap)
|
||||
}
|
||||
return int64(n), err
|
||||
}
|
13
tools/vendor/github.com/alecthomas/units/doc.go
generated
vendored
Normal file
13
tools/vendor/github.com/alecthomas/units/doc.go
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
// Package units provides helpful unit multipliers and functions for Go.
|
||||
//
|
||||
// The goal of this package is to have functionality similar to the time [1] package.
|
||||
//
|
||||
//
|
||||
// [1] http://golang.org/pkg/time/
|
||||
//
|
||||
// It allows for code like this:
|
||||
//
|
||||
// n, err := ParseBase2Bytes("1KB")
|
||||
// // n == 1024
|
||||
// n = units.Mebibyte * 512
|
||||
package units
|
26
tools/vendor/github.com/alecthomas/units/si.go
generated
vendored
Normal file
26
tools/vendor/github.com/alecthomas/units/si.go
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
package units
|
||||
|
||||
// SI units.
|
||||
type SI int64
|
||||
|
||||
// SI unit multiples.
|
||||
const (
|
||||
Kilo SI = 1000
|
||||
Mega = Kilo * 1000
|
||||
Giga = Mega * 1000
|
||||
Tera = Giga * 1000
|
||||
Peta = Tera * 1000
|
||||
Exa = Peta * 1000
|
||||
)
|
||||
|
||||
func MakeUnitMap(suffix, shortSuffix string, scale int64) map[string]float64 {
|
||||
return map[string]float64{
|
||||
shortSuffix: 1,
|
||||
"K" + suffix: float64(scale),
|
||||
"M" + suffix: float64(scale * scale),
|
||||
"G" + suffix: float64(scale * scale * scale),
|
||||
"T" + suffix: float64(scale * scale * scale * scale),
|
||||
"P" + suffix: float64(scale * scale * scale * scale * scale),
|
||||
"E" + suffix: float64(scale * scale * scale * scale * scale * scale),
|
||||
}
|
||||
}
|
138
tools/vendor/github.com/alecthomas/units/util.go
generated
vendored
Normal file
138
tools/vendor/github.com/alecthomas/units/util.go
generated
vendored
Normal file
@ -0,0 +1,138 @@
|
||||
package units
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
siUnits = []string{"", "K", "M", "G", "T", "P", "E"}
|
||||
)
|
||||
|
||||
func ToString(n int64, scale int64, suffix, baseSuffix string) string {
|
||||
mn := len(siUnits)
|
||||
out := make([]string, mn)
|
||||
for i, m := range siUnits {
|
||||
if n%scale != 0 || i == 0 && n == 0 {
|
||||
s := suffix
|
||||
if i == 0 {
|
||||
s = baseSuffix
|
||||
}
|
||||
out[mn-1-i] = fmt.Sprintf("%d%s%s", n%scale, m, s)
|
||||
}
|
||||
n /= scale
|
||||
if n == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return strings.Join(out, "")
|
||||
}
|
||||
|
||||
// Below code ripped straight from http://golang.org/src/pkg/time/format.go?s=33392:33438#L1123
|
||||
var errLeadingInt = errors.New("units: bad [0-9]*") // never printed
|
||||
|
||||
// leadingInt consumes the leading [0-9]* from s.
|
||||
func leadingInt(s string) (x int64, rem string, err error) {
|
||||
i := 0
|
||||
for ; i < len(s); i++ {
|
||||
c := s[i]
|
||||
if c < '0' || c > '9' {
|
||||
break
|
||||
}
|
||||
if x >= (1<<63-10)/10 {
|
||||
// overflow
|
||||
return 0, "", errLeadingInt
|
||||
}
|
||||
x = x*10 + int64(c) - '0'
|
||||
}
|
||||
return x, s[i:], nil
|
||||
}
|
||||
|
||||
func ParseUnit(s string, unitMap map[string]float64) (int64, error) {
|
||||
// [-+]?([0-9]*(\.[0-9]*)?[a-z]+)+
|
||||
orig := s
|
||||
f := float64(0)
|
||||
neg := false
|
||||
|
||||
// Consume [-+]?
|
||||
if s != "" {
|
||||
c := s[0]
|
||||
if c == '-' || c == '+' {
|
||||
neg = c == '-'
|
||||
s = s[1:]
|
||||
}
|
||||
}
|
||||
// Special case: if all that is left is "0", this is zero.
|
||||
if s == "0" {
|
||||
return 0, nil
|
||||
}
|
||||
if s == "" {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
for s != "" {
|
||||
g := float64(0) // this element of the sequence
|
||||
|
||||
var x int64
|
||||
var err error
|
||||
|
||||
// The next character must be [0-9.]
|
||||
if !(s[0] == '.' || ('0' <= s[0] && s[0] <= '9')) {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
// Consume [0-9]*
|
||||
pl := len(s)
|
||||
x, s, err = leadingInt(s)
|
||||
if err != nil {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
g = float64(x)
|
||||
pre := pl != len(s) // whether we consumed anything before a period
|
||||
|
||||
// Consume (\.[0-9]*)?
|
||||
post := false
|
||||
if s != "" && s[0] == '.' {
|
||||
s = s[1:]
|
||||
pl := len(s)
|
||||
x, s, err = leadingInt(s)
|
||||
if err != nil {
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
scale := 1.0
|
||||
for n := pl - len(s); n > 0; n-- {
|
||||
scale *= 10
|
||||
}
|
||||
g += float64(x) / scale
|
||||
post = pl != len(s)
|
||||
}
|
||||
if !pre && !post {
|
||||
// no digits (e.g. ".s" or "-.s")
|
||||
return 0, errors.New("units: invalid " + orig)
|
||||
}
|
||||
|
||||
// Consume unit.
|
||||
i := 0
|
||||
for ; i < len(s); i++ {
|
||||
c := s[i]
|
||||
if c == '.' || ('0' <= c && c <= '9') {
|
||||
break
|
||||
}
|
||||
}
|
||||
u := s[:i]
|
||||
s = s[i:]
|
||||
unit, ok := unitMap[u]
|
||||
if !ok {
|
||||
return 0, errors.New("units: unknown unit " + u + " in " + orig)
|
||||
}
|
||||
|
||||
f += g * unit
|
||||
}
|
||||
|
||||
if neg {
|
||||
f = -f
|
||||
}
|
||||
if f < float64(-1<<63) || f > float64(1<<63-1) {
|
||||
return 0, errors.New("units: overflow parsing unit")
|
||||
}
|
||||
return int64(f), nil
|
||||
}
|
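ParseUnit is the workhorse behind the byte parsers: it accepts fractional quantities and resolves suffixes through a unit map. A small sketch combining it with MakeUnitMap from si.go (illustrative only):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/units"
)

func main() {
	// Fractional values are supported: 1.5 * 1024 = 1536 bytes.
	n, err := units.ParseUnit("1.5KiB", units.MakeUnitMap("iB", "B", 1024))
	fmt.Println(n, err) // 1536 <nil>
}
```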
24
tools/vendor/github.com/alexflint/go-arg/LICENSE
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
Copyright (c) 2015, Alex Flint
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
321
tools/vendor/github.com/alexflint/go-arg/README.md
generated
vendored
Normal file
@ -0,0 +1,321 @@
|
||||
[GoDoc](https://godoc.org/github.com/alexflint/go-arg)
|
||||
[Build Status](https://travis-ci.org/alexflint/go-arg)
|
||||
[Coverage Status](https://coveralls.io/github/alexflint/go-arg?branch=master)
|
||||
[Go Report Card](https://goreportcard.com/badge/github.com/alexflint/go-arg)
|
||||
|
||||
## Structured argument parsing for Go
|
||||
|
||||
```shell
|
||||
go get github.com/alexflint/go-arg
|
||||
```
|
||||
|
||||
Declare the command line arguments your program accepts by defining a struct.
|
||||
|
||||
```go
|
||||
var args struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
fmt.Println(args.Foo, args.Bar)
|
||||
```
|
||||
|
||||
```shell
|
||||
$ ./example --foo=hello --bar
|
||||
hello true
|
||||
```
|
||||
|
||||
### Required arguments
|
||||
|
||||
```go
|
||||
var args struct {
|
||||
ID int `arg:"required"`
|
||||
Timeout time.Duration
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
```
|
||||
|
||||
```shell
|
||||
$ ./example
|
||||
Usage: example --id ID [--timeout TIMEOUT]
|
||||
error: --id is required
|
||||
```
|
||||
|
||||
### Positional arguments
|
||||
|
||||
```go
|
||||
var args struct {
|
||||
Input string `arg:"positional"`
|
||||
Output []string `arg:"positional"`
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
fmt.Println("Input:", args.Input)
|
||||
fmt.Println("Output:", args.Output)
|
||||
```
|
||||
|
||||
```
|
||||
$ ./example src.txt x.out y.out z.out
|
||||
Input: src.txt
|
||||
Output: [x.out y.out z.out]
|
||||
```
|
||||
|
||||
### Environment variables
|
||||
|
||||
```go
|
||||
var args struct {
|
||||
Workers int `arg:"env"`
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
fmt.Println("Workers:", args.Workers)
|
||||
```
|
||||
|
||||
```
|
||||
$ WORKERS=4 ./example
|
||||
Workers: 4
|
||||
```
|
||||
|
||||
```
|
||||
$ WORKERS=4 ./example --workers=6
|
||||
Workers: 6
|
||||
```
|
||||
|
||||
You can also override the name of the environment variable:
|
||||
|
||||
```go
|
||||
var args struct {
|
||||
Workers int `arg:"env:NUM_WORKERS"`
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
fmt.Println("Workers:", args.Workers)
|
||||
```
|
||||
|
||||
```
|
||||
$ NUM_WORKERS=4 ./example
|
||||
Workers: 4
|
||||
```
|
||||
|
||||
### Usage strings
|
||||
```go
|
||||
var args struct {
|
||||
Input string `arg:"positional"`
|
||||
Output []string `arg:"positional"`
|
||||
Verbose bool `arg:"-v,help:verbosity level"`
|
||||
Dataset string `arg:"help:dataset to use"`
|
||||
Optimize int `arg:"-O,help:optimization level"`
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
```
|
||||
|
||||
```shell
|
||||
$ ./example -h
|
||||
Usage: [--verbose] [--dataset DATASET] [--optimize OPTIMIZE] [--help] INPUT [OUTPUT [OUTPUT ...]]
|
||||
|
||||
Positional arguments:
|
||||
INPUT
|
||||
OUTPUT
|
||||
|
||||
Options:
|
||||
--verbose, -v verbosity level
|
||||
--dataset DATASET dataset to use
|
||||
--optimize OPTIMIZE, -O OPTIMIZE
|
||||
optimization level
|
||||
--help, -h print this help message
|
||||
```
|
||||
|
||||
### Default values
|
||||
|
||||
```go
|
||||
var args struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}
|
||||
args.Foo = "default value"
|
||||
arg.MustParse(&args)
|
||||
```
|
||||
|
||||
### Arguments with multiple values
|
||||
```go
|
||||
var args struct {
|
||||
Database string
|
||||
IDs []int64
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
fmt.Printf("Fetching the following IDs from %s: %q", args.Database, args.IDs)
|
||||
```
|
||||
|
||||
```shell
|
||||
./example -database foo -ids 1 2 3
|
||||
Fetching the following IDs from foo: [1 2 3]
|
||||
```
|
||||
|
||||
### Arguments that can be specified multiple times, mixed with positionals
|
||||
```go
|
||||
var args struct {
|
||||
Commands []string `arg:"-c,separate"`
|
||||
Files []string `arg:"-f,separate"`
|
||||
Databases []string `arg:"positional"`
|
||||
}
|
||||
```
|
||||
|
||||
```shell
|
||||
./example -c cmd1 db1 -f file1 db2 -c cmd2 -f file2 -f file3 db3 -c cmd3
|
||||
Commands: [cmd1 cmd2 cmd3]
|
||||
Files [file1 file2 file3]
|
||||
Databases [db1 db2 db3]
|
||||
```
|
||||
|
||||
### Custom validation
|
||||
```go
|
||||
var args struct {
|
||||
Foo string
|
||||
Bar string
|
||||
}
|
||||
p := arg.MustParse(&args)
|
||||
if args.Foo == "" && args.Bar == "" {
|
||||
p.Fail("you must provide one of --foo and --bar")
|
||||
}
|
||||
```
|
||||
|
||||
```shell
|
||||
./example
|
||||
Usage: samples [--foo FOO] [--bar BAR]
|
||||
error: you must provide one of --foo and --bar
|
||||
```
|
||||
|
||||
### Version strings
|
||||
|
||||
```go
|
||||
type args struct {
|
||||
...
|
||||
}
|
||||
|
||||
func (args) Version() string {
|
||||
return "someprogram 4.3.0"
|
||||
}
|
||||
|
||||
func main() {
|
||||
var args args
|
||||
arg.MustParse(&args)
|
||||
}
|
||||
```
|
||||
|
||||
```shell
|
||||
$ ./example --version
|
||||
someprogram 4.3.0
|
||||
```
|
||||
|
||||
### Embedded structs
|
||||
|
||||
The fields of embedded structs are treated just like regular fields:
|
||||
|
||||
```go
|
||||
|
||||
type DatabaseOptions struct {
|
||||
Host string
|
||||
Username string
|
||||
Password string
|
||||
}
|
||||
|
||||
type LogOptions struct {
|
||||
LogFile string
|
||||
Verbose bool
|
||||
}
|
||||
|
||||
func main() {
|
||||
var args struct {
|
||||
DatabaseOptions
|
||||
LogOptions
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
}
|
||||
```
|
||||
|
||||
As usual, any field tagged with `arg:"-"` is ignored.
|
||||
|
||||
### Custom parsing
|
||||
|
||||
You can implement your own argument parser by implementing `encoding.TextUnmarshaler`:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/alexflint/go-arg"
|
||||
)
|
||||
|
||||
// Accepts command line arguments of the form "head.tail"
|
||||
type NameDotName struct {
|
||||
Head, Tail string
|
||||
}
|
||||
|
||||
func (n *NameDotName) UnmarshalText(b []byte) error {
|
||||
s := string(b)
|
||||
pos := strings.Index(s, ".")
|
||||
if pos == -1 {
|
||||
return fmt.Errorf("missing period in %s", s)
|
||||
}
|
||||
n.Head = s[:pos]
|
||||
n.Tail = s[pos+1:]
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
var args struct {
|
||||
Name *NameDotName
|
||||
}
|
||||
arg.MustParse(&args)
|
||||
fmt.Printf("%#v\n", args.Name)
|
||||
}
|
||||
```
|
||||
```shell
|
||||
$ ./example --name=foo.bar
|
||||
&main.NameDotName{Head:"foo", Tail:"bar"}
|
||||
|
||||
$ ./example --name=oops
|
||||
Usage: example [--name NAME]
|
||||
error: error processing --name: missing period in "oops"
|
||||
```
|
||||
|
||||
### Description strings
|
||||
|
||||
```go
|
||||
type args struct {
|
||||
Foo string
|
||||
}
|
||||
|
||||
func (args) Description() string {
|
||||
return "this program does this and that"
|
||||
}
|
||||
|
||||
func main() {
|
||||
var args args
|
||||
arg.MustParse(&args)
|
||||
}
|
||||
```
|
||||
|
||||
```shell
|
||||
$ ./example -h
|
||||
this program does this and that
|
||||
Usage: example [--foo FOO]
|
||||
|
||||
Options:
|
||||
--foo FOO
|
||||
--help, -h display this help and exit
|
||||
```
|
||||
|
||||
### Documentation
|
||||
|
||||
https://godoc.org/github.com/alexflint/go-arg
|
||||
|
||||
### Rationale
|
||||
|
||||
There are many command line argument parsing libraries for Go, including one in the standard library, so why build another?
|
||||
|
||||
The shortcomings of the `flag` library that ships in the standard library are well known. Positional arguments must precede options, so `./prog x --foo=1` does what you expect but `./prog --foo=1 x` does not. Arguments cannot have both long (`--foo`) and short (`-f`) forms.
|
||||
|
||||
Many third-party argument parsing libraries are geared for writing sophisticated command line interfaces. The excellent `codegangsta/cli` is perfect for working with multiple sub-commands and nested flags, but is probably overkill for a simple script with a handful of flags.
|
||||
|
||||
The main idea behind `go-arg` is that Go already has an excellent way to describe data structures using Go structs, so there is no need to develop more levels of abstraction on top of this. Instead of one API to specify which arguments your program accepts, and then another API to get the values of those arguments, why not replace both with a single struct?
|
36
tools/vendor/github.com/alexflint/go-arg/doc.go
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
// Package arg parses command line arguments using the fields from a struct.
|
||||
//
|
||||
// For example,
|
||||
//
|
||||
// var args struct {
|
||||
// Iter int
|
||||
// Debug bool
|
||||
// }
|
||||
// arg.MustParse(&args)
|
||||
//
|
||||
// defines two command line arguments, which can be set using any of
|
||||
//
|
||||
// ./example --iter=1 --debug // debug is a boolean flag so its value is set to true
|
||||
// ./example -iter 1 // debug defaults to its zero value (false)
|
||||
// ./example --debug=true // iter defaults to its zero value (zero)
|
||||
//
|
||||
// The fastest way to see how to use go-arg is to read the examples below.
|
||||
//
|
||||
// Fields can be bool, string, any float type, or any signed or unsigned integer type.
|
||||
// They can also be slices of any of the above, or slices of pointers to any of the above.
|
||||
//
|
||||
// Tags can be specified using the `arg` package name:
|
||||
//
|
||||
// var args struct {
|
||||
// Input string `arg:"positional"`
|
||||
// Log string `arg:"positional,required"`
|
||||
// Debug bool `arg:"-d,help:turn on debug mode"`
|
||||
// RealMode bool `arg:"--real"`
|
||||
// Wr io.Writer `arg:"-"`
|
||||
// }
|
||||
//
|
||||
// The valid tag strings are `positional`, `required`, and `help`. Further, any tag string
|
||||
// that starts with a single hyphen is the short form for an argument (e.g. `./example -d`),
|
||||
// and any tag string that starts with two hyphens is the long form for the argument
|
||||
// (instead of the field name). Fields can be excluded from processing with `arg:"-"`.
|
||||
package arg
|
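Putting the tag forms described in this doc comment together, a minimal runnable sketch might look like the following; the field names and help strings are illustrative, not part of the package:

```go
package main

import (
	"fmt"

	"github.com/alexflint/go-arg"
)

func main() {
	var args struct {
		Input    string `arg:"positional"`
		Log      string `arg:"positional,required"`
		Debug    bool   `arg:"-d,help:turn on debug mode"`
		RealMode bool   `arg:"--real"`
	}
	arg.MustParse(&args)
	fmt.Println(args.Input, args.Log, args.Debug, args.RealMode)
}
```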
485
tools/vendor/github.com/alexflint/go-arg/parse.go
generated
vendored
Normal file
@ -0,0 +1,485 @@
|
||||
package arg
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
scalar "github.com/alexflint/go-scalar"
|
||||
)
|
||||
|
||||
// spec represents a command line option
|
||||
type spec struct {
|
||||
dest reflect.Value
|
||||
long string
|
||||
short string
|
||||
multiple bool
|
||||
required bool
|
||||
positional bool
|
||||
separate bool
|
||||
help string
|
||||
env string
|
||||
wasPresent bool
|
||||
boolean bool
|
||||
}
|
||||
|
||||
// ErrHelp indicates that -h or --help were provided
|
||||
var ErrHelp = errors.New("help requested by user")
|
||||
|
||||
// ErrVersion indicates that --version was provided
|
||||
var ErrVersion = errors.New("version requested by user")
|
||||
|
||||
// MustParse processes command line arguments and exits upon failure
|
||||
func MustParse(dest ...interface{}) *Parser {
|
||||
p, err := NewParser(Config{}, dest...)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(-1)
|
||||
}
|
||||
err = p.Parse(flags())
|
||||
if err == ErrHelp {
|
||||
p.WriteHelp(os.Stdout)
|
||||
os.Exit(0)
|
||||
}
|
||||
if err == ErrVersion {
|
||||
fmt.Println(p.version)
|
||||
os.Exit(0)
|
||||
}
|
||||
if err != nil {
|
||||
p.Fail(err.Error())
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// Parse processes command line arguments and stores them in dest
|
||||
func Parse(dest ...interface{}) error {
|
||||
p, err := NewParser(Config{}, dest...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return p.Parse(flags())
|
||||
}
|
||||
|
||||
// flags gets all command line arguments other than the first (program name)
|
||||
func flags() []string {
|
||||
if len(os.Args) == 0 { // os.Args could be empty
|
||||
return nil
|
||||
}
|
||||
return os.Args[1:]
|
||||
}
|
||||
|
||||
// Config represents configuration options for an argument parser
|
||||
type Config struct {
|
||||
Program string // Program is the name of the program used in the help text
|
||||
}
|
||||
|
||||
// Parser represents a set of command line options with destination values
|
||||
type Parser struct {
|
||||
spec []*spec
|
||||
config Config
|
||||
version string
|
||||
description string
|
||||
}
|
||||
|
||||
// Versioned is the interface that the destination struct should implement to
|
||||
// make a version string appear at the top of the help message.
|
||||
type Versioned interface {
|
||||
// Version returns the version string that will be printed on a line by itself
|
||||
// at the top of the help message.
|
||||
Version() string
|
||||
}
|
||||
|
||||
// Described is the interface that the destination struct should implement to
|
||||
// make a description string appear at the top of the help message.
|
||||
type Described interface {
|
||||
// Description returns the string that will be printed on a line by itself
|
||||
// at the top of the help message.
|
||||
Description() string
|
||||
}
|
||||
|
||||
// walkFields calls a function for each field of a struct, recursively expanding struct fields.
|
||||
func walkFields(v reflect.Value, visit func(field reflect.StructField, val reflect.Value, owner reflect.Type) bool) {
|
||||
t := v.Type()
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
field := t.Field(i)
|
||||
val := v.Field(i)
|
||||
expand := visit(field, val, t)
|
||||
if expand && field.Type.Kind() == reflect.Struct {
|
||||
walkFields(val, visit)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NewParser constructs a parser from a list of destination structs
|
||||
func NewParser(config Config, dests ...interface{}) (*Parser, error) {
|
||||
p := Parser{
|
||||
config: config,
|
||||
}
|
||||
for _, dest := range dests {
|
||||
if dest, ok := dest.(Versioned); ok {
|
||||
p.version = dest.Version()
|
||||
}
|
||||
if dest, ok := dest.(Described); ok {
|
||||
p.description = dest.Description()
|
||||
}
|
||||
v := reflect.ValueOf(dest)
|
||||
if v.Kind() != reflect.Ptr {
|
||||
panic(fmt.Sprintf("%s is not a pointer (did you forget an ampersand?)", v.Type()))
|
||||
}
|
||||
v = v.Elem()
|
||||
if v.Kind() != reflect.Struct {
|
||||
panic(fmt.Sprintf("%T is not a struct pointer", dest))
|
||||
}
|
||||
|
||||
var errs []string
|
||||
walkFields(v, func(field reflect.StructField, val reflect.Value, t reflect.Type) bool {
|
||||
// Check for the ignore switch in the tag
|
||||
tag := field.Tag.Get("arg")
|
||||
if tag == "-" {
|
||||
return false
|
||||
}
|
||||
|
||||
// If this is an embedded struct then recurse into its fields
|
||||
if field.Anonymous && field.Type.Kind() == reflect.Struct {
|
||||
return true
|
||||
}
|
||||
|
||||
spec := spec{
|
||||
long: strings.ToLower(field.Name),
|
||||
dest: val,
|
||||
}
|
||||
|
||||
// Check whether this field is supported. It's good to do this here rather than
|
||||
// wait until setScalar because it means that a program with invalid argument
|
||||
// fields will always fail regardless of whether the arguments it received
|
||||
// exercised those fields.
|
||||
var parseable bool
|
||||
parseable, spec.boolean, spec.multiple = canParse(field.Type)
|
||||
if !parseable {
|
||||
errs = append(errs, fmt.Sprintf("%s.%s: %s fields are not supported",
|
||||
t.Name(), field.Name, field.Type.String()))
|
||||
return false
|
||||
}
|
||||
|
||||
// Look at the tag
|
||||
if tag != "" {
|
||||
for _, key := range strings.Split(tag, ",") {
|
||||
var value string
|
||||
if pos := strings.Index(key, ":"); pos != -1 {
|
||||
value = key[pos+1:]
|
||||
key = key[:pos]
|
||||
}
|
||||
|
||||
switch {
|
||||
case strings.HasPrefix(key, "---"):
|
||||
errs = append(errs, fmt.Sprintf("%s.%s: too many hyphens", t.Name(), field.Name))
|
||||
case strings.HasPrefix(key, "--"):
|
||||
spec.long = key[2:]
|
||||
case strings.HasPrefix(key, "-"):
|
||||
if len(key) != 2 {
|
||||
errs = append(errs, fmt.Sprintf("%s.%s: short arguments must be one character only",
|
||||
t.Name(), field.Name))
|
||||
return false
|
||||
}
|
||||
spec.short = key[1:]
|
||||
case key == "required":
|
||||
spec.required = true
|
||||
case key == "positional":
|
||||
spec.positional = true
|
||||
case key == "separate":
|
||||
spec.separate = true
|
||||
case key == "help":
|
||||
spec.help = value
|
||||
case key == "env":
|
||||
// Use override name if provided
|
||||
if value != "" {
|
||||
spec.env = value
|
||||
} else {
|
||||
spec.env = strings.ToUpper(field.Name)
|
||||
}
|
||||
default:
|
||||
errs = append(errs, fmt.Sprintf("unrecognized tag '%s' on field %s", key, tag))
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
p.spec = append(p.spec, &spec)
|
||||
|
||||
// if this was an embedded field then we already returned true up above
|
||||
return false
|
||||
})
|
||||
|
||||
if len(errs) > 0 {
|
||||
return nil, errors.New(strings.Join(errs, "\n"))
|
||||
}
|
||||
}
|
||||
if p.config.Program == "" {
|
||||
p.config.Program = "program"
|
||||
if len(os.Args) > 0 {
|
||||
p.config.Program = filepath.Base(os.Args[0])
|
||||
}
|
||||
}
|
||||
return &p, nil
|
||||
}
|
||||
|
||||
// Parse processes the given command line options, storing the results in the fields
|
||||
// of the structs from which NewParser was constructed
|
||||
func (p *Parser) Parse(args []string) error {
|
||||
// If -h or --help were specified then print usage
|
||||
for _, arg := range args {
|
||||
if arg == "-h" || arg == "--help" {
|
||||
return ErrHelp
|
||||
}
|
||||
if arg == "--version" {
|
||||
return ErrVersion
|
||||
}
|
||||
if arg == "--" {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Process all command line arguments
|
||||
err := process(p.spec, args)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Validate
|
||||
return validate(p.spec)
|
||||
}
|
||||
|
||||
// process goes through arguments one-by-one, parses them, and assigns the result to
|
||||
// the underlying struct field
|
||||
func process(specs []*spec, args []string) error {
|
||||
// construct a map from --option to spec
|
||||
optionMap := make(map[string]*spec)
|
||||
for _, spec := range specs {
|
||||
if spec.positional {
|
||||
continue
|
||||
}
|
||||
if spec.long != "" {
|
||||
optionMap[spec.long] = spec
|
||||
}
|
||||
if spec.short != "" {
|
||||
optionMap[spec.short] = spec
|
||||
}
|
||||
if spec.env != "" {
|
||||
if value, found := os.LookupEnv(spec.env); found {
|
||||
err := setScalar(spec.dest, value)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error processing environment variable %s: %v", spec.env, err)
|
||||
}
|
||||
spec.wasPresent = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// process each string from the command line
|
||||
var allpositional bool
|
||||
var positionals []string
|
||||
|
||||
// must use explicit for loop, not range, because we manipulate i inside the loop
|
||||
for i := 0; i < len(args); i++ {
|
||||
arg := args[i]
|
||||
if arg == "--" {
|
||||
allpositional = true
|
||||
continue
|
||||
}
|
||||
|
||||
if !isFlag(arg) || allpositional {
|
||||
positionals = append(positionals, arg)
|
||||
continue
|
||||
}
|
||||
|
||||
// check for an equals sign, as in "--foo=bar"
|
||||
var value string
|
||||
opt := strings.TrimLeft(arg, "-")
|
||||
if pos := strings.Index(opt, "="); pos != -1 {
|
||||
value = opt[pos+1:]
|
||||
opt = opt[:pos]
|
||||
}
|
||||
|
||||
// lookup the spec for this option
|
||||
spec, ok := optionMap[opt]
|
||||
if !ok {
|
||||
return fmt.Errorf("unknown argument %s", arg)
|
||||
}
|
||||
spec.wasPresent = true
|
||||
|
||||
// deal with the case of multiple values
|
||||
if spec.multiple {
|
||||
var values []string
|
||||
if value == "" {
|
||||
for i+1 < len(args) && !isFlag(args[i+1]) {
|
||||
values = append(values, args[i+1])
|
||||
i++
|
||||
if spec.separate {
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
values = append(values, value)
|
||||
}
|
||||
err := setSlice(spec.dest, values, !spec.separate)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error processing %s: %v", arg, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// if it's a flag and it has no value then set the value to true
|
||||
// use boolean because this takes account of TextUnmarshaler
|
||||
if spec.boolean && value == "" {
|
||||
value = "true"
|
||||
}
|
||||
|
||||
// if we have something like "--foo" then the value is the next argument
|
||||
if value == "" {
|
||||
if i+1 == len(args) || isFlag(args[i+1]) {
|
||||
return fmt.Errorf("missing value for %s", arg)
|
||||
}
|
||||
value = args[i+1]
|
||||
i++
|
||||
}
|
||||
|
||||
err := setScalar(spec.dest, value)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error processing %s: %v", arg, err)
|
||||
}
|
||||
}
|
||||
|
||||
// process positionals
|
||||
for _, spec := range specs {
|
||||
if spec.positional {
|
||||
if spec.multiple {
|
||||
if spec.required && len(positionals) == 0 {
|
||||
return fmt.Errorf("%s is required", spec.long)
|
||||
}
|
||||
err := setSlice(spec.dest, positionals, true)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error processing %s: %v", spec.long, err)
|
||||
}
|
||||
positionals = nil
|
||||
} else if len(positionals) > 0 {
|
||||
err := setScalar(spec.dest, positionals[0])
|
||||
if err != nil {
|
||||
return fmt.Errorf("error processing %s: %v", spec.long, err)
|
||||
}
|
||||
positionals = positionals[1:]
|
||||
} else if spec.required {
|
||||
return fmt.Errorf("%s is required", spec.long)
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(positionals) > 0 {
|
||||
return fmt.Errorf("too many positional arguments at '%s'", positionals[0])
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// isFlag returns true if a token is a flag such as "-v" or "--user" but not "-" or "--"
|
||||
func isFlag(s string) bool {
|
||||
return strings.HasPrefix(s, "-") && strings.TrimLeft(s, "-") != ""
|
||||
}
|
||||
|
||||
// validate an argument spec after arguments have been parsed
|
||||
func validate(spec []*spec) error {
|
||||
for _, arg := range spec {
|
||||
if !arg.positional && arg.required && !arg.wasPresent {
|
||||
return fmt.Errorf("--%s is required", arg.long)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// setSlice parses a list of values and stores them in a slice field of the struct
|
||||
func setSlice(dest reflect.Value, values []string, trunc bool) error {
|
||||
if !dest.CanSet() {
|
||||
return fmt.Errorf("field is not writable")
|
||||
}
|
||||
|
||||
var ptr bool
|
||||
elem := dest.Type().Elem()
|
||||
if elem.Kind() == reflect.Ptr {
|
||||
ptr = true
|
||||
elem = elem.Elem()
|
||||
}
|
||||
|
||||
// Truncate the dest slice in case default values exist
|
||||
if trunc && !dest.IsNil() {
|
||||
dest.SetLen(0)
|
||||
}
|
||||
|
||||
for _, s := range values {
|
||||
v := reflect.New(elem)
|
||||
if err := setScalar(v.Elem(), s); err != nil {
|
||||
return err
|
||||
}
|
||||
if !ptr {
|
||||
v = v.Elem()
|
||||
}
|
||||
dest.Set(reflect.Append(dest, v))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// canParse returns true if the type can be parsed from a string
|
||||
func canParse(t reflect.Type) (parseable, boolean, multiple bool) {
|
||||
parseable, boolean = isScalar(t)
|
||||
if parseable {
|
||||
return
|
||||
}
|
||||
|
||||
// Look inside pointer types
|
||||
if t.Kind() == reflect.Ptr {
|
||||
t = t.Elem()
|
||||
}
|
||||
// Look inside slice types
|
||||
if t.Kind() == reflect.Slice {
|
||||
multiple = true
|
||||
t = t.Elem()
|
||||
}
|
||||
|
||||
parseable, boolean = isScalar(t)
|
||||
if parseable {
|
||||
return
|
||||
}
|
||||
|
||||
// Look inside pointer types (again, in case of []*Type)
|
||||
if t.Kind() == reflect.Ptr {
|
||||
t = t.Elem()
|
||||
}
|
||||
|
||||
parseable, boolean = isScalar(t)
|
||||
if parseable {
|
||||
return
|
||||
}
|
||||
|
||||
return false, false, false
|
||||
}
|
||||
|
||||
var textUnmarshalerType = reflect.TypeOf([]encoding.TextUnmarshaler{}).Elem()
|
||||
|
||||
// isScalar returns true if the type can be parsed from a single string
|
||||
func isScalar(t reflect.Type) (parseable, boolean bool) {
|
||||
parseable = scalar.CanParse(t)
|
||||
switch {
|
||||
case t.Implements(textUnmarshalerType):
|
||||
return parseable, false
|
||||
case t.Kind() == reflect.Bool:
|
||||
return parseable, true
|
||||
case t.Kind() == reflect.Ptr && t.Elem().Kind() == reflect.Bool:
|
||||
return parseable, true
|
||||
default:
|
||||
return parseable, false
|
||||
}
|
||||
}
|
||||
|
||||
// set a value from a string
|
||||
func setScalar(v reflect.Value, s string) error {
|
||||
return scalar.ParseValue(v, s)
|
||||
}
|
148
tools/vendor/github.com/alexflint/go-arg/usage.go
generated
vendored
Normal file
@ -0,0 +1,148 @@
|
||||
package arg
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// the width of the left column
|
||||
const colWidth = 25
|
||||
|
||||
// Fail prints usage information to stderr and exits with non-zero status
|
||||
func (p *Parser) Fail(msg string) {
|
||||
p.WriteUsage(os.Stderr)
|
||||
fmt.Fprintln(os.Stderr, "error:", msg)
|
||||
os.Exit(-1)
|
||||
}
|
||||
|
||||
// WriteUsage writes usage information to the given writer
|
||||
func (p *Parser) WriteUsage(w io.Writer) {
|
||||
var positionals, options []*spec
|
||||
for _, spec := range p.spec {
|
||||
if spec.positional {
|
||||
positionals = append(positionals, spec)
|
||||
} else {
|
||||
options = append(options, spec)
|
||||
}
|
||||
}
|
||||
|
||||
if p.version != "" {
|
||||
fmt.Fprintln(w, p.version)
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, "Usage: %s", p.config.Program)
|
||||
|
||||
// write the option component of the usage message
|
||||
for _, spec := range options {
|
||||
// prefix with a space
|
||||
fmt.Fprint(w, " ")
|
||||
if !spec.required {
|
||||
fmt.Fprint(w, "[")
|
||||
}
|
||||
fmt.Fprint(w, synopsis(spec, "--"+spec.long))
|
||||
if !spec.required {
|
||||
fmt.Fprint(w, "]")
|
||||
}
|
||||
}
|
||||
|
||||
// write the positional component of the usage message
|
||||
for _, spec := range positionals {
|
||||
// prefix with a space
|
||||
fmt.Fprint(w, " ")
|
||||
up := strings.ToUpper(spec.long)
|
||||
if spec.multiple {
|
||||
if !spec.required {
|
||||
fmt.Fprint(w, "[")
|
||||
}
|
||||
fmt.Fprintf(w, "%s [%s ...]", up, up)
|
||||
if !spec.required {
|
||||
fmt.Fprint(w, "]")
|
||||
}
|
||||
} else {
|
||||
fmt.Fprint(w, up)
|
||||
}
|
||||
}
|
||||
fmt.Fprint(w, "\n")
|
||||
}
|
||||
|
||||
// WriteHelp writes the usage string followed by the full help string for each option
|
||||
func (p *Parser) WriteHelp(w io.Writer) {
|
||||
var positionals, options []*spec
|
||||
for _, spec := range p.spec {
|
||||
if spec.positional {
|
||||
positionals = append(positionals, spec)
|
||||
} else {
|
||||
options = append(options, spec)
|
||||
}
|
||||
}
|
||||
|
||||
if p.description != "" {
|
||||
fmt.Fprintln(w, p.description)
|
||||
}
|
||||
p.WriteUsage(w)
|
||||
|
||||
// write the list of positionals
|
||||
if len(positionals) > 0 {
|
||||
fmt.Fprint(w, "\nPositional arguments:\n")
|
||||
for _, spec := range positionals {
|
||||
left := " " + strings.ToUpper(spec.long)
|
||||
fmt.Fprint(w, left)
|
||||
if spec.help != "" {
|
||||
if len(left)+2 < colWidth {
|
||||
fmt.Fprint(w, strings.Repeat(" ", colWidth-len(left)))
|
||||
} else {
|
||||
fmt.Fprint(w, "\n"+strings.Repeat(" ", colWidth))
|
||||
}
|
||||
fmt.Fprint(w, spec.help)
|
||||
}
|
||||
fmt.Fprint(w, "\n")
|
||||
}
|
||||
}
|
||||
|
||||
// write the list of options
|
||||
fmt.Fprint(w, "\nOptions:\n")
|
||||
for _, spec := range options {
|
||||
printOption(w, spec)
|
||||
}
|
||||
|
||||
// write the list of built in options
|
||||
printOption(w, &spec{boolean: true, long: "help", short: "h", help: "display this help and exit"})
|
||||
if p.version != "" {
|
||||
printOption(w, &spec{boolean: true, long: "version", help: "display version and exit"})
|
||||
}
|
||||
}
|
||||
|
||||
func printOption(w io.Writer, spec *spec) {
|
||||
left := " " + synopsis(spec, "--"+spec.long)
|
||||
if spec.short != "" {
|
||||
left += ", " + synopsis(spec, "-"+spec.short)
|
||||
}
|
||||
fmt.Fprint(w, left)
|
||||
if spec.help != "" {
|
||||
if len(left)+2 < colWidth {
|
||||
fmt.Fprint(w, strings.Repeat(" ", colWidth-len(left)))
|
||||
} else {
|
||||
fmt.Fprint(w, "\n"+strings.Repeat(" ", colWidth))
|
||||
}
|
||||
fmt.Fprint(w, spec.help)
|
||||
}
|
||||
// If spec.dest is not the zero value then a default value has been added.
|
||||
v := spec.dest
|
||||
if v.IsValid() {
|
||||
z := reflect.Zero(v.Type())
|
||||
if (v.Type().Comparable() && z.Type().Comparable() && v.Interface() != z.Interface()) || v.Kind() == reflect.Slice && !v.IsNil() {
|
||||
fmt.Fprintf(w, " [default: %v]", v)
|
||||
}
|
||||
}
|
||||
fmt.Fprint(w, "\n")
|
||||
}
|
||||
|
||||
func synopsis(spec *spec, form string) string {
|
||||
if spec.boolean {
|
||||
return form
|
||||
}
|
||||
return form + " " + strings.ToUpper(spec.long)
|
||||
}
|
24
tools/vendor/github.com/alexflint/go-scalar/LICENSE
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
Copyright (c) 2015, Alex Flint
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
28
tools/vendor/github.com/alexflint/go-scalar/README.md
generated
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
[GoDoc](https://godoc.org/github.com/alexflint/go-scalar)
|
||||
[Build Status](https://travis-ci.org/alexflint/go-scalar)
|
||||
[Coverage Status](https://coveralls.io/github/alexflint/go-scalar?branch=master)
|
||||
[Go Report Card](https://goreportcard.com/badge/github.com/alexflint/go-scalar)
|
||||
|
||||
## Scalar parsing library
|
||||
|
||||
Scalar is a library for parsing strings into arbitrary scalars (integers,
|
||||
floats, strings, booleans, etc). It is helpful for tasks such as parsing
|
||||
strings passed as environment variables or command line arguments.
|
||||
|
||||
```shell
|
||||
go get github.com/alexflint/go-scalar
|
||||
```
|
||||
|
||||
The main API works as follows:
|
||||
|
||||
```go
|
||||
var value int
|
||||
err := scalar.Parse(&value, "123")
|
||||
```
|
||||
|
||||
There is also a variant that takes a `reflect.Value`:
|
||||
|
||||
```go
|
||||
var value int
|
||||
err := scalar.ParseValue(reflect.ValueOf(&value), "123")
|
||||
```
|
154
tools/vendor/github.com/alexflint/go-scalar/scalar.go
generated
vendored
Normal file
@ -0,0 +1,154 @@
|
||||
// Package scalar parses strings into values of scalar type.
|
||||
|
||||
package scalar
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/mail"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// The reflected form of some special types
|
||||
var (
|
||||
textUnmarshalerType = reflect.TypeOf([]encoding.TextUnmarshaler{}).Elem()
|
||||
durationType = reflect.TypeOf(time.Duration(0))
|
||||
mailAddressType = reflect.TypeOf(mail.Address{})
|
||||
ipType = reflect.TypeOf(net.IP{})
|
||||
macType = reflect.TypeOf(net.HardwareAddr{})
|
||||
)
|
||||
|
||||
var (
|
||||
errNotSettable = errors.New("value is not settable")
|
||||
errPtrNotSettable = errors.New("value is a nil pointer and is not settable")
|
||||
)
|
||||
|
||||
// Parse assigns a value to dest by parsing s.
|
||||
func Parse(dest interface{}, s string) error {
|
||||
return ParseValue(reflect.ValueOf(dest), s)
|
||||
}
|
||||
|
||||
// ParseValue assigns a value to v by parsing s.
|
||||
func ParseValue(v reflect.Value, s string) error {
|
||||
// If we have a nil pointer then allocate a new object
|
||||
if v.Kind() == reflect.Ptr && v.IsNil() {
|
||||
if !v.CanSet() {
|
||||
return errPtrNotSettable
|
||||
}
|
||||
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
|
||||
// If it implements encoding.TextUnmarshaler then use that
|
||||
if scalar, ok := v.Interface().(encoding.TextUnmarshaler); ok {
|
||||
return scalar.UnmarshalText([]byte(s))
|
||||
}
|
||||
|
||||
// If we have a pointer then dereference it
|
||||
if v.Kind() == reflect.Ptr {
|
||||
v = v.Elem()
|
||||
}
|
||||
|
||||
if !v.CanSet() {
|
||||
return errNotSettable
|
||||
}
|
||||
|
||||
// Switch on concrete type
|
||||
switch scalar := v.Interface(); scalar.(type) {
|
||||
case time.Duration:
|
||||
duration, err := time.ParseDuration(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Set(reflect.ValueOf(duration))
|
||||
return nil
|
||||
case mail.Address:
|
||||
addr, err := mail.ParseAddress(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Set(reflect.ValueOf(*addr))
|
||||
return nil
|
||||
case net.IP:
|
||||
ip := net.ParseIP(s)
|
||||
if ip == nil {
|
||||
return fmt.Errorf(`invalid IP address: "%s"`, s)
|
||||
}
|
||||
v.Set(reflect.ValueOf(ip))
|
||||
return nil
|
||||
case net.HardwareAddr:
|
||||
ip, err := net.ParseMAC(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Set(reflect.ValueOf(ip))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Switch on kind so that we can handle derived types
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
v.SetString(s)
|
||||
case reflect.Bool:
|
||||
x, err := strconv.ParseBool(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetBool(x)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
x, err := strconv.ParseInt(s, 10, v.Type().Bits())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetInt(x)
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
x, err := strconv.ParseUint(s, 10, v.Type().Bits())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetUint(x)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
x, err := strconv.ParseFloat(s, v.Type().Bits())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.SetFloat(x)
|
||||
default:
|
||||
return fmt.Errorf("cannot parse into %v", v.Type())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// CanParse returns true if the type can be parsed from a string.
|
||||
func CanParse(t reflect.Type) bool {
|
||||
// If it implements encoding.TextUnmarshaler then use that
|
||||
if t.Implements(textUnmarshalerType) {
|
||||
return true
|
||||
}
|
||||
|
||||
// If we have a pointer then dereference it
|
||||
if t.Kind() == reflect.Ptr {
|
||||
t = t.Elem()
|
||||
}
|
||||
|
||||
// Check for other special types
|
||||
switch t {
|
||||
case durationType, mailAddressType, ipType, macType:
|
||||
return true
|
||||
}
|
||||
|
||||
// Fall back to checking the kind
|
||||
switch t.Kind() {
|
||||
case reflect.Bool:
|
||||
return true
|
||||
case reflect.String, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
|
||||
reflect.Float32, reflect.Float64:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
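To see the special-case and kind-based paths above in action, a short usage sketch (a hypothetical program using only the exported Parse and CanParse shown in this file):

```go
package main

import (
	"fmt"
	"reflect"
	"time"

	scalar "github.com/alexflint/go-scalar"
)

func main() {
	// time.Duration goes through the special-case branch that calls time.ParseDuration.
	var d time.Duration
	if err := scalar.Parse(&d, "1h30m"); err != nil {
		panic(err)
	}
	fmt.Println(d) // 1h30m0s

	// CanParse reports whether a type is supported at all.
	fmt.Println(scalar.CanParse(reflect.TypeOf(d)))       // true
	fmt.Println(scalar.CanParse(reflect.TypeOf([]int{}))) // false
}
```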
37
tools/vendor/github.com/client9/misspell/Dockerfile
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
FROM golang:1.8.1-alpine
|
||||
MAINTAINER https://github.com/client9/misspell
|
||||
|
||||
# cache buster
|
||||
RUN echo 3
|
||||
|
||||
# git is needed for "go get" below
|
||||
RUN apk add --no-cache git make
|
||||
|
||||
# these are my standard testing / linting tools
|
||||
RUN /bin/true \
|
||||
&& go get -u github.com/alecthomas/gometalinter \
|
||||
&& gometalinter --install \
|
||||
&& rm -rf /go/src /go/pkg
|
||||
#
|
||||
# * SCOWL word list
|
||||
#
|
||||
# Downloads
|
||||
# http://wordlist.aspell.net/dicts/
|
||||
# --> http://app.aspell.net/create
|
||||
#
|
||||
|
||||
# use en_US large size
|
||||
# use regular size for others
|
||||
ENV SOURCE_US_BIG http://app.aspell.net/create?max_size=70&spelling=US&max_variant=2&diacritic=both&special=hacker&special=roman-numerals&download=wordlist&encoding=utf-8&format=inline
|
||||
|
||||
# should be able to tell the difference between English variations using this
|
||||
ENV SOURCE_US http://app.aspell.net/create?max_size=60&spelling=US&max_variant=1&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
ENV SOURCE_GB_ISE http://app.aspell.net/create?max_size=60&spelling=GBs&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
ENV SOURCE_GB_IZE http://app.aspell.net/create?max_size=60&spelling=GBz&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
ENV SOURCE_CA http://app.aspell.net/create?max_size=60&spelling=CA&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
|
||||
RUN /bin/true \
|
||||
&& mkdir /scowl-wl \
|
||||
&& wget -O /scowl-wl/words-US-60.txt ${SOURCE_US} \
|
||||
&& wget -O /scowl-wl/words-GB-ise-60.txt ${SOURCE_GB_ISE}
|
||||
|
22
tools/vendor/github.com/client9/misspell/LICENSE
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015-2017 Nick Galbreath
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
84
tools/vendor/github.com/client9/misspell/Makefile
generated
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
CONTAINER=nickg/misspell
|
||||
|
||||
install: ## install misspell into GOPATH/bin
|
||||
go install ./cmd/misspell
|
||||
|
||||
build: hooks ## build and lint misspell
|
||||
go install ./cmd/misspell
|
||||
gometalinter \
|
||||
--vendor \
|
||||
--deadline=60s \
|
||||
--disable-all \
|
||||
--enable=vet \
|
||||
--enable=golint \
|
||||
--enable=gofmt \
|
||||
--enable=goimports \
|
||||
--enable=gosimple \
|
||||
--enable=staticcheck \
|
||||
--enable=ineffassign \
|
||||
--exclude=/usr/local/go/src/net/lookup_unix.go \
|
||||
./...
|
||||
go test .
|
||||
|
||||
test: ## run all tests
|
||||
go test .
|
||||
|
||||
# the grep in line 2 is to remove misspellings in the spelling dictionary
|
||||
# that trigger false positives!!
|
||||
falsepositives: /scowl-wl
|
||||
cat /scowl-wl/words-US-60.txt | \
|
||||
grep -i -v -E "payed|Tyre|Euclidian|nonoccurence|dependancy|reenforced|accidently|surprize|dependance|idealogy|binominal|causalities|conquerer|withing|casette|analyse|analogue|dialogue|paralyse|catalogue|archaeolog|clarinettist|catalyses|cancell|chisell|ageing|cataloguing" | \
|
||||
misspell -debug -error
|
||||
cat /scowl-wl/words-GB-ise-60.txt | \
|
||||
grep -v -E "payed|nonoccurence|withing" | \
|
||||
misspell -locale=UK -debug -error
|
||||
# cat /scowl-wl/words-GB-ize-60.txt | \
|
||||
# grep -v -E "withing" | \
|
||||
# misspell -debug -error
|
||||
# cat /scowl-wl/words-CA-60.txt | \
|
||||
# grep -v -E "withing" | \
|
||||
# misspell -debug -error
|
||||
|
||||
bench: ## run benchmarks
|
||||
go test -bench '.*'
|
||||
|
||||
clean: ## clean up time
|
||||
rm -rf dist/ bin/
|
||||
go clean ./...
|
||||
git gc --aggressive
|
||||
|
||||
ci: ## run test like travis-ci does, requires docker
|
||||
docker run --rm \
|
||||
-v $(PWD):/go/src/github.com/client9/misspell \
|
||||
-w /go/src/github.com/client9/misspell \
|
||||
${CONTAINER} \
|
||||
make build falsepositives
|
||||
|
||||
docker-build: ## build a docker test image
|
||||
docker build -t ${CONTAINER} .
|
||||
|
||||
docker-pull: ## pull latest test image
|
||||
docker pull ${CONTAINER}
|
||||
|
||||
docker-console: ## log into the test image
|
||||
docker run --rm -it \
|
||||
-v $(PWD):/go/src/github.com/client9/misspell \
|
||||
-w /go/src/github.com/client9/misspell \
|
||||
${CONTAINER} sh
|
||||
|
||||
.git/hooks/pre-commit: scripts/pre-commit.sh
|
||||
cp -f scripts/pre-commit.sh .git/hooks/pre-commit
|
||||
.git/hooks/commit-msg: scripts/commit-msg.sh
|
||||
cp -f scripts/commit-msg.sh .git/hooks/commit-msg
|
||||
hooks: .git/hooks/pre-commit .git/hooks/commit-msg ## install git precommit hooks
|
||||
|
||||
.PHONY: help ci console docker-build bench
|
||||
|
||||
# https://www.client9.com/self-documenting-makefiles/
|
||||
help:
|
||||
@awk -F ':|##' '/^[^\t].+?:.*?##/ {\
|
||||
printf "\033[36m%-30s\033[0m %s\n", $$1, $$NF \
|
||||
}' $(MAKEFILE_LIST)
|
||||
.DEFAULT_GOAL=help
|
||||
.PHONY=help
|
||||
|
406
tools/vendor/github.com/client9/misspell/README.md
generated
vendored
Normal file
@ -0,0 +1,406 @@
|
||||
[Build Status](https://travis-ci.org/client9/misspell) [Go Report Card](https://goreportcard.com/report/github.com/client9/misspell) [GoDoc](https://godoc.org/github.com/client9/misspell) [Coverage](http://gocover.io/github.com/client9/misspell) [License](https://raw.githubusercontent.com/client9/misspell/master/LICENSE)
|
||||
|
||||
Correct commonly misspelled English words... quickly.
|
||||
|
||||
### Install
|
||||
|
||||
|
||||
If you just want a binary and to start using `misspell`:
|
||||
|
||||
```
|
||||
curl -o ./godownloader-misspell.sh https://raw.githubusercontent.com/client9/misspell/master/godownloader-misspell.sh
|
||||
/bin/sh ./godownloader-misspell.sh
|
||||
```
|
||||
|
||||
will install as `./bin/misspell`. You can adjust the download location using the `-b` flag. File a ticket if you want another platform supported.
|
||||
|
||||
|
||||
If you use [Go](https://golang.org/), the best way to run `misspell` is by using [gometalinter](#gometalinter). Otherwise, install `misspell` the old-fashioned way:
|
||||
|
||||
```
|
||||
go get -u github.com/client9/misspell/cmd/misspell
|
||||
```
|
||||
|
||||
and misspell will be in your `GOPATH`
|
||||
|
||||
### Usage
|
||||
|
||||
|
||||
```bash
|
||||
$ misspell all.html your.txt important.md files.go
|
||||
your.txt:42:10 found "langauge" a misspelling of "language"
|
||||
|
||||
# ^ file, line, column
|
||||
```
|
||||
|
||||
```
|
||||
$ misspell -help
|
||||
Usage of misspell:
|
||||
-debug
|
||||
Debug matching, very slow
|
||||
-error
|
||||
Exit with 2 if misspelling found
|
||||
-f string
|
||||
'csv', 'sqlite3' or custom Golang template for output
|
||||
-i string
|
||||
ignore the following corrections, comma separated
|
||||
-j int
|
||||
Number of workers, 0 = number of CPUs
|
||||
-legal
|
||||
Show legal information and exit
|
||||
-locale string
|
||||
Correct spellings using locale perferances for US or UK. Default is to use a neutral variety of English. Setting locale to US will correct the British spelling of 'colour' to 'color'
|
||||
-o string
|
||||
output file or [stderr|stdout|] (default "stdout")
|
||||
-q Do not emit misspelling output
|
||||
-source string
|
||||
Source mode: auto=guess, go=golang source, text=plain or markdown-like text (default "auto")
|
||||
-w Overwrite file with corrections (default is just to display)
|
||||
```
|
||||
|
||||
## FAQ
|
||||
|
||||
* [Automatic Corrections](#correct)
|
||||
* [Converting UK spellings to US](#locale)
|
||||
* [Using pipes and stdin](#stdin)
|
||||
* [Golang special support](#golang)
|
||||
* [gometalinter support](#gometalinter)
|
||||
* [CSV Output](#csv)
|
||||
* [Using SQLite3](#sqlite)
|
||||
* [Changing output format](#output)
|
||||
* [Checking a folder recursively](#recursive)
|
||||
* [Performance](#performance)
|
||||
* [Known Issues](#issues)
|
||||
* [Debugging](#debug)
|
||||
* [False Negatives and missing words](#missing)
|
||||
* [Origin of Word Lists](#words)
|
||||
* [Software License](#license)
|
||||
* [Problem statement](#problem)
|
||||
* [Other spelling correctors](#others)
|
||||
* [Other ideas](#otherideas)
|
||||
|
||||
<a name="correct"></a>
|
||||
### How can I make the corrections automatically?
|
||||
|
||||
Just add the `-w` flag!
|
||||
|
||||
```
|
||||
$ misspell -w all.html your.txt important.md files.go
|
||||
your.txt:9:21:corrected "langauge" to "language"
|
||||
|
||||
# ^booyah
|
||||
```
|
||||
|
||||
<a name="locale"></a>
|
||||
### How do I convert British spellings to American (or vice-versa)?
|
||||
|
||||
Add the `-locale US` flag!
|
||||
|
||||
```bash
|
||||
$ misspell -locale US important.txt
|
||||
important.txt:10:20 found "colour" a misspelling of "color"
|
||||
```
|
||||
|
||||
Add the `-locale UK` flag!
|
||||
|
||||
```bash
|
||||
$ echo "My favorite color is blue" | misspell -locale UK
|
||||
stdin:1:3:found "favorite color" a misspelling of "favourite colour"
|
||||
```
|
||||
|
||||
Help is appreciated as I'm neither British nor an
|
||||
expert in the English language.
|
||||
|
||||
<a name="recursive"></a>
|
||||
### How do you check an entire folder recursively?
|
||||
|
||||
Just list a directory you'd like to check
|
||||
|
||||
```bash
|
||||
misspell .
|
||||
misspell aDirectory anotherDirectory aFile
|
||||
```
|
||||
|
||||
You can also run misspell recursively using the following shell tricks:
|
||||
|
||||
```bash
|
||||
misspell directory/**/*
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```bash
|
||||
find . -type f | xargs misspell
|
||||
```
|
||||
|
||||
You can select a type of file as well. The following example selects all `.txt` files that are *not* in the `vendor` directory:
|
||||
|
||||
```bash
|
||||
find . -type f -name '*.txt' | grep -v vendor/ | xargs misspell -error
|
||||
```
|
||||
|
||||
<a name="stdin"></a>
|
||||
### Can I use pipes or `stdin` for input?
|
||||
|
||||
Yes!
|
||||
|
||||
Print messages to `stderr` only:
|
||||
|
||||
```bash
|
||||
$ echo "zeebra" | misspell
|
||||
stdin:1:0:found "zeebra" a misspelling of "zebra"
|
||||
```
|
||||
|
||||
Print messages to `stderr`, and corrected text to `stdout`:
|
||||
|
||||
```bash
|
||||
$ echo "zeebra" | misspell -w
|
||||
stdin:1:0:corrected "zeebra" to "zebra"
|
||||
zebra
|
||||
```
|
||||
|
||||
Only print the corrected text to `stdout`:
|
||||
|
||||
```bash
|
||||
$ echo "zeebra" | misspell -w -q
|
||||
zebra
|
||||
```
|
||||
|
||||
<a name="golang"></a>
|
||||
### Are there special rules for golang source files?
|
||||
|
||||
Yes! If the file ends in `.go`, then misspell will only check spelling in
|
||||
comments.
|
||||
|
||||
If you want to force a file to be checked as a golang source, use `-source=go`
|
||||
on the command line. Conversely, you can check a golang source as if it were
|
||||
pure text by using `-source=text`. You might want to do this since many
|
||||
variable names have misspellings in them!
|
||||
|
||||
### Can I check only comments in other programming languages?
|
||||
|
||||
I'm told that using `-source=go` works well for ruby, javascript, java, c and
|
||||
c++.
|
||||
|
||||
It doesn't work well for python and bash.
|
||||
|
||||
<a name="gometalinter"></a>
|
||||
### Does this work with gometalinter?
|
||||
|
||||
[gometalinter](https://github.com/alecthomas/gometalinter) runs
|
||||
multiple golang linters. Starting on [2016-06-12](https://github.com/alecthomas/gometalinter/pull/134)
|
||||
gometalinter supports `misspell` natively but it is disabled by default.
|
||||
|
||||
```bash
|
||||
# update your copy of gometalinter
|
||||
go get -u github.com/alecthomas/gometalinter
|
||||
|
||||
# install updates and misspell
|
||||
gometalinter --install --update
|
||||
```
|
||||
|
||||
To use, just enable `misspell`
|
||||
|
||||
```
|
||||
gometalinter --enable misspell ./...
|
||||
```
|
||||
|
||||
Note that gometalinter only checks golang files, and uses the default options
|
||||
of `misspell`.
|
||||
|
||||
You may wish to run this on your plaintext (.txt) and/or markdown files too.
|
||||
|
||||
|
||||
<a name="csv"></a>
|
||||
### How Can I Get CSV Output?
|
||||
|
||||
Using `-f csv`, the output is standard comma-separated values with headers in the first row.
|
||||
|
||||
```
|
||||
misspell -f csv *
|
||||
file,line,column,typo,corrected
|
||||
"README.md",9,22,langauge,language
|
||||
"README.md",47,25,langauge,language
|
||||
```
|
||||
|
||||
<a name="sqlite"></a>
|
||||
### How can I export to SQLite3?
|
||||
|
||||
Using `-f sqlite`, the output is a [sqlite3](https://www.sqlite.org/index.html) dump-file.
|
||||
|
||||
```bash
|
||||
$ misspell -f sqlite * > /tmp/misspell.sql
|
||||
$ cat /tmp/misspell.sql
|
||||
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN TRANSACTION;
|
||||
CREATE TABLE misspell(
|
||||
"file" TEXT,
|
||||
"line" INTEGER,i
|
||||
"column" INTEGER,i
|
||||
"typo" TEXT,
|
||||
"corrected" TEXT
|
||||
);
|
||||
INSERT INTO misspell VALUES("install.txt",202,31,"immediatly","immediately");
|
||||
# etc...
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
```bash
|
||||
$ sqlite3 -init /tmp/misspell.sql :memory: 'select count(*) from misspell'
|
||||
1
|
||||
```
|
||||
|
||||
With some tricks you can directly pipe output to sqlite3 by using `-init /dev/stdin`:
|
||||
|
||||
```
|
||||
misspell -f sqlite * | sqlite3 -init /dev/stdin -column -cmd '.width 60 15' ':memory' \
|
||||
'select substr(file,35),typo,count(*) as count from misspell group by file, typo order by count desc;'
|
||||
```
|
||||
|
||||
<a name="output"></a>
|
||||
### How can I change the output format?
|
||||
|
||||
Using the `-f template` flag you can pass in a
|
||||
[golang text template](https://golang.org/pkg/text/template/) to format the output.
|
||||
|
||||
One can use `printf "%q" VALUE` to safely quote a value.
|
||||
|
||||
The default template is compatible with [gometalinter](https://github.com/alecthomas/gometalinter)
|
||||
```
|
||||
{{ .Filename }}:{{ .Line }}:{{ .Column }}:corrected {{ printf "%q" .Original }} to "{{ printf "%q" .Corrected }}"
|
||||
```
|
||||
|
||||
To just print probable misspellings:
|
||||
|
||||
```
|
||||
-f '{{ .Original }}'
|
||||
```
|
||||
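As another illustrative template (not a built-in format), printing the file, the typo and the suggested fix for a set of markdown files:

```bash
misspell -f '{{ .Filename }}: {{ .Original }} -> {{ .Corrected }}' *.md
```
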
|
||||
<a name="problem"></a>
|
||||
### What problem does this solve?
|
||||
|
||||
This corrects commonly misspelled English words in computer source
|
||||
code, and other text-based formats (`.txt`, `.md`, etc).
|
||||
|
||||
It is designed to run quickly so it can be
|
||||
used as a [pre-commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
|
||||
with minimal burden on the developer.
|
||||
|
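A minimal pre-commit hook sketch, assuming `misspell` is installed and on `PATH` (save as `.git/hooks/pre-commit` and make it executable):

```bash
#!/bin/sh
# Spell-check only the files staged for this commit; abort the commit on a typo.
files=$(git diff --cached --name-only --diff-filter=ACM)
[ -z "$files" ] && exit 0
misspell -error $files
```
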
||||
It does not work with binary formats (e.g. Word, etc).
|
||||
|
||||
It is not a complete spell-checking program nor a grammar checker.
|
||||
|
||||
<a name="others"></a>
|
||||
### What are other misspelling correctors and what's wrong with them?
|
||||
|
||||
Some other misspelling correctors:
|
||||
|
||||
* https://github.com/vlajos/misspell_fixer
|
||||
* https://github.com/lyda/misspell-check
|
||||
* https://github.com/lucasdemarchi/codespell
|
||||
|
||||
They all work but had problems that prevented me from using them at scale:
|
||||
|
||||
* slow, all of the above check one misspelling at a time (i.e. linear) using regexps
|
||||
* not MIT/Apache2 licensed (or equivalent)
|
||||
* have dependencies that don't work for me (python3, bash, linux sed, etc)
|
||||
* don't understand American vs. British English, and sometimes make unwelcome "corrections"
|
||||
|
||||
That said, they might be perfect for you and many have more features
|
||||
than this project!
|
||||
|
||||
<a name="performance"></a>
|
||||
### How fast is it?
|
||||
|
||||
Misspell is easily 100x to 1000x faster than other spelling correctors. You
|
||||
should be able to check and correct 1000 files in under 250ms.
|
||||
|
||||
This uses the mighty power of golang's
|
||||
[strings.Replacer](https://golang.org/pkg/strings/#Replacer) which is
|
||||
an implementation (or variation) of the
|
||||
[Aho–Corasick algorithm](https://en.wikipedia.org/wiki/Aho–Corasick_algorithm).
|
||||
This matches multiple substrings *simultaneously*.
|
||||
|
||||
In addition, this uses multiple CPU cores to work on multiple files.
|
||||
|
||||
<a name="issues"></a>
|
||||
### What problems does it have?
|
||||
|
||||
Unlike the other projects, this doesn't know what a "word" is. There may be
|
||||
more false positives and false negatives due to this. On the other hand, it
|
||||
sometimes catches things others don't.
|
||||
|
||||
Either way, please file bugs and we'll fix them!
|
||||
|
||||
Since it operates in parallel to make corrections, it can be non-obvious to
|
||||
determine exactly what word was corrected.
|
||||
|
||||
<a name="debug"></a>
|
||||
### It's making mistakes. How can I debug?
|
||||
|
||||
Run with the `-debug` flag on the file you want to check. It should then print what word
|
||||
it is trying to correct. Then [file a
|
||||
bug](https://github.com/client9/misspell/issues) describing the problem.
|
||||
Thanks!
|
||||
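For example, a quick debug run might look like this (the file path is just illustrative; `-debug` forces a single worker and is very slow, so keep the input small):

```bash
misspell -debug -source=text docs/notes.md
```
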
|
||||
<a name="missing"></a>
|
||||
### Why is it making mistakes or missing items in golang files?
|
||||
|
||||
The matching function is *case-sensitive*, so variable names that are multiple
|
||||
words either in all-upper or all-lower case can sometimes cause false
|
||||
positives. For instance, a variable named `bodyreader` could trigger a false
|
||||
positive, since the `yrea` in the middle could be corrected to `year`.
|
||||
Other problems happen if the variable name uses an English contraction that
|
||||
should use an apostrophe. The best way of fixing this is to use the
|
||||
[Effective Go naming
|
||||
conventions](https://golang.org/doc/effective_go.html#mixed-caps) and use
|
||||
[camelCase](https://en.wikipedia.org/wiki/CamelCase) for variable names. You
|
||||
can check your code using [golint](https://github.com/golang/lint).
|
||||
|
||||
<a name="license"></a>
|
||||
### What license is this?
|
||||
|
||||
The main code is [MIT](https://github.com/client9/misspell/blob/master/LICENSE).
|
||||
|
||||
Misspell also makes use of the Golang standard library and contains a modified version of Golang's [strings.Replacer](https://golang.org/pkg/strings/#Replacer)
|
||||
which are covered under a [BSD License](https://github.com/golang/go/blob/master/LICENSE). Type `misspell -legal` for more details or see [legal.go](https://github.com/client9/misspell/blob/master/legal.go).
|
||||
|
||||
<a name="words"></a>
|
||||
### Where do the word lists come from?
|
||||
|
||||
It started with a word list from
|
||||
[Wikipedia](https://en.wikipedia.org/wiki/Wikipedia:Lists_of_common_misspellings/For_machines).
|
||||
Unfortunately, this list had to be highly edited as many of the words are
|
||||
obsolete or based on mistakes made on mechanical typewriters (I'm guessing).
|
||||
|
||||
Additional words were added based on actual mistakes seen in
|
||||
the wild (meaning self-generated).
|
||||
|
||||
Variations of UK and US spellings are based on many sources including:
|
||||
|
||||
* http://www.tysto.com/uk-us-spelling-list.html (with heavy editing, many are incorrect)
|
||||
* http://www.oxforddictionaries.com/us/words/american-and-british-spelling-american (excellent site but incomplete)
|
||||
* Diffing US and UK [scowl dictionaries](http://wordlist.aspell.net)
|
||||
|
||||
American English is more accepting of spelling variations than is British
|
||||
English, so "what is American or not" is partly a matter of opinion. Corrections and help are welcome.
|
||||
|
||||
<a name="otherideas"></a>
|
||||
### What are some other enhancements that could be done?
|
||||
|
||||
Here are some ideas for enhancements:
|
||||
|
||||
*Capitalization of proper nouns* could be done (e.g. weekday and month names, country names, language names)
|
||||
|
||||
*Opinionated US spellings* US English has a number of words with alternate
|
||||
spellings. Think [adviser vs.
|
||||
advisor](http://grammarist.com/spelling/adviser-advisor/). While "advisor" is not wrong, the opinionated US
|
||||
locale would correct "advisor" to "adviser".
|
||||
|
||||
*Versioning* Some type of versioning is needed so reporting mistakes and errors is easier.
|
||||
|
||||
*Feedback* Mistakes would be sent to some server for aggregation and feedback review.
|
||||
|
||||
*Contractions and Apostrophes* This would optionally correct "isnt" to
|
||||
"isn't", etc.
|
62
tools/vendor/github.com/client9/misspell/ascii.go
generated
vendored
Normal file
62
tools/vendor/github.com/client9/misspell/ascii.go
generated
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
package misspell
|
||||
|
||||
// ByteToUpper converts an ASCII byte to upper case.
|
||||
// Uses a branchless algorithm
|
||||
func ByteToUpper(x byte) byte {
|
||||
b := byte(0x80) | x
|
||||
c := b - byte(0x61)
|
||||
d := ^(b - byte(0x7b))
|
||||
e := (c & d) & (^x & 0x7f)
|
||||
return x - (e >> 2)
|
||||
}
|
||||
|
||||
// ByteToLower converts an ascii byte to lower case
|
||||
// uses a branchless algorithm
|
||||
func ByteToLower(eax byte) byte {
|
||||
ebx := eax&byte(0x7f) + byte(0x25)
|
||||
ebx = ebx&byte(0x7f) + byte(0x1a)
|
||||
ebx = ((ebx & ^eax) >> 2) & byte(0x20)
|
||||
return eax + ebx
|
||||
}
|
||||
|
||||
// ByteEqualFold does ascii compare, case insensitive
|
||||
func ByteEqualFold(a, b byte) bool {
|
||||
return a == b || ByteToLower(a) == ByteToLower(b)
|
||||
}
|
||||
|
||||
// StringEqualFold ASCII case-insensitive comparison
|
||||
// golang toUpper/toLower for both bytes and strings
|
||||
// appears to be Unicode based which is super slow
|
||||
// based on https://codereview.appspot.com/5180044/patch/14007/21002
|
||||
func StringEqualFold(s1, s2 string) bool {
|
||||
if len(s1) != len(s2) {
|
||||
return false
|
||||
}
|
||||
for i := 0; i < len(s1); i++ {
|
||||
c1 := s1[i]
|
||||
c2 := s2[i]
|
||||
// c1 & c2
|
||||
if c1 != c2 {
|
||||
c1 |= 'a' - 'A'
|
||||
c2 |= 'a' - 'A'
|
||||
if c1 != c2 || c1 < 'a' || c1 > 'z' {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// StringHasPrefixFold is similar to strings.HasPrefix but comparison
|
||||
// is done ignoring ASCII case.
|
||||
// /
|
||||
func StringHasPrefixFold(s1, s2 string) bool {
|
||||
// prefix is bigger than input --> false
|
||||
if len(s1) < len(s2) {
|
||||
return false
|
||||
}
|
||||
if len(s1) == len(s2) {
|
||||
return StringEqualFold(s1, s2)
|
||||
}
|
||||
return StringEqualFold(s1[:len(s2)], s2)
|
||||
}
|
59
tools/vendor/github.com/client9/misspell/case.go
generated
vendored
Normal file
59
tools/vendor/github.com/client9/misspell/case.go
generated
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
package misspell
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// WordCase is an enum of various word casing styles
|
||||
type WordCase int
|
||||
|
||||
// Various WordCase types.. likely to be not correct
|
||||
const (
|
||||
CaseUnknown WordCase = iota
|
||||
CaseLower
|
||||
CaseUpper
|
||||
CaseTitle
|
||||
)
|
||||
|
||||
// CaseStyle returns what case style a word is in
|
||||
func CaseStyle(word string) WordCase {
|
||||
upperCount := 0
|
||||
lowerCount := 0
|
||||
|
||||
// this iterates over BYTES, not runes, which is fine since only ASCII letters are counted
|
||||
for i := 0; i < len(word); i++ {
|
||||
ch := word[i]
|
||||
switch {
|
||||
case ch >= 'a' && ch <= 'z':
|
||||
lowerCount++
|
||||
case ch >= 'A' && ch <= 'Z':
|
||||
upperCount++
|
||||
}
|
||||
}
|
||||
|
||||
switch {
|
||||
case upperCount != 0 && lowerCount == 0:
|
||||
return CaseUpper
|
||||
case upperCount == 0 && lowerCount != 0:
|
||||
return CaseLower
|
||||
case upperCount == 1 && lowerCount > 0 && word[0] >= 'A' && word[0] <= 'Z':
|
||||
return CaseTitle
|
||||
}
|
||||
return CaseUnknown
|
||||
}
|
||||
|
||||
// CaseVariations returns the case variants of a word that should be checked:
|
||||
// If AllUpper or First-Letter-Only is upcased: add the all upper case version
|
||||
// If AllLower, add the original, the title and upcase forms
|
||||
// If Mixed, return the original, and the all upcase form
|
||||
//
|
||||
func CaseVariations(word string, style WordCase) []string {
|
||||
switch style {
|
||||
case CaseLower:
|
||||
return []string{word, strings.ToUpper(word[0:1]) + word[1:], strings.ToUpper(word)}
|
||||
case CaseUpper:
|
||||
return []string{strings.ToUpper(word)}
|
||||
default:
|
||||
return []string{word, strings.ToUpper(word)}
|
||||
}
|
||||
}
|
325
tools/vendor/github.com/client9/misspell/cmd/misspell/main.go
generated
vendored
Normal file
325
tools/vendor/github.com/client9/misspell/cmd/misspell/main.go
generated
vendored
Normal file
@ -0,0 +1,325 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/client9/misspell"
|
||||
)
|
||||
|
||||
var (
|
||||
defaultWrite *template.Template
|
||||
defaultRead *template.Template
|
||||
|
||||
stdout *log.Logger
|
||||
debug *log.Logger
|
||||
|
||||
version = "dev"
|
||||
)
|
||||
|
||||
const (
|
||||
// Note for gometalinter it must be "File:Line:Column: Msg"
|
||||
// note the space between ": Msg"
|
||||
defaultWriteTmpl = `{{ .Filename }}:{{ .Line }}:{{ .Column }}: corrected "{{ .Original }}" to "{{ .Corrected }}"`
|
||||
defaultReadTmpl = `{{ .Filename }}:{{ .Line }}:{{ .Column }}: "{{ .Original }}" is a misspelling of "{{ .Corrected }}"`
|
||||
csvTmpl = `{{ printf "%q" .Filename }},{{ .Line }},{{ .Column }},{{ .Original }},{{ .Corrected }}`
|
||||
csvHeader = `file,line,column,typo,corrected`
|
||||
sqliteTmpl = `INSERT INTO misspell VALUES({{ printf "%q" .Filename }},{{ .Line }},{{ .Column }},{{ printf "%q" .Original }},{{ printf "%q" .Corrected }});`
|
||||
sqliteHeader = `PRAGMA foreign_keys=OFF;
|
||||
BEGIN TRANSACTION;
|
||||
CREATE TABLE misspell(
|
||||
"file" TEXT, "line" INTEGER, "column" INTEGER, "typo" TEXT, "corrected" TEXT
|
||||
);`
|
||||
sqliteFooter = "COMMIT;"
|
||||
)
|
||||
|
||||
func worker(writeit bool, r *misspell.Replacer, mode string, files <-chan string, results chan<- int) {
|
||||
count := 0
|
||||
for filename := range files {
|
||||
orig, err := misspell.ReadTextFile(filename)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
continue
|
||||
}
|
||||
if len(orig) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
debug.Printf("Processing %s", filename)
|
||||
|
||||
var updated string
|
||||
var changes []misspell.Diff
|
||||
|
||||
if mode == "go" {
|
||||
updated, changes = r.ReplaceGo(orig)
|
||||
} else {
|
||||
updated, changes = r.Replace(orig)
|
||||
}
|
||||
|
||||
if len(changes) == 0 {
|
||||
continue
|
||||
}
|
||||
count += len(changes)
|
||||
for _, diff := range changes {
|
||||
// add in filename
|
||||
diff.Filename = filename
|
||||
|
||||
// output can be done by doing multiple goroutines
|
||||
// and can clobber os.Stdout.
|
||||
//
|
||||
// the log package can be used simultaneously from multiple goroutines
|
||||
var output bytes.Buffer
|
||||
if writeit {
|
||||
defaultWrite.Execute(&output, diff)
|
||||
} else {
|
||||
defaultRead.Execute(&output, diff)
|
||||
}
|
||||
|
||||
// goroutine-safe print to os.Stdout
|
||||
stdout.Println(output.String())
|
||||
}
|
||||
|
||||
if writeit {
|
||||
ioutil.WriteFile(filename, []byte(updated), 0)
|
||||
}
|
||||
}
|
||||
results <- count
|
||||
}
|
||||
|
||||
func main() {
|
||||
t := time.Now()
|
||||
var (
|
||||
workers = flag.Int("j", 0, "Number of workers, 0 = number of CPUs")
|
||||
writeit = flag.Bool("w", false, "Overwrite file with corrections (default is just to display)")
|
||||
quietFlag = flag.Bool("q", false, "Do not emit misspelling output")
|
||||
outFlag = flag.String("o", "stdout", "output file or [stderr|stdout|]")
|
||||
format = flag.String("f", "", "'csv', 'sqlite3' or custom Golang template for output")
|
||||
ignores = flag.String("i", "", "ignore the following corrections, comma separated")
|
||||
locale = flag.String("locale", "", "Correct spellings using locale preferences for US or UK. Default is to use a neutral variety of English. Setting locale to US will correct the British spelling of 'colour' to 'color'")
|
||||
mode = flag.String("source", "auto", "Source mode: auto=guess, go=golang source, text=plain or markdown-like text")
|
||||
debugFlag = flag.Bool("debug", false, "Debug matching, very slow")
|
||||
exitError = flag.Bool("error", false, "Exit with 2 if misspelling found")
|
||||
showVersion = flag.Bool("v", false, "Show version and exit")
|
||||
|
||||
showLegal = flag.Bool("legal", false, "Show legal information and exit")
|
||||
)
|
||||
flag.Parse()
|
||||
|
||||
if *showVersion {
|
||||
fmt.Println(version)
|
||||
return
|
||||
}
|
||||
if *showLegal {
|
||||
fmt.Println(misspell.Legal)
|
||||
return
|
||||
}
|
||||
if *debugFlag {
|
||||
debug = log.New(os.Stderr, "DEBUG ", 0)
|
||||
} else {
|
||||
debug = log.New(ioutil.Discard, "", 0)
|
||||
}
|
||||
|
||||
r := misspell.Replacer{
|
||||
Replacements: misspell.DictMain,
|
||||
Debug: *debugFlag,
|
||||
}
|
||||
//
|
||||
// Figure out regional variations
|
||||
//
|
||||
switch strings.ToUpper(*locale) {
|
||||
case "":
|
||||
// nothing
|
||||
case "US":
|
||||
r.AddRuleList(misspell.DictAmerican)
|
||||
case "UK", "GB":
|
||||
r.AddRuleList(misspell.DictBritish)
|
||||
case "NZ", "AU", "CA":
|
||||
log.Fatalf("Help wanted. https://github.com/client9/misspell/issues/6")
|
||||
default:
|
||||
log.Fatalf("Unknown locale: %q", *locale)
|
||||
}
|
||||
|
||||
//
|
||||
// Stuff to ignore
|
||||
//
|
||||
if len(*ignores) > 0 {
|
||||
r.RemoveRule(strings.Split(*ignores, ","))
|
||||
}
|
||||
|
||||
//
|
||||
// Source input mode
|
||||
//
|
||||
switch *mode {
|
||||
case "auto":
|
||||
case "go":
|
||||
case "text":
|
||||
default:
|
||||
log.Fatalf("Mode must be one of auto=guess, go=golang source, text=plain or markdown-like text")
|
||||
}
|
||||
|
||||
//
|
||||
// Custom output
|
||||
//
|
||||
switch {
|
||||
case *format == "csv":
|
||||
tmpl := template.Must(template.New("csv").Parse(csvTmpl))
|
||||
defaultWrite = tmpl
|
||||
defaultRead = tmpl
|
||||
stdout.Println(csvHeader)
|
||||
case *format == "sqlite" || *format == "sqlite3":
|
||||
tmpl := template.Must(template.New("sqlite3").Parse(sqliteTmpl))
|
||||
defaultWrite = tmpl
|
||||
defaultRead = tmpl
|
||||
stdout.Println(sqliteHeader)
|
||||
case len(*format) > 0:
|
||||
t, err := template.New("custom").Parse(*format)
|
||||
if err != nil {
|
||||
log.Fatalf("Unable to compile log format: %s", err)
|
||||
}
|
||||
defaultWrite = t
|
||||
defaultRead = t
|
||||
default: // format == ""
|
||||
defaultWrite = template.Must(template.New("defaultWrite").Parse(defaultWriteTmpl))
|
||||
defaultRead = template.Must(template.New("defaultRead").Parse(defaultReadTmpl))
|
||||
}
|
||||
|
||||
// we can't just write to os.Stdout directly since we have multiple goroutines
|
||||
// all writing at the same time, causing broken output. The log package is goroutine safe.
|
||||
// we set it up so it doesn't use a prefix or include a time stamp.
|
||||
switch {
|
||||
case *quietFlag || *outFlag == "/dev/null":
|
||||
stdout = log.New(ioutil.Discard, "", 0)
|
||||
case *outFlag == "/dev/stderr" || *outFlag == "stderr":
|
||||
stdout = log.New(os.Stderr, "", 0)
|
||||
case *outFlag == "/dev/stdout" || *outFlag == "stdout":
|
||||
stdout = log.New(os.Stdout, "", 0)
|
||||
case *outFlag == "" || *outFlag == "-":
|
||||
stdout = log.New(os.Stdout, "", 0)
|
||||
default:
|
||||
fo, err := os.Create(*outFlag)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create outfile %q: %s", *outFlag, err)
|
||||
}
|
||||
defer fo.Close()
|
||||
stdout = log.New(fo, "", 0)
|
||||
}
|
||||
|
||||
//
|
||||
// Number of Workers / CPU to use
|
||||
//
|
||||
if *workers < 0 {
|
||||
log.Fatalf("-j must >= 0")
|
||||
}
|
||||
if *workers == 0 {
|
||||
*workers = runtime.NumCPU()
|
||||
}
|
||||
if *debugFlag {
|
||||
*workers = 1
|
||||
}
|
||||
|
||||
//
|
||||
// Done with Flags.
|
||||
// Compile the Replacer and process files
|
||||
//
|
||||
r.Compile()
|
||||
|
||||
args := flag.Args()
|
||||
debug.Printf("initialization complete in %v", time.Since(t))
|
||||
|
||||
// stdin/stdout
|
||||
if len(args) == 0 {
|
||||
// if we are working with pipes/stdin/stdout
|
||||
// there is no concurrency, so we can directly
|
||||
// send data to the writers
|
||||
var fileout io.Writer
|
||||
var errout io.Writer
|
||||
switch *writeit {
|
||||
case true:
|
||||
// if we ARE writing the corrected stream
|
||||
// the corrected stream goes to stdout
|
||||
// and the misspelling errors goes to stderr
|
||||
// so we can do something like this:
|
||||
// curl something | misspell -w | gzip > afile.gz
|
||||
fileout = os.Stdout
|
||||
errout = os.Stderr
|
||||
case false:
|
||||
// if we are not writing out the corrected stream
|
||||
// then work just like files. Misspelling errors
|
||||
// are sent to stdout
|
||||
fileout = ioutil.Discard
|
||||
errout = os.Stdout
|
||||
}
|
||||
count := 0
|
||||
next := func(diff misspell.Diff) {
|
||||
count++
|
||||
|
||||
// don't even evaluate the output templates
|
||||
if *quietFlag {
|
||||
return
|
||||
}
|
||||
diff.Filename = "stdin"
|
||||
if *writeit {
|
||||
defaultWrite.Execute(errout, diff)
|
||||
} else {
|
||||
defaultRead.Execute(errout, diff)
|
||||
}
|
||||
errout.Write([]byte{'\n'})
|
||||
|
||||
}
|
||||
err := r.ReplaceReader(os.Stdin, fileout, next)
|
||||
if err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
switch *format {
|
||||
case "sqlite", "sqlite3":
|
||||
fileout.Write([]byte(sqliteFooter))
|
||||
}
|
||||
if count != 0 && *exitError {
|
||||
// error
|
||||
os.Exit(2)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
c := make(chan string, 64)
|
||||
results := make(chan int, *workers)
|
||||
|
||||
for i := 0; i < *workers; i++ {
|
||||
go worker(*writeit, &r, *mode, c, results)
|
||||
}
|
||||
|
||||
for _, filename := range args {
|
||||
filepath.Walk(filename, func(path string, info os.FileInfo, err error) error {
|
||||
if err == nil && !info.IsDir() {
|
||||
c <- path
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
close(c)
|
||||
|
||||
count := 0
|
||||
for i := 0; i < *workers; i++ {
|
||||
changed := <-results
|
||||
count += changed
|
||||
}
|
||||
|
||||
switch *format {
|
||||
case "sqlite", "sqlite3":
|
||||
stdout.Println(sqliteFooter)
|
||||
}
|
||||
|
||||
if count != 0 && *exitError {
|
||||
os.Exit(2)
|
||||
}
|
||||
}
|
318
tools/vendor/github.com/client9/misspell/godownloader-misspell.sh
generated
vendored
Executable file
318
tools/vendor/github.com/client9/misspell/godownloader-misspell.sh
generated
vendored
Executable file
@ -0,0 +1,318 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
# Code generated by godownloader. DO NOT EDIT.
|
||||
#
|
||||
|
||||
usage() {
|
||||
this=$1
|
||||
cat <<EOF
|
||||
$this: download go binaries for client9/misspell
|
||||
|
||||
Usage: $this [-b] bindir [version]
|
||||
-b sets bindir or installation directory, default "./bin"
|
||||
[version] is a version number from
|
||||
https://github.com/client9/misspell/releases
|
||||
If version is missing, then an attempt to find the latest will be found.
|
||||
|
||||
Generated by godownloader
|
||||
https://github.com/goreleaser/godownloader
|
||||
|
||||
EOF
|
||||
exit 2
|
||||
}
|
||||
|
||||
parse_args() {
|
||||
#BINDIR is ./bin unless set be ENV
|
||||
# over-ridden by flag below
|
||||
|
||||
BINDIR=${BINDIR:-./bin}
|
||||
while getopts "b:h?" arg; do
|
||||
case "$arg" in
|
||||
b) BINDIR="$OPTARG" ;;
|
||||
h | \?) usage "$0" ;;
|
||||
esac
|
||||
done
|
||||
shift $((OPTIND - 1))
|
||||
VERSION=$1
|
||||
}
|
||||
# this function wraps all the destructive operations
|
||||
# if a curl|bash cuts off the end of the script due to
|
||||
# network, either nothing will happen or will syntax error
|
||||
# out preventing half-done work
|
||||
execute() {
|
||||
TMPDIR=$(mktmpdir)
|
||||
echo "$PREFIX: downloading ${TARBALL_URL}"
|
||||
http_download "${TMPDIR}/${TARBALL}" "${TARBALL_URL}"
|
||||
|
||||
echo "$PREFIX: verifying checksums"
|
||||
http_download "${TMPDIR}/${CHECKSUM}" "${CHECKSUM_URL}"
|
||||
hash_sha256_verify "${TMPDIR}/${TARBALL}" "${TMPDIR}/${CHECKSUM}"
|
||||
|
||||
(cd "${TMPDIR}" && untar "${TARBALL}")
|
||||
install -d "${BINDIR}"
|
||||
install "${TMPDIR}/${BINARY}" "${BINDIR}/"
|
||||
echo "$PREFIX: installed as ${BINDIR}/${BINARY}"
|
||||
}
|
||||
is_supported_platform() {
|
||||
platform=$1
|
||||
found=1
|
||||
case "$platform" in
|
||||
darwin/amd64) found=0 ;;
|
||||
linux/amd64) found=0 ;;
|
||||
esac
|
||||
case "$platform" in
|
||||
darwin/386) found=1 ;;
|
||||
esac
|
||||
return $found
|
||||
}
|
||||
check_platform() {
|
||||
if is_supported_platform "$PLATFORM"; then
|
||||
# optional logging goes here
|
||||
true
|
||||
else
|
||||
echo "${PREFIX}: platform $PLATFORM is not supported. Make sure this script is up-to-date and file request at https://github.com/${PREFIX}/issues/new"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
adjust_version() {
|
||||
if [ -z "${VERSION}" ]; then
|
||||
echo "$PREFIX: checking GitHub for latest version"
|
||||
VERSION=$(github_last_release "$OWNER/$REPO")
|
||||
fi
|
||||
# if version starts with 'v', remove it
|
||||
VERSION=${VERSION#v}
|
||||
}
|
||||
adjust_format() {
|
||||
# change format (tar.gz or zip) based on ARCH
|
||||
true
|
||||
}
|
||||
adjust_os() {
|
||||
# adjust archive name based on OS
|
||||
case ${OS} in
|
||||
386) OS=32bit ;;
|
||||
amd64) OS=64bit ;;
|
||||
darwin) OS=mac ;;
|
||||
esac
|
||||
true
|
||||
}
|
||||
adjust_arch() {
|
||||
# adjust archive name based on ARCH
|
||||
case ${ARCH} in
|
||||
386) ARCH=32bit ;;
|
||||
amd64) ARCH=64bit ;;
|
||||
darwin) ARCH=mac ;;
|
||||
esac
|
||||
true
|
||||
}
|
||||
|
||||
cat /dev/null <<EOF
|
||||
------------------------------------------------------------------------
|
||||
https://github.com/client9/shlib - portable posix shell functions
|
||||
Public domain - http://unlicense.org
|
||||
https://github.com/client9/shlib/blob/master/LICENSE.md
|
||||
but credit (and pull requests) appreciated.
|
||||
------------------------------------------------------------------------
|
||||
EOF
|
||||
is_command() {
|
||||
command -v "$1" >/dev/null
|
||||
}
|
||||
uname_os() {
|
||||
os=$(uname -s | tr '[:upper:]' '[:lower:]')
|
||||
echo "$os"
|
||||
}
|
||||
uname_arch() {
|
||||
arch=$(uname -m)
|
||||
case $arch in
|
||||
x86_64) arch="amd64" ;;
|
||||
x86) arch="386" ;;
|
||||
i686) arch="386" ;;
|
||||
i386) arch="386" ;;
|
||||
aarch64) arch="arm64" ;;
|
||||
armv5*) arch="arm5" ;;
|
||||
armv6*) arch="arm6" ;;
|
||||
armv7*) arch="arm7" ;;
|
||||
esac
|
||||
echo ${arch}
|
||||
}
|
||||
uname_os_check() {
|
||||
os=$(uname_os)
|
||||
case "$os" in
|
||||
darwin) return 0 ;;
|
||||
dragonfly) return 0 ;;
|
||||
freebsd) return 0 ;;
|
||||
linux) return 0 ;;
|
||||
android) return 0 ;;
|
||||
nacl) return 0 ;;
|
||||
netbsd) return 0 ;;
|
||||
openbsd) return 0 ;;
|
||||
plan9) return 0 ;;
|
||||
solaris) return 0 ;;
|
||||
windows) return 0 ;;
|
||||
esac
|
||||
echo "$0: uname_os_check: internal error '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib"
|
||||
return 1
|
||||
}
|
||||
uname_arch_check() {
|
||||
arch=$(uname_arch)
|
||||
case "$arch" in
|
||||
386) return 0 ;;
|
||||
amd64) return 0 ;;
|
||||
arm64) return 0 ;;
|
||||
armv5) return 0 ;;
|
||||
armv6) return 0 ;;
|
||||
armv7) return 0 ;;
|
||||
ppc64) return 0 ;;
|
||||
ppc64le) return 0 ;;
|
||||
mips) return 0 ;;
|
||||
mipsle) return 0 ;;
|
||||
mips64) return 0 ;;
|
||||
mips64le) return 0 ;;
|
||||
s390x) return 0 ;;
|
||||
amd64p32) return 0 ;;
|
||||
esac
|
||||
echo "$0: uname_arch_check: internal error '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib"
|
||||
return 1
|
||||
}
|
||||
untar() {
|
||||
tarball=$1
|
||||
case "${tarball}" in
|
||||
*.tar.gz | *.tgz) tar -xzf "${tarball}" ;;
|
||||
*.tar) tar -xf "${tarball}" ;;
|
||||
*.zip) unzip "${tarball}" ;;
|
||||
*)
|
||||
echo "Unknown archive format for ${tarball}"
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
mktmpdir() {
|
||||
test -z "$TMPDIR" && TMPDIR="$(mktemp -d)"
|
||||
mkdir -p "${TMPDIR}"
|
||||
echo "${TMPDIR}"
|
||||
}
|
||||
http_download() {
|
||||
local_file=$1
|
||||
source_url=$2
|
||||
header=$3
|
||||
headerflag=''
|
||||
destflag=''
|
||||
if is_command curl; then
|
||||
cmd='curl --fail -sSL'
|
||||
destflag='-o'
|
||||
headerflag='-H'
|
||||
elif is_command wget; then
|
||||
cmd='wget -q'
|
||||
destflag='-O'
|
||||
headerflag='--header'
|
||||
else
|
||||
echo "http_download: unable to find wget or curl"
|
||||
return 1
|
||||
fi
|
||||
if [ -z "$header" ]; then
|
||||
$cmd $destflag "$local_file" "$source_url"
|
||||
else
|
||||
$cmd $headerflag "$header" $destflag "$local_file" "$source_url"
|
||||
fi
|
||||
}
|
||||
github_api() {
|
||||
local_file=$1
|
||||
source_url=$2
|
||||
header=""
|
||||
case "$source_url" in
|
||||
https://api.github.com*)
|
||||
test -z "$GITHUB_TOKEN" || header="Authorization: token $GITHUB_TOKEN"
|
||||
;;
|
||||
esac
|
||||
http_download "$local_file" "$source_url" "$header"
|
||||
}
|
||||
github_last_release() {
|
||||
owner_repo=$1
|
||||
giturl="https://api.github.com/repos/${owner_repo}/releases/latest"
|
||||
html=$(github_api - "$giturl")
|
||||
version=$(echo "$html" | grep -m 1 "\"tag_name\":" | cut -f4 -d'"')
|
||||
test -z "$version" && return 1
|
||||
echo "$version"
|
||||
}
|
||||
hash_sha256() {
|
||||
TARGET=${1:-/dev/stdin}
|
||||
if is_command gsha256sum; then
|
||||
hash=$(gsha256sum "$TARGET") || return 1
|
||||
echo "$hash" | cut -d ' ' -f 1
|
||||
elif is_command sha256sum; then
|
||||
hash=$(sha256sum "$TARGET") || return 1
|
||||
echo "$hash" | cut -d ' ' -f 1
|
||||
elif is_command shasum; then
|
||||
hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1
|
||||
echo "$hash" | cut -d ' ' -f 1
|
||||
elif is_command openssl; then
|
||||
hash=$(openssl dgst -sha256 "$TARGET") || return 1
|
||||
echo "$hash" | cut -d ' ' -f 2
|
||||
else
|
||||
echo "hash_sha256: unable to find command to compute sha-256 hash"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
hash_sha256_verify() {
|
||||
TARGET=$1
|
||||
checksums=$2
|
||||
if [ -z "$checksums" ]; then
|
||||
echo "hash_sha256_verify: checksum file not specified in arg2"
|
||||
return 1
|
||||
fi
|
||||
BASENAME=${TARGET##*/}
|
||||
want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1)
|
||||
if [ -z "$want" ]; then
|
||||
echo "hash_sha256_verify: unable to find checksum for '${TARGET}' in '${checksums}'"
|
||||
return 1
|
||||
fi
|
||||
got=$(hash_sha256 "$TARGET")
|
||||
if [ "$want" != "$got" ]; then
|
||||
echo "hash_sha256_verify: checksum for '$TARGET' did not verify ${want} vs $got"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
cat /dev/null <<EOF
|
||||
------------------------------------------------------------------------
|
||||
End of functions from https://github.com/client9/shlib
|
||||
------------------------------------------------------------------------
|
||||
EOF
|
||||
|
||||
OWNER=client9
|
||||
REPO=misspell
|
||||
BINARY=misspell
|
||||
FORMAT=tar.gz
|
||||
OS=$(uname_os)
|
||||
ARCH=$(uname_arch)
|
||||
PREFIX="$OWNER/$REPO"
|
||||
PLATFORM="${OS}/${ARCH}"
|
||||
GITHUB_DOWNLOAD=https://github.com/${OWNER}/${REPO}/releases/download
|
||||
|
||||
uname_os_check "$OS"
|
||||
uname_arch_check "$ARCH"
|
||||
|
||||
parse_args "$@"
|
||||
|
||||
check_platform
|
||||
|
||||
adjust_version
|
||||
|
||||
adjust_format
|
||||
|
||||
adjust_os
|
||||
|
||||
adjust_arch
|
||||
|
||||
echo "$PREFIX: found version ${VERSION} for ${OS}/${ARCH}"
|
||||
|
||||
NAME=${BINARY}_${VERSION}_${OS}_${ARCH}
|
||||
TARBALL=${NAME}.${FORMAT}
|
||||
TARBALL_URL=${GITHUB_DOWNLOAD}/v${VERSION}/${TARBALL}
|
||||
CHECKSUM=${REPO}_checksums.txt
|
||||
CHECKSUM_URL=${GITHUB_DOWNLOAD}/v${VERSION}/${CHECKSUM}
|
||||
|
||||
# Adjust binary name if windows
|
||||
if [ "$OS" = "windows" ]; then
|
||||
BINARY="${BINARY}.exe"
|
||||
fi
|
||||
|
||||
execute
|
26
tools/vendor/github.com/client9/misspell/goreleaser.yml
generated
vendored
Normal file
26
tools/vendor/github.com/client9/misspell/goreleaser.yml
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
# goreleaser.yml
|
||||
# https://github.com/goreleaser/goreleaser
|
||||
build:
|
||||
main: cmd/misspell/main.go
|
||||
binary: misspell
|
||||
ldflags: -s -w -X main.version={{.Version}}
|
||||
goos:
|
||||
- darwin
|
||||
- linux
|
||||
goarch:
|
||||
- amd64
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
ignore:
|
||||
- goos: darwin
|
||||
goarch: 386
|
||||
|
||||
archive:
|
||||
name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"
|
||||
replacements:
|
||||
amd64: 64bit
|
||||
386: 32bit
|
||||
darwin: mac
|
||||
|
||||
snapshot:
|
||||
name_template: SNAPSHOT-{{.Commit}}
|
47
tools/vendor/github.com/client9/misspell/legal.go
generated
vendored
Normal file
47
tools/vendor/github.com/client9/misspell/legal.go
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
package misspell
|
||||
|
||||
// Legal provides licensing info.
|
||||
const Legal = `
|
||||
Except where noted below, the source code for misspell is
|
||||
copyright Nick Galbreath and distribution is allowed under an
|
||||
MIT license. See the following for details:
|
||||
|
||||
* https://github.com/client9/misspell/blob/master/LICENSE
|
||||
* https://tldrlegal.com/license/mit-license
|
||||
|
||||
Misspell makes uses of the Golang standard library and
|
||||
contains a modified version of Golang's strings.Replacer
|
||||
which are covered under a BSD License.
|
||||
|
||||
* https://golang.org/pkg/strings/#Replacer
|
||||
* https://golang.org/src/strings/replace.go
|
||||
* https://github.com/golang/go/blob/master/LICENSE
|
||||
|
||||
Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
`
|
210
tools/vendor/github.com/client9/misspell/mime.go
generated
vendored
Normal file
210
tools/vendor/github.com/client9/misspell/mime.go
generated
vendored
Normal file
@ -0,0 +1,210 @@
|
||||
package misspell
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// The number of possible binary formats is very large
|
||||
// items that might be checked into a repo or be an
|
||||
// artifact of a build. Additions welcome.
|
||||
//
|
||||
// Golang's internal table is very small and can't be
|
||||
// relied on. Even then things like ".js" have a mime
|
||||
// type of "application/javascript" which isn't very helpful.
|
||||
// "[x]" means we have sniff test and suffix test should be eliminated
|
||||
var binary = map[string]bool{
|
||||
".a": true, // [ ] archive
|
||||
".bin": true, // [ ] binary
|
||||
".bz2": true, // [ ] compression
|
||||
".class": true, // [x] Java class file
|
||||
".dll": true, // [ ] shared library
|
||||
".exe": true, // [ ] binary
|
||||
".gif": true, // [ ] image
|
||||
".gpg": true, // [x] text, but really all base64
|
||||
".gz": true, // [ ] compression
|
||||
".ico": true, // [ ] image
|
||||
".jar": true, // [x] archive
|
||||
".jpeg": true, // [ ] image
|
||||
".jpg": true, // [ ] image
|
||||
".mp3": true, // [ ] audio
|
||||
".mp4": true, // [ ] video
|
||||
".mpeg": true, // [ ] video
|
||||
".o": true, // [ ] object file
|
||||
".pdf": true, // [x] pdf
|
||||
".png": true, // [x] image
|
||||
".pyc": true, // [ ] Python bytecode
|
||||
".pyo": true, // [ ] Python bytecode
|
||||
".so": true, // [x] shared library
|
||||
".swp": true, // [ ] vim swap file
|
||||
".tar": true, // [ ] archive
|
||||
".tiff": true, // [ ] image
|
||||
".woff": true, // [ ] font
|
||||
".woff2": true, // [ ] font
|
||||
".xz": true, // [ ] compression
|
||||
".z": true, // [ ] compression
|
||||
".zip": true, // [x] archive
|
||||
}
|
||||
|
||||
// isBinaryFilename returns true if the file is likely to be binary
|
||||
//
|
||||
// Better heuristics could be done here, in particular a binary
|
||||
// file is unlikely to be UTF-8 encoded. However this is cheap
|
||||
// and will solve the immediate need of making sure common
|
||||
// binary formats are not corrupted by mistake.
|
||||
func isBinaryFilename(s string) bool {
|
||||
return binary[strings.ToLower(filepath.Ext(s))]
|
||||
}
|
||||
|
||||
var scm = map[string]bool{
|
||||
".bzr": true,
|
||||
".git": true,
|
||||
".hg": true,
|
||||
".svn": true,
|
||||
"CVS": true,
|
||||
}
|
||||
|
||||
// isSCMPath returns true if the path is likely part of a (private) SCM
|
||||
// directory. E.g. ./.git/something = true
|
||||
func isSCMPath(s string) bool {
|
||||
// hack for .git/COMMIT_EDITMSG and .git/TAG_EDITMSG
|
||||
// normally we don't look at anything in .git
|
||||
// but COMMIT_EDITMSG and TAG_EDITMSG are used as
|
||||
// temp files for git commits. Allowing misspell to inspect
|
||||
// these files allows for commit-msg hooks
|
||||
// https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
|
||||
if strings.Contains(filepath.Base(s), "EDITMSG") {
|
||||
return false
|
||||
}
|
||||
parts := strings.Split(s, string(filepath.Separator))
|
||||
for _, dir := range parts {
|
||||
if scm[dir] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var magicHeaders = [][]byte{
|
||||
// Issue #68
|
||||
// PGP messages and signatures are "text" but really just
|
||||
// blobs of base64-text and should not be misspell-checked
|
||||
[]byte("-----BEGIN PGP MESSAGE-----"),
|
||||
[]byte("-----BEGIN PGP SIGNATURE-----"),
|
||||
|
||||
// ELF
|
||||
{0x7f, 0x45, 0x4c, 0x46},
|
||||
|
||||
// Postscript
|
||||
{0x25, 0x21, 0x50, 0x53},
|
||||
|
||||
// PDF
|
||||
{0x25, 0x50, 0x44, 0x46},
|
||||
|
||||
// Java class file
|
||||
// https://en.wikipedia.org/wiki/Java_class_file
|
||||
{0xCA, 0xFE, 0xBA, 0xBE},
|
||||
|
||||
// PNG
|
||||
// https://en.wikipedia.org/wiki/Portable_Network_Graphics
|
||||
{0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a},
|
||||
|
||||
// ZIP, JAR, ODF, OOXML
|
||||
{0x50, 0x4B, 0x03, 0x04},
|
||||
{0x50, 0x4B, 0x05, 0x06},
|
||||
{0x50, 0x4B, 0x07, 0x08},
|
||||
}
|
||||
|
||||
func isTextFile(raw []byte) bool {
|
||||
for _, magic := range magicHeaders {
|
||||
if bytes.HasPrefix(raw, magic) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// allow any text/ type with utf-8 encoding
|
||||
// DetectContentType sometimes returns charset=utf-16 for XML stuff
|
||||
// in which case ignore.
|
||||
mime := http.DetectContentType(raw)
|
||||
return strings.HasPrefix(mime, "text/") && strings.HasSuffix(mime, "charset=utf-8")
|
||||
}
|
||||
|
||||
// ReadTextFile returns the contents of a file, first testing if it is a text file
|
||||
// returns ("", nil) if not a text file
|
||||
// returns ("", error) if error
|
||||
// returns (string, nil) if text
|
||||
//
|
||||
// unfortunately, in the worst case, this does
|
||||
// 1 stat
|
||||
// 1 open,read,close of 512 bytes
|
||||
// 1 more stat,open, read everything, close (via ioutil.ReadAll)
|
||||
// This could be kinder to the filesystem.
|
||||
//
|
||||
// This uses some heuristics of the file's extension (e.g. .zip, .txt) and
|
||||
// uses a sniffer to determine if the file is text or not.
|
||||
// Using file extensions isn't great, but probably
|
||||
// good enough for real-world use.
|
||||
// Golang's built-in sniffer is problematic for different reasons. It's
|
||||
// optimized for HTML, and is very limited in detection. It would be good
|
||||
// to explicitly add some tests for ELF/DWARF formats to make sure we never
|
||||
// corrupt binary files.
|
||||
func ReadTextFile(filename string) (string, error) {
|
||||
if isBinaryFilename(filename) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if isSCMPath(filename) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
fstat, err := os.Stat(filename)
|
||||
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Unable to stat %q: %s", filename, err)
|
||||
}
|
||||
|
||||
// directory: nothing to do.
|
||||
if fstat.IsDir() {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
// avoid reading in multi-gig files
|
||||
// if input is large, read the first 512 bytes to sniff type
|
||||
// if not-text, then exit
|
||||
isText := false
|
||||
if fstat.Size() > 50000 {
|
||||
fin, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Unable to open large file %q: %s", filename, err)
|
||||
}
|
||||
defer fin.Close()
|
||||
buf := make([]byte, 512)
|
||||
_, err = io.ReadFull(fin, buf)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Unable to read 512 bytes from %q: %s", filename, err)
|
||||
}
|
||||
if !isTextFile(buf) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
// set so we don't double check this file
|
||||
isText = true
|
||||
}
|
||||
|
||||
// read in whole file
|
||||
raw, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Unable to read all %q: %s", filename, err)
|
||||
}
|
||||
|
||||
if !isText && !isTextFile(raw) {
|
||||
return "", nil
|
||||
}
|
||||
return string(raw), nil
|
||||
}
|
85
tools/vendor/github.com/client9/misspell/notwords.go
generated
vendored
Normal file
85
tools/vendor/github.com/client9/misspell/notwords.go
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
package misspell
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
reEmail = regexp.MustCompile(`[a-zA-Z0-9_.%+-]+@[a-zA-Z0-9-.]+\.[a-zA-Z]{2,6}[^a-zA-Z]`)
|
||||
reHost = regexp.MustCompile(`[a-zA-Z0-9-.]+\.[a-zA-Z]+`)
|
||||
reBackslash = regexp.MustCompile(`\\[a-z]`)
|
||||
)
|
||||
|
||||
// RemovePath attempts to strip away embedded file system paths, e.g.
|
||||
// /foo/bar or /static/myimg.png
|
||||
//
|
||||
// TODO: windows style
|
||||
//
|
||||
func RemovePath(s string) string {
|
||||
out := bytes.Buffer{}
|
||||
var idx int
|
||||
for len(s) > 0 {
|
||||
if idx = strings.IndexByte(s, '/'); idx == -1 {
|
||||
out.WriteString(s)
|
||||
break
|
||||
}
|
||||
|
||||
if idx > 0 {
|
||||
idx--
|
||||
}
|
||||
|
||||
var chclass string
|
||||
switch s[idx] {
|
||||
case '/', ' ', '\n', '\t', '\r':
|
||||
chclass = " \n\r\t"
|
||||
case '[':
|
||||
chclass = "]\n"
|
||||
case '(':
|
||||
chclass = ")\n"
|
||||
default:
|
||||
out.WriteString(s[:idx+2])
|
||||
s = s[idx+2:]
|
||||
continue
|
||||
}
|
||||
|
||||
endx := strings.IndexAny(s[idx+1:], chclass)
|
||||
if endx != -1 {
|
||||
out.WriteString(s[:idx+1])
|
||||
out.Write(bytes.Repeat([]byte{' '}, endx))
|
||||
s = s[idx+endx+1:]
|
||||
} else {
|
||||
out.WriteString(s)
|
||||
break
|
||||
}
|
||||
}
|
||||
return out.String()
|
||||
}
|
||||
|
||||
// replaceWithBlanks returns a string with the same number of spaces as the input
|
||||
func replaceWithBlanks(s string) string {
|
||||
return strings.Repeat(" ", len(s))
|
||||
}
|
||||
|
||||
// RemoveEmail removes email-like strings, e.g. "nickg+junk@xfoobar.com", "nickg@xyz.abc123.biz"
|
||||
func RemoveEmail(s string) string {
|
||||
return reEmail.ReplaceAllStringFunc(s, replaceWithBlanks)
|
||||
}
|
||||
|
||||
// RemoveHost removes host-like strings "foobar.com" "abc123.fo1231.biz"
|
||||
func RemoveHost(s string) string {
|
||||
return reHost.ReplaceAllStringFunc(s, replaceWithBlanks)
|
||||
}
|
||||
|
||||
// removeBackslashEscapes removes characters that are preceded by a backslash,
|
||||
// commonly found in printf format strings, e.g. "\nto"
|
||||
func removeBackslashEscapes(s string) string {
|
||||
return reBackslash.ReplaceAllStringFunc(s, replaceWithBlanks)
|
||||
}
|
||||
|
||||
// RemoveNotWords blanks out all the not words
|
||||
func RemoveNotWords(s string) string {
|
||||
// do most selective/specific first
|
||||
return removeBackslashEscapes(RemoveHost(RemoveEmail(RemovePath(StripURL(s)))))
|
||||
}
|
246
tools/vendor/github.com/client9/misspell/replace.go
generated
vendored
Normal file
246
tools/vendor/github.com/client9/misspell/replace.go
generated
vendored
Normal file
@ -0,0 +1,246 @@
|
||||
package misspell
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/scanner"
|
||||
)
|
||||
|
||||
func max(x, y int) int {
|
||||
if x > y {
|
||||
return x
|
||||
}
|
||||
return y
|
||||
}
|
||||
|
||||
func inArray(haystack []string, needle string) bool {
|
||||
for _, word := range haystack {
|
||||
if needle == word {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var wordRegexp = regexp.MustCompile(`[a-zA-Z0-9']+`)
|
||||
|
||||
// Diff is datastructure showing what changed in a single line
|
||||
type Diff struct {
|
||||
Filename string
|
||||
FullLine string
|
||||
Line int
|
||||
Column int
|
||||
Original string
|
||||
Corrected string
|
||||
}
|
||||
|
||||
// Replacer is the main struct for spelling correction
|
||||
type Replacer struct {
|
||||
Replacements []string
|
||||
Debug bool
|
||||
engine *StringReplacer
|
||||
corrected map[string]string
|
||||
}
|
||||
|
||||
// New creates a new default Replacer using the main rule list
|
||||
func New() *Replacer {
|
||||
r := Replacer{
|
||||
Replacements: DictMain,
|
||||
}
|
||||
r.Compile()
|
||||
return &r
|
||||
}
|
||||
|
||||
// RemoveRule deletes existing rules.
|
||||
// TODO: make inplace to save memory
|
||||
func (r *Replacer) RemoveRule(ignore []string) {
|
||||
newwords := make([]string, 0, len(r.Replacements))
|
||||
for i := 0; i < len(r.Replacements); i += 2 {
|
||||
if inArray(ignore, r.Replacements[i]) {
|
||||
continue
|
||||
}
|
||||
newwords = append(newwords, r.Replacements[i:i+2]...)
|
||||
}
|
||||
r.engine = nil
|
||||
r.Replacements = newwords
|
||||
}
|
||||
|
||||
// AddRuleList appends new rules.
|
||||
// Input is in the same form as strings.Replacer: [old1, new1, old2, new2, ...]
|
||||
// Note: does not check for duplicates
|
||||
func (r *Replacer) AddRuleList(additions []string) {
|
||||
r.engine = nil
|
||||
r.Replacements = append(r.Replacements, additions...)
|
||||
}
|
||||
|
||||
// Compile compiles the rules. Required before using the Replace functions
|
||||
func (r *Replacer) Compile() {
|
||||
|
||||
r.corrected = make(map[string]string, len(r.Replacements)/2)
|
||||
for i := 0; i < len(r.Replacements); i += 2 {
|
||||
r.corrected[r.Replacements[i]] = r.Replacements[i+1]
|
||||
}
|
||||
r.engine = NewStringReplacer(r.Replacements...)
|
||||
}
|
||||
|
||||
/*
|
||||
line1 and line2 are different
|
||||
extract words from each line1
|
||||
|
||||
replace word -> newword
|
||||
if word == new-word
|
||||
continue
|
||||
if new-word in list of replacements
|
||||
continue
|
||||
new word not original, and not in list of replacements
|
||||
some substring got mixed up. Undo.
|
||||
*/
|
||||
func (r *Replacer) recheckLine(s string, lineNum int, buf io.Writer, next func(Diff)) {
|
||||
first := 0
|
||||
redacted := RemoveNotWords(s)
|
||||
|
||||
idx := wordRegexp.FindAllStringIndex(redacted, -1)
|
||||
for _, ab := range idx {
|
||||
word := s[ab[0]:ab[1]]
|
||||
newword := r.engine.Replace(word)
|
||||
if newword == word {
|
||||
// no replacement done
|
||||
continue
|
||||
}
|
||||
|
||||
// ignore camelCase words
|
||||
// https://github.com/client9/misspell/issues/113
|
||||
if CaseStyle(word) == CaseUnknown {
|
||||
continue
|
||||
}
|
||||
|
||||
if StringEqualFold(r.corrected[strings.ToLower(word)], newword) {
|
||||
// word got corrected into something we know
|
||||
io.WriteString(buf, s[first:ab[0]])
|
||||
io.WriteString(buf, newword)
|
||||
first = ab[1]
|
||||
next(Diff{
|
||||
FullLine: s,
|
||||
Line: lineNum,
|
||||
Original: word,
|
||||
Corrected: newword,
|
||||
Column: ab[0],
|
||||
})
|
||||
continue
|
||||
}
|
||||
// Word got corrected into something unknown. Ignore it
|
||||
}
|
||||
io.WriteString(buf, s[first:])
|
||||
}
|
||||
|
||||
// ReplaceGo is a specialized routine for correcting Golang source
|
||||
// files. Currently only checks comments, not identifiers for
|
||||
// spelling.
|
||||
func (r *Replacer) ReplaceGo(input string) (string, []Diff) {
|
||||
var s scanner.Scanner
|
||||
s.Init(strings.NewReader(input))
|
||||
s.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanChars | scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments
|
||||
lastPos := 0
|
||||
output := ""
|
||||
Loop:
|
||||
for {
|
||||
switch s.Scan() {
|
||||
case scanner.Comment:
|
||||
origComment := s.TokenText()
|
||||
newComment := r.engine.Replace(origComment)
|
||||
|
||||
if origComment != newComment {
|
||||
// s.Pos().Offset is the end of the current token
|
||||
// subtract len(origComment) to get the start of the token
|
||||
offset := s.Pos().Offset
|
||||
output = output + input[lastPos:offset-len(origComment)] + newComment
|
||||
lastPos = offset
|
||||
}
|
||||
case scanner.EOF:
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
|
||||
if lastPos == 0 {
|
||||
// no changes, no copies
|
||||
return input, nil
|
||||
}
|
||||
if lastPos < len(input) {
|
||||
output = output + input[lastPos:]
|
||||
}
|
||||
diffs := make([]Diff, 0, 8)
|
||||
buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100))
|
||||
// faster than making a bytes.Buffer and bufio.ReadString
|
||||
outlines := strings.SplitAfter(output, "\n")
|
||||
inlines := strings.SplitAfter(input, "\n")
|
||||
for i := 0; i < len(inlines); i++ {
|
||||
if inlines[i] == outlines[i] {
|
||||
buf.WriteString(outlines[i])
|
||||
continue
|
||||
}
|
||||
r.recheckLine(inlines[i], i+1, buf, func(d Diff) {
|
||||
diffs = append(diffs, d)
|
||||
})
|
||||
}
|
||||
|
||||
return buf.String(), diffs
|
||||
|
||||
}
|
||||
|
||||
// Replace corrects misspellings in input, returning the corrected version
|
||||
// along with a list of diffs.
|
||||
func (r *Replacer) Replace(input string) (string, []Diff) {
|
||||
output := r.engine.Replace(input)
|
||||
if input == output {
|
||||
return input, nil
|
||||
}
|
||||
diffs := make([]Diff, 0, 8)
|
||||
buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100))
|
||||
// faster than making a bytes.Buffer and bufio.ReadString
|
||||
outlines := strings.SplitAfter(output, "\n")
|
||||
inlines := strings.SplitAfter(input, "\n")
|
||||
for i := 0; i < len(inlines); i++ {
|
||||
if inlines[i] == outlines[i] {
|
||||
buf.WriteString(outlines[i])
|
||||
continue
|
||||
}
|
||||
r.recheckLine(inlines[i], i+1, buf, func(d Diff) {
|
||||
diffs = append(diffs, d)
|
||||
})
|
||||
}
|
||||
|
||||
return buf.String(), diffs
|
||||
}
|
||||
|
||||
// ReplaceReader applies spelling corrections to a reader stream. Diffs are
|
||||
// emitted through a callback.
|
||||
func (r *Replacer) ReplaceReader(raw io.Reader, w io.Writer, next func(Diff)) error {
|
||||
var (
|
||||
err error
|
||||
line string
|
||||
lineNum int
|
||||
)
|
||||
reader := bufio.NewReader(raw)
|
||||
for err == nil {
|
||||
lineNum++
|
||||
line, err = reader.ReadString('\n')
|
||||
|
||||
// if it's EOF, then line has the last line
|
||||
// don't like the check of err here and
|
||||
// in for loop
|
||||
if err != nil && err != io.EOF {
|
||||
return err
|
||||
}
|
||||
// easily 5x faster than regexp+map
|
||||
if line == r.engine.Replace(line) {
|
||||
io.WriteString(w, line)
|
||||
continue
|
||||
}
|
||||
// but it can be inaccurate, so we need to double check
|
||||
r.recheckLine(line, lineNum, w, next)
|
||||
}
|
||||
return nil
|
||||
}
|
336
tools/vendor/github.com/client9/misspell/stringreplacer.go
generated
vendored
Normal file
336
tools/vendor/github.com/client9/misspell/stringreplacer.go
generated
vendored
Normal file
@ -0,0 +1,336 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package misspell
|
||||
|
||||
import (
|
||||
"io"
|
||||
// "log"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// StringReplacer replaces a list of strings with replacements.
|
||||
// It is safe for concurrent use by multiple goroutines.
|
||||
type StringReplacer struct {
|
||||
r replacer
|
||||
}
|
||||
|
||||
// replacer is the interface that a replacement algorithm needs to implement.
|
||||
type replacer interface {
|
||||
Replace(s string) string
|
||||
WriteString(w io.Writer, s string) (n int, err error)
|
||||
}
|
||||
|
||||
// NewStringReplacer returns a new Replacer from a list of old, new string pairs.
|
||||
// Replacements are performed in order, without overlapping matches.
|
||||
func NewStringReplacer(oldnew ...string) *StringReplacer {
|
||||
if len(oldnew)%2 == 1 {
|
||||
panic("strings.NewReplacer: odd argument count")
|
||||
}
|
||||
|
||||
return &StringReplacer{r: makeGenericReplacer(oldnew)}
|
||||
}
|
||||
|
||||
// Replace returns a copy of s with all replacements performed.
|
||||
func (r *StringReplacer) Replace(s string) string {
|
||||
return r.r.Replace(s)
|
||||
}
|
||||
|
||||
// WriteString writes s to w with all replacements performed.
|
||||
func (r *StringReplacer) WriteString(w io.Writer, s string) (n int, err error) {
|
||||
return r.r.WriteString(w, s)
|
||||
}
|
||||
|
||||
// trieNode is a node in a lookup trie for prioritized key/value pairs. Keys
|
||||
// and values may be empty. For example, the trie containing keys "ax", "ay",
|
||||
// "bcbc", "x" and "xy" could have eight nodes:
|
||||
//
|
||||
// n0 -
|
||||
// n1 a-
|
||||
// n2 .x+
|
||||
// n3 .y+
|
||||
// n4 b-
|
||||
// n5 .cbc+
|
||||
// n6 x+
|
||||
// n7 .y+
|
||||
//
|
||||
// n0 is the root node, and its children are n1, n4 and n6; n1's children are
|
||||
// n2 and n3; n4's child is n5; n6's child is n7. Nodes n0, n1 and n4 (marked
|
||||
// with a trailing "-") are partial keys, and nodes n2, n3, n5, n6 and n7
|
||||
// (marked with a trailing "+") are complete keys.
|
||||
type trieNode struct {
|
||||
// value is the value of the trie node's key/value pair. It is empty if
|
||||
// this node is not a complete key.
|
||||
value string
|
||||
// priority is the priority (higher is more important) of the trie node's
|
||||
// key/value pair; keys are not necessarily matched shortest- or longest-
|
||||
// first. Priority is positive if this node is a complete key, and zero
|
||||
// otherwise. In the example above, positive/zero priorities are marked
|
||||
// with a trailing "+" or "-".
|
||||
priority int
|
||||
|
||||
// A trie node may have zero, one or more child nodes:
|
||||
// * if the remaining fields are zero, there are no children.
|
||||
// * if prefix and next are non-zero, there is one child in next.
|
||||
// * if table is non-zero, it defines all the children.
|
||||
//
|
||||
// Prefixes are preferred over tables when there is one child, but the
|
||||
// root node always uses a table for lookup efficiency.
|
||||
|
||||
// prefix is the difference in keys between this trie node and the next.
|
||||
// In the example above, node n4 has prefix "cbc" and n4's next node is n5.
|
||||
// Node n5 has no children and so has zero prefix, next and table fields.
|
||||
prefix string
|
||||
next *trieNode
|
||||
|
||||
// table is a lookup table indexed by the next byte in the key, after
|
||||
// remapping that byte through genericReplacer.mapping to create a dense
|
||||
// index. In the example above, the keys only use 'a', 'b', 'c', 'x' and
|
||||
// 'y', which remap to 0, 1, 2, 3 and 4. All other bytes remap to 5, and
|
||||
// genericReplacer.tableSize will be 5. Node n0's table will be
|
||||
// []*trieNode{ 0:n1, 1:n4, 3:n6 }, where the 0, 1 and 3 are the remapped
|
||||
// 'a', 'b' and 'x'.
|
||||
table []*trieNode
|
||||
}
|
||||
|
||||
func (t *trieNode) add(key, val string, priority int, r *genericReplacer) {
    if key == "" {
        if t.priority == 0 {
            t.value = val
            t.priority = priority
        }
        return
    }

    if t.prefix != "" {
        // Need to split the prefix among multiple nodes.
        var n int // length of the longest common prefix
        for ; n < len(t.prefix) && n < len(key); n++ {
            if t.prefix[n] != key[n] {
                break
            }
        }
        if n == len(t.prefix) {
            t.next.add(key[n:], val, priority, r)
        } else if n == 0 {
            // First byte differs, start a new lookup table here. Looking up
            // what is currently t.prefix[0] will lead to prefixNode, and
            // looking up key[0] will lead to keyNode.
            var prefixNode *trieNode
            if len(t.prefix) == 1 {
                prefixNode = t.next
            } else {
                prefixNode = &trieNode{
                    prefix: t.prefix[1:],
                    next:   t.next,
                }
            }
            keyNode := new(trieNode)
            t.table = make([]*trieNode, r.tableSize)
            t.table[r.mapping[t.prefix[0]]] = prefixNode
            t.table[r.mapping[key[0]]] = keyNode
            t.prefix = ""
            t.next = nil
            keyNode.add(key[1:], val, priority, r)
        } else {
            // Insert new node after the common section of the prefix.
            next := &trieNode{
                prefix: t.prefix[n:],
                next:   t.next,
            }
            t.prefix = t.prefix[:n]
            t.next = next
            next.add(key[n:], val, priority, r)
        }
    } else if t.table != nil {
        // Insert into existing table.
        m := r.mapping[key[0]]
        if t.table[m] == nil {
            t.table[m] = new(trieNode)
        }
        t.table[m].add(key[1:], val, priority, r)
    } else {
        t.prefix = key
        t.next = new(trieNode)
        t.next.add("", val, priority, r)
    }
}
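The prefix-splitting branches above are easiest to see on a tiny example. A sketch (not part of the vendored file, relying on the unexported helpers defined in this file and assumed to sit in the same package):

// Illustrative only: assumes it lives in package misspell next to the code above.
func exampleAddSplit() {
    r := makeGenericReplacer([]string{"abc", "1", "abd", "2"})
    // Adding "abc" first creates a single chain: root table -> prefix "bc" -> value "1".
    // Adding "abd" then hits the "Insert new node after the common section" branch
    // (shared prefix "b"), followed by the n == 0 branch, so the node after "b"
    // becomes a lookup table keyed on the remapped 'c' and 'd' bytes.
    child := r.root.table[r.mapping['a']] // now has prefix "b"
    _ = child.next.table                  // entries for 'c' -> "1" and 'd' -> "2"
}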
func (r *genericReplacer) lookup(s string, ignoreRoot bool) (val string, keylen int, found bool) {
    // Iterate down the trie to the end, and grab the value and keylen with
    // the highest priority.
    bestPriority := 0
    node := &r.root
    n := 0
    for node != nil {
        if node.priority > bestPriority && !(ignoreRoot && node == &r.root) {
            bestPriority = node.priority
            val = node.value
            keylen = n
            found = true
        }

        if s == "" {
            break
        }
        if node.table != nil {
            index := r.mapping[ByteToLower(s[0])]
            if int(index) == r.tableSize {
                break
            }
            node = node.table[index]
            s = s[1:]
            n++
        } else if node.prefix != "" && StringHasPrefixFold(s, node.prefix) {
            n += len(node.prefix)
            s = s[len(node.prefix):]
            node = node.next
        } else {
            break
        }
    }
    return
}
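Because lookup keeps the highest-priority match rather than the longest one, the order of pairs passed to the constructor matters. A small standalone sketch (not part of the vendored file) mirroring two cases from the test file later in this diff:

package main

import (
    "fmt"

    "github.com/client9/misspell"
)

func main() {
    // "a" listed first outranks the longer "aaa", and vice versa.
    first := misspell.NewStringReplacer("a", "1", "aa", "2", "aaa", "3")
    last := misspell.NewStringReplacer("aaa", "3", "aa", "2", "a", "1")
    fmt.Println(first.Replace("aaaa")) // "1111"
    fmt.Println(last.Replace("aaaa"))  // "31"
}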
// genericReplacer is the fully generic algorithm.
// It's used as a fallback when nothing faster can be used.
type genericReplacer struct {
    root trieNode
    // tableSize is the size of a trie node's lookup table. It is the number
    // of unique key bytes.
    tableSize int
    // mapping maps from key bytes to a dense index for trieNode.table.
    mapping [256]byte
}

func makeGenericReplacer(oldnew []string) *genericReplacer {
    r := new(genericReplacer)
    // Find each byte used, then assign them each an index.
    for i := 0; i < len(oldnew); i += 2 {
        key := strings.ToLower(oldnew[i])
        for j := 0; j < len(key); j++ {
            r.mapping[key[j]] = 1
        }
    }

    for _, b := range r.mapping {
        r.tableSize += int(b)
    }

    var index byte
    for i, b := range r.mapping {
        if b == 0 {
            r.mapping[i] = byte(r.tableSize)
        } else {
            r.mapping[i] = index
            index++
        }
    }
    // Ensure root node uses a lookup table (for performance).
    r.root.table = make([]*trieNode, r.tableSize)

    for i := 0; i < len(oldnew); i += 2 {
        r.root.add(strings.ToLower(oldnew[i]), oldnew[i+1], len(oldnew)-i, r)
    }
    return r
}
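A minimal standalone sketch (not part of the vendored file) of the same dense byte mapping that makeGenericReplacer builds above: mark every byte used by a key, count them to get tableSize, then give used bytes consecutive indices and point every other byte at tableSize. For the example keys from the trieNode comment it reproduces the documented 0..4 mapping.

package main

import "fmt"

func main() {
    keys := []string{"ax", "ay", "bcbc", "x", "xy"}
    var mapping [256]byte
    for _, k := range keys {
        for i := 0; i < len(k); i++ {
            mapping[k[i]] = 1
        }
    }
    tableSize := 0
    for _, b := range mapping {
        tableSize += int(b)
    }
    var index byte
    for i, b := range mapping {
        if b == 0 {
            mapping[i] = byte(tableSize)
        } else {
            mapping[i] = index
            index++
        }
    }
    fmt.Println(tableSize)                                                            // 5
    fmt.Println(mapping['a'], mapping['b'], mapping['c'], mapping['x'], mapping['y']) // 0 1 2 3 4
}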
type appendSliceWriter []byte

// Write writes to the buffer to satisfy io.Writer.
func (w *appendSliceWriter) Write(p []byte) (int, error) {
    *w = append(*w, p...)
    return len(p), nil
}

// WriteString writes to the buffer without string->[]byte->string allocations.
func (w *appendSliceWriter) WriteString(s string) (int, error) {
    *w = append(*w, s...)
    return len(s), nil
}

type stringWriterIface interface {
    WriteString(string) (int, error)
}

type stringWriter struct {
    w io.Writer
}

func (w stringWriter) WriteString(s string) (int, error) {
    return w.w.Write([]byte(s))
}

func getStringWriter(w io.Writer) stringWriterIface {
    sw, ok := w.(stringWriterIface)
    if !ok {
        sw = stringWriter{w}
    }
    return sw
}
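A short sketch (not part of the vendored file, assumed to live in the same package) of why the adapter above exists: writers that already provide WriteString, such as *bytes.Buffer or the appendSliceWriter above, are used directly, while any other io.Writer is wrapped so the string-to-[]byte conversion happens in exactly one place.

// Illustrative only: assumes it lives in package misspell next to the code above.
func exampleStringWriter(w io.Writer) error {
    sw := getStringWriter(w) // *bytes.Buffer passes the type assertion; others get wrapped
    _, err := sw.WriteString("no []byte conversion at the call site")
    return err
}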
func (r *genericReplacer) Replace(s string) string {
    buf := make(appendSliceWriter, 0, len(s))
    r.WriteString(&buf, s)
    return string(buf)
}

func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) {
    sw := getStringWriter(w)
    var last, wn int
    var prevMatchEmpty bool
    for i := 0; i <= len(s); {
        // Fast path: s[i] is not a prefix of any pattern.
        if i != len(s) && r.root.priority == 0 {
            index := int(r.mapping[ByteToLower(s[i])])
            if index == r.tableSize || r.root.table[index] == nil {
                i++
                continue
            }
        }

        // Ignore the empty match iff the previous loop found the empty match.
        val, keylen, match := r.lookup(s[i:], prevMatchEmpty)
        prevMatchEmpty = match && keylen == 0
        if match {
            orig := s[i : i+keylen]
            switch CaseStyle(orig) {
            case CaseUnknown:
                // pretend we didn't match
                // i++
                // continue
            case CaseUpper:
                val = strings.ToUpper(val)
            case CaseLower:
                val = strings.ToLower(val)
            case CaseTitle:
                if len(val) < 2 {
                    val = strings.ToUpper(val)
                } else {
                    val = strings.ToUpper(val[:1]) + strings.ToLower(val[1:])
                }
            }
            wn, err = sw.WriteString(s[last:i])
            n += wn
            if err != nil {
                return
            }
            //log.Printf("%d: Going to correct %q with %q", i, s[i:i+keylen], val)
            wn, err = sw.WriteString(val)
            n += wn
            if err != nil {
                return
            }
            i += keylen
            last = i
            continue
        }
        i++
    }
    if last != len(s) {
        wn, err = sw.WriteString(s[last:])
        n += wn
    }
    return
}
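A usage sketch (not part of the vendored file) of the case handling in WriteString above. It assumes CaseStyle classifies "Teh", "TEH" and "teh" as title, upper and lower case respectively, which is what the switch above is written for.

package main

import (
    "fmt"

    "github.com/client9/misspell"
)

func main() {
    r := misspell.NewStringReplacer("teh", "the")
    // Matching is case-insensitive (ByteToLower/StringHasPrefixFold in lookup),
    // and the replacement copies the case style of the text it replaces.
    fmt.Println(r.Replace("Teh cat sat on teh mat. TEH MAT."))
    // expected: "The cat sat on the mat. THE MAT."
}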
421 tools/vendor/github.com/client9/misspell/stringreplacer_test.gox generated vendored Normal file
@ -0,0 +1,421 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package misspell_test

import (
    "bytes"
    "fmt"
    "strings"
    "testing"

    . "github.com/client9/misspell"
)

var htmlEscaper = NewStringReplacer(
    "&", "&amp;",
    "<", "&lt;",
    ">", "&gt;",
    `"`, "&quot;",
    "'", "&apos;",
)

var htmlUnescaper = NewStringReplacer(
    "&amp;", "&",
    "&lt;", "<",
    "&gt;", ">",
    "&quot;", `"`,
    "&apos;", "'",
)

// The http package's old HTML escaping function.
func oldHTMLEscape(s string) string {
    s = strings.Replace(s, "&", "&amp;", -1)
    s = strings.Replace(s, "<", "&lt;", -1)
    s = strings.Replace(s, ">", "&gt;", -1)
    s = strings.Replace(s, `"`, "&quot;", -1)
    s = strings.Replace(s, "'", "&apos;", -1)
    return s
}
var capitalLetters = NewStringReplacer("a", "A", "b", "B")

// TestReplacer tests the replacer implementations.
func TestReplacer(t *testing.T) {
    type testCase struct {
        r       *StringReplacer
        in, out string
    }
    var testCases []testCase

    // str converts 0xff to "\xff". This isn't just string(b) since that converts to UTF-8.
    str := func(b byte) string {
        return string([]byte{b})
    }
    var s []string

    // inc maps "\x00"->"\x01", ..., "a"->"b", "b"->"c", ..., "\xff"->"\x00".
    for i := 0; i < 256; i++ {
        s = append(s, str(byte(i)), str(byte(i+1)))
    }
    inc := NewStringReplacer(s...)

    // Test cases with 1-byte old strings, 1-byte new strings.
    testCases = append(testCases,
        testCase{capitalLetters, "brad", "BrAd"},
        testCase{capitalLetters, strings.Repeat("a", (32<<10)+123), strings.Repeat("A", (32<<10)+123)},
        testCase{capitalLetters, "", ""},

        testCase{inc, "brad", "csbe"},
        testCase{inc, "\x00\xff", "\x01\x00"},
        testCase{inc, "", ""},

        testCase{NewStringReplacer("a", "1", "a", "2"), "brad", "br1d"},
    )

    // repeat maps "a"->"a", "b"->"bb", "c"->"ccc", ...
    s = nil
    for i := 0; i < 256; i++ {
        n := i + 1 - 'a'
        if n < 1 {
            n = 1
        }
        s = append(s, str(byte(i)), strings.Repeat(str(byte(i)), n))
    }
    repeat := NewStringReplacer(s...)

    // Test cases with 1-byte old strings, variable length new strings.
    testCases = append(testCases,
        testCase{htmlEscaper, "No changes", "No changes"},
        testCase{htmlEscaper, "I <3 escaping & stuff", "I &lt;3 escaping &amp; stuff"},
        testCase{htmlEscaper, "&&&", "&amp;&amp;&amp;"},
        testCase{htmlEscaper, "", ""},

        testCase{repeat, "brad", "bbrrrrrrrrrrrrrrrrrradddd"},
        testCase{repeat, "abba", "abbbba"},
        testCase{repeat, "", ""},

        testCase{NewStringReplacer("a", "11", "a", "22"), "brad", "br11d"},
    )

    // The remaining test cases have variable length old strings.

    testCases = append(testCases,
        testCase{htmlUnescaper, "&amp;amp;", "&amp;"},
        testCase{htmlUnescaper, "&lt;b&gt;HTML&apos;s neat&lt;/b&gt;", "<b>HTML's neat</b>"},
        testCase{htmlUnescaper, "", ""},

        testCase{NewStringReplacer("a", "1", "a", "2", "xxx", "xxx"), "brad", "br1d"},

        testCase{NewStringReplacer("a", "1", "aa", "2", "aaa", "3"), "aaaa", "1111"},

        testCase{NewStringReplacer("aaa", "3", "aa", "2", "a", "1"), "aaaa", "31"},
    )
    // gen1 has multiple old strings of variable length. There is no
    // overall non-empty common prefix, but some pairwise common prefixes.
    gen1 := NewStringReplacer(
        "aaa", "3[aaa]",
        "aa", "2[aa]",
        "a", "1[a]",
        "i", "i",
        "longerst", "most long",
        "longer", "medium",
        "long", "short",
        "xx", "xx",
        "x", "X",
        "X", "Y",
        "Y", "Z",
    )
    testCases = append(testCases,
        testCase{gen1, "fooaaabar", "foo3[aaa]b1[a]r"},
        testCase{gen1, "long, longerst, longer", "short, most long, medium"},
        testCase{gen1, "xxxxx", "xxxxX"},
        testCase{gen1, "XiX", "YiY"},
        testCase{gen1, "", ""},
    )

    // gen2 has multiple old strings with no pairwise common prefix.
    gen2 := NewStringReplacer(
        "roses", "red",
        "violets", "blue",
        "sugar", "sweet",
    )
    testCases = append(testCases,
        testCase{gen2, "roses are red, violets are blue...", "red are red, blue are blue..."},
        testCase{gen2, "", ""},
    )

    // gen3 has multiple old strings with an overall common prefix.
    gen3 := NewStringReplacer(
        "abracadabra", "poof",
        "abracadabrakazam", "splat",
        "abraham", "lincoln",
        "abrasion", "scrape",
        "abraham", "isaac",
    )
    testCases = append(testCases,
        testCase{gen3, "abracadabrakazam abraham", "poofkazam lincoln"},
        testCase{gen3, "abrasion abracad", "scrape abracad"},
        testCase{gen3, "abba abram abrasive", "abba abram abrasive"},
        testCase{gen3, "", ""},
    )

    // foo{1,2,3,4} have multiple old strings with an overall common prefix
    // and 1- or 2- byte extensions from the common prefix.
    foo1 := NewStringReplacer(
        "foo1", "A",
        "foo2", "B",
        "foo3", "C",
    )
    foo2 := NewStringReplacer(
        "foo1", "A",
        "foo2", "B",
        "foo31", "C",
        "foo32", "D",
    )
    foo3 := NewStringReplacer(
        "foo11", "A",
        "foo12", "B",
        "foo31", "C",
        "foo32", "D",
    )
    foo4 := NewStringReplacer(
        "foo12", "B",
        "foo32", "D",
    )
    testCases = append(testCases,
        testCase{foo1, "fofoofoo12foo32oo", "fofooA2C2oo"},
        testCase{foo1, "", ""},

        testCase{foo2, "fofoofoo12foo32oo", "fofooA2Doo"},
        testCase{foo2, "", ""},

        testCase{foo3, "fofoofoo12foo32oo", "fofooBDoo"},
        testCase{foo3, "", ""},

        testCase{foo4, "fofoofoo12foo32oo", "fofooBDoo"},
        testCase{foo4, "", ""},
    )

    // genAll maps "\x00\x01\x02...\xfe\xff" to "[all]", amongst other things.
    allBytes := make([]byte, 256)
    for i := range allBytes {
        allBytes[i] = byte(i)
    }
    allString := string(allBytes)
    genAll := NewStringReplacer(
        allString, "[all]",
        "\xff", "[ff]",
        "\x00", "[00]",
    )
    testCases = append(testCases,
        testCase{genAll, allString, "[all]"},
        testCase{genAll, "a\xff" + allString + "\x00", "a[ff][all][00]"},
        testCase{genAll, "", ""},
    )

    // Test cases with empty old strings.

    blankToX1 := NewStringReplacer("", "X")
    blankToX2 := NewStringReplacer("", "X", "", "")
    blankHighPriority := NewStringReplacer("", "X", "o", "O")
    blankLowPriority := NewStringReplacer("o", "O", "", "X")
    blankNoOp1 := NewStringReplacer("", "")
    blankNoOp2 := NewStringReplacer("", "", "", "A")
    blankFoo := NewStringReplacer("", "X", "foobar", "R", "foobaz", "Z")
    testCases = append(testCases,
        testCase{blankToX1, "foo", "XfXoXoX"},
        testCase{blankToX1, "", "X"},

        testCase{blankToX2, "foo", "XfXoXoX"},
        testCase{blankToX2, "", "X"},

        testCase{blankHighPriority, "oo", "XOXOX"},
        testCase{blankHighPriority, "ii", "XiXiX"},
        testCase{blankHighPriority, "oiio", "XOXiXiXOX"},
        testCase{blankHighPriority, "iooi", "XiXOXOXiX"},
        testCase{blankHighPriority, "", "X"},

        testCase{blankLowPriority, "oo", "OOX"},
        testCase{blankLowPriority, "ii", "XiXiX"},
        testCase{blankLowPriority, "oiio", "OXiXiOX"},
        testCase{blankLowPriority, "iooi", "XiOOXiX"},
        testCase{blankLowPriority, "", "X"},

        testCase{blankNoOp1, "foo", "foo"},
        testCase{blankNoOp1, "", ""},

        testCase{blankNoOp2, "foo", "foo"},
        testCase{blankNoOp2, "", ""},

        testCase{blankFoo, "foobarfoobaz", "XRXZX"},
        testCase{blankFoo, "foobar-foobaz", "XRX-XZX"},
        testCase{blankFoo, "", "X"},
    )

    // single string replacer

    abcMatcher := NewStringReplacer("abc", "[match]")

    testCases = append(testCases,
        testCase{abcMatcher, "", ""},
        testCase{abcMatcher, "ab", "ab"},
        testCase{abcMatcher, "abc", "[match]"},
        testCase{abcMatcher, "abcd", "[match]d"},
        testCase{abcMatcher, "cabcabcdabca", "c[match][match]d[match]a"},
    )

    // Issue 6659 cases (more single string replacer)

    noHello := NewStringReplacer("Hello", "")
    testCases = append(testCases,
        testCase{noHello, "Hello", ""},
        testCase{noHello, "Hellox", "x"},
        testCase{noHello, "xHello", "x"},
        testCase{noHello, "xHellox", "xx"},
    )

    // No-arg test cases.

    nop := NewStringReplacer()
    testCases = append(testCases,
        testCase{nop, "abc", "abc"},
        testCase{nop, "", ""},
    )

    // Run the test cases.

    for i, tc := range testCases {
        if s := tc.r.Replace(tc.in); s != tc.out {
            t.Errorf("%d. strings.Replace(%q) = %q, want %q", i, tc.in, s, tc.out)
        }
        var buf bytes.Buffer
        n, err := tc.r.WriteString(&buf, tc.in)
        if err != nil {
            t.Errorf("%d. WriteString: %v", i, err)
            continue
        }
        got := buf.String()
        if got != tc.out {
            t.Errorf("%d. WriteString(%q) wrote %q, want %q", i, tc.in, got, tc.out)
            continue
        }
        if n != len(tc.out) {
            t.Errorf("%d. WriteString(%q) wrote correct string but reported %d bytes; want %d (%q)",
                i, tc.in, n, len(tc.out), tc.out)
        }
    }
}

type errWriter struct{}

func (errWriter) Write(p []byte) (n int, err error) {
    return 0, fmt.Errorf("unwritable")
}

func BenchmarkGenericNoMatch(b *testing.B) {
    str := strings.Repeat("A", 100) + strings.Repeat("B", 100)
    generic := NewStringReplacer("a", "A", "b", "B", "12", "123") // varying lengths forces generic
    for i := 0; i < b.N; i++ {
        generic.Replace(str)
    }
}

func BenchmarkGenericMatch1(b *testing.B) {
    str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
    generic := NewStringReplacer("a", "A", "b", "B", "12", "123")
    for i := 0; i < b.N; i++ {
        generic.Replace(str)
    }
}

func BenchmarkGenericMatch2(b *testing.B) {
    str := strings.Repeat("It's <b>HTML</b>!", 100)
    for i := 0; i < b.N; i++ {
        htmlUnescaper.Replace(str)
    }
}

func benchmarkSingleString(b *testing.B, pattern, text string) {
    r := NewStringReplacer(pattern, "[match]")
    b.SetBytes(int64(len(text)))
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        r.Replace(text)
    }
}

func BenchmarkSingleMaxSkipping(b *testing.B) {
    benchmarkSingleString(b, strings.Repeat("b", 25), strings.Repeat("a", 10000))
}

func BenchmarkSingleLongSuffixFail(b *testing.B) {
    benchmarkSingleString(b, "b"+strings.Repeat("a", 500), strings.Repeat("a", 1002))
}

func BenchmarkSingleMatch(b *testing.B) {
    benchmarkSingleString(b, "abcdef", strings.Repeat("abcdefghijklmno", 1000))
}

func BenchmarkByteByteNoMatch(b *testing.B) {
    str := strings.Repeat("A", 100) + strings.Repeat("B", 100)
    for i := 0; i < b.N; i++ {
        capitalLetters.Replace(str)
    }
}

func BenchmarkByteByteMatch(b *testing.B) {
    str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
    for i := 0; i < b.N; i++ {
        capitalLetters.Replace(str)
    }
}

func BenchmarkByteStringMatch(b *testing.B) {
    str := "<" + strings.Repeat("a", 99) + strings.Repeat("b", 99) + ">"
    for i := 0; i < b.N; i++ {
        htmlEscaper.Replace(str)
    }
}

func BenchmarkHTMLEscapeNew(b *testing.B) {
    str := "I <3 to escape HTML & other text too."
    for i := 0; i < b.N; i++ {
        htmlEscaper.Replace(str)
    }
}

func BenchmarkHTMLEscapeOld(b *testing.B) {
    str := "I <3 to escape HTML & other text too."
    for i := 0; i < b.N; i++ {
        oldHTMLEscape(str)
    }
}

func BenchmarkByteStringReplacerWriteString(b *testing.B) {
    str := strings.Repeat("I <3 to escape HTML & other text too.", 100)
    buf := new(bytes.Buffer)
    for i := 0; i < b.N; i++ {
        htmlEscaper.WriteString(buf, str)
        buf.Reset()
    }
}

func BenchmarkByteReplacerWriteString(b *testing.B) {
    str := strings.Repeat("abcdefghijklmnopqrstuvwxyz", 100)
    buf := new(bytes.Buffer)
    for i := 0; i < b.N; i++ {
        capitalLetters.WriteString(buf, str)
        buf.Reset()
    }
}

// BenchmarkByteByteReplaces compares byteByteImpl against multiple Replaces.
func BenchmarkByteByteReplaces(b *testing.B) {
    str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
    for i := 0; i < b.N; i++ {
        strings.Replace(strings.Replace(str, "a", "A", -1), "b", "B", -1)
    }
}
17 tools/vendor/github.com/client9/misspell/url.go generated vendored Normal file
@ -0,0 +1,17 @@
package misspell

import (
    "regexp"
)

// Regexp for URL https://mathiasbynens.be/demo/url-regex
//
// original @imme_emosol (54 chars) has trouble with dashes in hostname
// @(https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS
var reURL = regexp.MustCompile(`(?i)(https?|ftp)://(-\.)?([^\s/?\.#]+\.?)+(/[^\s]*)?`)

// StripURL attempts to replace URLs with blank spaces, e.g.
// "xxx http://foo.com/ yyy" -> "xxx                 yyy"
func StripURL(s string) string {
    return reURL.ReplaceAllStringFunc(s, replaceWithBlanks)
}
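A usage sketch (not part of the vendored file). replaceWithBlanks is defined elsewhere in the package; per the comment above it is assumed here to overwrite each match with spaces, so the byte offsets of the surrounding words survive for later spell-checking.

package main

import (
    "fmt"

    "github.com/client9/misspell"
)

func main() {
    in := "docs at https://example.com/guide plus ftp://host/file"
    out := misspell.StripURL(in)
    fmt.Printf("%q\n", out)          // both URLs blanked out, everything else untouched
    fmt.Println(len(in) == len(out)) // true if replaceWithBlanks is length-preserving (assumed)
}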
31153 tools/vendor/github.com/client9/misspell/words.go generated vendored Normal file
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.