Co-authored-by: techknowlogick <techknowlogick@gitea.io>
@@ -339,7 +339,7 @@ steps:
   pull: default
   image: alpine:3.11
   commands:
-  - ./scripts/update-locales.sh
+  - ./build/update-locales.sh
 - name: push
   pull: always
@@ -6,5 +6,5 @@ conf/* linguist-vendored
 docker/* linguist-vendored
 options/* linguist-vendored
 public/* linguist-vendored
-scripts/* linguist-vendored
+build/* linguist-vendored
 templates/* linguist-vendored
@@ -80,7 +80,7 @@ TAGS ?=
 TAGS_SPLIT := $(subst $(COMMA), ,$(TAGS))
 TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags
-GO_DIRS := cmd integrations models modules routers scripts services vendor
+GO_DIRS := cmd integrations models modules routers build services vendor
 GO_SOURCES := $(wildcard *.go)
 GO_SOURCES += $(shell find $(GO_DIRS) -type f -name "*.go" -not -path modules/options/bindata.go -not -path modules/public/bindata.go -not -path modules/templates/bindata.go)
@@ -234,10 +234,7 @@ errcheck:
 .PHONY: revive
 revive:
-	@hash revive > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
-		$(GO) get -u github.com/mgechev/revive; \
-	fi
-	revive -config .revive.toml -exclude=./vendor/... ./... || exit 1
+	GO111MODULE=on $(GO) run -mod=vendor build/lint.go -config .revive.toml -exclude=./vendor/... ./... || exit 1
 .PHONY: misspell-check
 misspell-check:
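The rewritten `revive` target above no longer fetches a `revive` binary with `go get`; it compiles and runs the wrapper added in this change via `go run -mod=vendor build/lint.go`, so linting always uses the revive version pinned in `go.mod`. That wrapper (shown next) carries a `// +build ignore` constraint so it never becomes part of an ordinary package build, while naming the file explicitly still lets `go run` compile it. A minimal, self-contained sketch of that mechanism follows; the file name and flag are made up for illustration and are not part of this change.

```go
// +build ignore

// A stand-alone helper in the style of build/lint.go: the "ignore" build tag
// keeps this file out of every ordinary `go build ./...`, but naming the file
// explicitly still works, e.g. `go run -mod=vendor build/hello.go -name gitea`.
// The file name and flag are illustrative only.
package main

import (
	"flag"
	"fmt"
)

func main() {
	name := flag.String("name", "world", "who to greet")
	flag.Parse()
	fmt.Printf("hello, %s\n", *name)
}
```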
@@ -0,0 +1,325 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Copyright (c) 2018 Minko Gechev. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

// +build ignore

package main

import (
	"flag"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"

	"github.com/BurntSushi/toml"
	"github.com/mgechev/dots"
	"github.com/mgechev/revive/formatter"
	"github.com/mgechev/revive/lint"
	"github.com/mgechev/revive/rule"
	"github.com/mitchellh/go-homedir"
)

func fail(err string) {
	fmt.Fprintln(os.Stderr, err)
	os.Exit(1)
}

var defaultRules = []lint.Rule{
	&rule.VarDeclarationsRule{},
	&rule.PackageCommentsRule{},
	&rule.DotImportsRule{},
	&rule.BlankImportsRule{},
	&rule.ExportedRule{},
	&rule.VarNamingRule{},
	&rule.IndentErrorFlowRule{},
	&rule.IfReturnRule{},
	&rule.RangeRule{},
	&rule.ErrorfRule{},
	&rule.ErrorNamingRule{},
	&rule.ErrorStringsRule{},
	&rule.ReceiverNamingRule{},
	&rule.IncrementDecrementRule{},
	&rule.ErrorReturnRule{},
	&rule.UnexportedReturnRule{},
	&rule.TimeNamingRule{},
	&rule.ContextKeysType{},
	&rule.ContextAsArgumentRule{},
}

var allRules = append([]lint.Rule{
	&rule.ArgumentsLimitRule{},
	&rule.CyclomaticRule{},
	&rule.FileHeaderRule{},
	&rule.EmptyBlockRule{},
	&rule.SuperfluousElseRule{},
	&rule.ConfusingNamingRule{},
	&rule.GetReturnRule{},
	&rule.ModifiesParamRule{},
	&rule.ConfusingResultsRule{},
	&rule.DeepExitRule{},
	&rule.UnusedParamRule{},
	&rule.UnreachableCodeRule{},
	&rule.AddConstantRule{},
	&rule.FlagParamRule{},
	&rule.UnnecessaryStmtRule{},
	&rule.StructTagRule{},
	&rule.ModifiesValRecRule{},
	&rule.ConstantLogicalExprRule{},
	&rule.BoolLiteralRule{},
	&rule.RedefinesBuiltinIDRule{},
	&rule.ImportsBlacklistRule{},
	&rule.FunctionResultsLimitRule{},
	&rule.MaxPublicStructsRule{},
	&rule.RangeValInClosureRule{},
	&rule.RangeValAddress{},
	&rule.WaitGroupByValueRule{},
	&rule.AtomicRule{},
	&rule.EmptyLinesRule{},
	&rule.LineLengthLimitRule{},
	&rule.CallToGCRule{},
	&rule.DuplicatedImportsRule{},
	&rule.ImportShadowingRule{},
	&rule.BareReturnRule{},
	&rule.UnusedReceiverRule{},
	&rule.UnhandledErrorRule{},
	&rule.CognitiveComplexityRule{},
	&rule.StringOfIntRule{},
}, defaultRules...)

var allFormatters = []lint.Formatter{
	&formatter.Stylish{},
	&formatter.Friendly{},
	&formatter.JSON{},
	&formatter.NDJSON{},
	&formatter.Default{},
	&formatter.Unix{},
	&formatter.Checkstyle{},
	&formatter.Plain{},
}

func getFormatters() map[string]lint.Formatter {
	result := map[string]lint.Formatter{}
	for _, f := range allFormatters {
		result[f.Name()] = f
	}
	return result
}

func getLintingRules(config *lint.Config) []lint.Rule {
	rulesMap := map[string]lint.Rule{}
	for _, r := range allRules {
		rulesMap[r.Name()] = r
	}

	lintingRules := []lint.Rule{}
	for name := range config.Rules {
		rule, ok := rulesMap[name]
		if !ok {
			fail("cannot find rule: " + name)
		}
		lintingRules = append(lintingRules, rule)
	}

	return lintingRules
}

func parseConfig(path string) *lint.Config {
	config := &lint.Config{}
	file, err := ioutil.ReadFile(path)
	if err != nil {
		fail("cannot read the config file")
	}
	_, err = toml.Decode(string(file), config)
	if err != nil {
		fail("cannot parse the config file: " + err.Error())
	}
	return config
}

func normalizeConfig(config *lint.Config) {
	if config.Confidence == 0 {
		config.Confidence = 0.8
	}
	severity := config.Severity
	if severity != "" {
		for k, v := range config.Rules {
			if v.Severity == "" {
				v.Severity = severity
			}
			config.Rules[k] = v
		}
		for k, v := range config.Directives {
			if v.Severity == "" {
				v.Severity = severity
			}
			config.Directives[k] = v
		}
	}
}

func getConfig() *lint.Config {
	config := defaultConfig()
	if configPath != "" {
		config = parseConfig(configPath)
	}
	normalizeConfig(config)
	return config
}

func getFormatter() lint.Formatter {
	formatters := getFormatters()
	formatter := formatters["default"]
	if formatterName != "" {
		f, ok := formatters[formatterName]
		if !ok {
			fail("unknown formatter " + formatterName)
		}
		formatter = f
	}
	return formatter
}

func buildDefaultConfigPath() string {
	var result string
	if homeDir, err := homedir.Dir(); err == nil {
		result = filepath.Join(homeDir, "revive.toml")
		if _, err := os.Stat(result); err != nil {
			result = ""
		}
	}

	return result
}

func defaultConfig() *lint.Config {
	defaultConfig := lint.Config{
		Confidence: 0.0,
		Severity:   lint.SeverityWarning,
		Rules:      map[string]lint.RuleConfig{},
	}
	for _, r := range defaultRules {
		defaultConfig.Rules[r.Name()] = lint.RuleConfig{}
	}
	return &defaultConfig
}

func normalizeSplit(strs []string) []string {
	res := []string{}
	for _, s := range strs {
		t := strings.Trim(s, " \t")
		if len(t) > 0 {
			res = append(res, t)
		}
	}
	return res
}

func getPackages() [][]string {
	globs := normalizeSplit(flag.Args())
	if len(globs) == 0 {
		globs = append(globs, ".")
	}

	packages, err := dots.ResolvePackages(globs, normalizeSplit(excludePaths))
	if err != nil {
		fail(err.Error())
	}

	return packages
}

type arrayFlags []string

func (i *arrayFlags) String() string {
	return strings.Join([]string(*i), " ")
}

func (i *arrayFlags) Set(value string) error {
	*i = append(*i, value)
	return nil
}

var configPath string
var excludePaths arrayFlags
var formatterName string
var help bool

var originalUsage = flag.Usage

func init() {
	flag.Usage = func() {
		originalUsage()
	}
	// command line help strings
	const (
		configUsage    = "path to the configuration TOML file, defaults to $HOME/revive.toml, if present (i.e. -config myconf.toml)"
		excludeUsage   = "list of globs which specify files to be excluded (i.e. -exclude foo/...)"
		formatterUsage = "formatter to be used for the output (i.e. -formatter stylish)"
	)

	defaultConfigPath := buildDefaultConfigPath()

	flag.StringVar(&configPath, "config", defaultConfigPath, configUsage)
	flag.Var(&excludePaths, "exclude", excludeUsage)
	flag.StringVar(&formatterName, "formatter", "", formatterUsage)
	flag.Parse()
}

func main() {
	config := getConfig()
	formatter := getFormatter()
	packages := getPackages()

	revive := lint.New(func(file string) ([]byte, error) {
		return ioutil.ReadFile(file)
	})
	lintingRules := getLintingRules(config)

	failures, err := revive.Lint(packages, lintingRules, *config)
	if err != nil {
		fail(err.Error())
	}

	formatChan := make(chan lint.Failure)
	exitChan := make(chan bool)

	var output string
	go (func() {
		output, err = formatter.Format(formatChan, *config)
		if err != nil {
			fail(err.Error())
		}
		exitChan <- true
	})()

	exitCode := 0
	for f := range failures {
		if f.Confidence < config.Confidence {
			continue
		}
		if exitCode == 0 {
			exitCode = config.WarningCode
		}
		if c, ok := config.Rules[f.RuleName]; ok && c.Severity == lint.SeverityError {
			exitCode = config.ErrorCode
		}
		if c, ok := config.Directives[f.RuleName]; ok && c.Severity == lint.SeverityError {
			exitCode = config.ErrorCode
		}

		formatChan <- f
	}

	close(formatChan)
	<-exitChan

	if output != "" {
		fmt.Println(output)
	}

	os.Exit(exitCode)
}
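The `main` function above streams lint failures through a channel to a formatter goroutine and only exits after that goroutine signals completion. Stripped of the revive types, the control flow reduces to the following pattern; this is an illustrative sketch, not code from this change, and the names are chosen only to mirror `formatChan`/`exitChan`.

```go
// A consumer goroutine drains a channel and signals completion; the main
// goroutine filters items, feeds the channel, closes it, and then waits for
// the consumer before exiting.
package main

import "fmt"

func main() {
	items := []int{3, 7, 1, 9}
	out := make(chan int)  // plays the role of formatChan
	done := make(chan bool) // plays the role of exitChan

	go func() {
		sum := 0
		for v := range out {
			sum += v // consume until the channel is closed
		}
		fmt.Println("sum of accepted items:", sum)
		done <- true
	}()

	for _, v := range items {
		if v < 2 { // filtering step, analogous to the confidence check
			continue
		}
		out <- v
	}
	close(out)
	<-done
}
```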
@@ -0,0 +1,18 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package build

import (
	// for lint
	_ "github.com/BurntSushi/toml"
	_ "github.com/mgechev/dots"
	_ "github.com/mgechev/revive/formatter"
	_ "github.com/mgechev/revive/lint"
	_ "github.com/mgechev/revive/rule"
	_ "github.com/mitchellh/go-homedir"

	// for embed
	_ "github.com/shurcooL/vfsgen"
)
@@ -16,6 +16,7 @@ require (
 	gitea.com/macaron/macaron v1.4.0
 	gitea.com/macaron/session v0.0.0-20191207215012-613cebf0674d
 	gitea.com/macaron/toolbox v0.0.0-20190822013122-05ff0fc766b7
+	github.com/BurntSushi/toml v0.3.1
 	github.com/PuerkitoBio/goquery v1.5.0
 	github.com/RoaringBitmap/roaring v0.4.21 // indirect
 	github.com/bgentry/speakeasy v0.1.0 // indirect
@@ -67,17 +68,20 @@ require (
 	github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96
 	github.com/mailru/easyjson v0.7.0 // indirect
 	github.com/markbates/goth v1.61.2
-	github.com/mattn/go-isatty v0.0.7
+	github.com/mattn/go-isatty v0.0.11
 	github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d // indirect
 	github.com/mattn/go-sqlite3 v1.11.0
 	github.com/mcuadros/go-version v0.0.0-20190308113854-92cdf37c5b75
+	github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81
+	github.com/mgechev/revive v1.0.2
 	github.com/microcosm-cc/bluemonday v0.0.0-20161012083705-f77f16ffc87a
+	github.com/mitchellh/go-homedir v1.1.0
 	github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc
 	github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5
 	github.com/niklasfasching/go-org v0.1.9
 	github.com/oliamb/cutter v0.2.2
 	github.com/olivere/elastic/v7 v7.0.9
-	github.com/pkg/errors v0.8.1
+	github.com/pkg/errors v0.9.1
 	github.com/pquerna/otp v0.0.0-20160912161815-54653902c20e
 	github.com/prometheus/client_golang v1.1.0
 	github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 // indirect
@@ -107,7 +111,6 @@ require (
 	golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
 	golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527
 	golang.org/x/text v0.3.2
-	golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 // indirect
 	gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
 	gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 // indirect
 	gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
@@ -157,6 +157,10 @@ github.com/facebookgo/stack v0.0.0-20160209184415-751773369052 h1:JWuenKqqX8nojt
 github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg=
 github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870 h1:E2s37DuLxFhQDg5gKsWoLBOB0n+ZW8s599zru8FJ2/Y=
 github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870/go.mod h1:5tD+neXqOorC30/tWg0LCSkrqj/AR6gu8yY8/fpw1q0=
+github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
+github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
+github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=
+github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
 github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
 github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
 github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
@@ -184,6 +188,7 @@ github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmC
 github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
+github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
 github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI=
 github.com/go-openapi/analysis v0.17.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
 github.com/go-openapi/analysis v0.18.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
@@ -399,10 +404,15 @@ github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7
 github.com/markbates/going v1.0.0/go.mod h1:I6mnB4BPnEeqo85ynXIx1ZFLLbtiLHNXVgWeFO9OGOA=
 github.com/markbates/goth v1.61.2 h1:jDowrUH5qw8KGuQdKwFhLzkXkTYCIPfz3LHADJsiPIs=
 github.com/markbates/goth v1.61.2/go.mod h1:qh2QfwZoWRucQ+DR5KVKC6dUGkNCToWh4vS45GIzFsY=
-github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
-github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
+github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
+github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
 github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d h1:m+dSK37rFf2fqppZhg15yI2IwC9BtucBiRwSDm9VL8g=
 github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d/go.mod h1:/M9VLO+lUPmxvoOK2PfWRZ8mTtB4q1Hy9lEGijv9Nr8=
+github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54=
+github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
 github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
 github.com/mattn/go-sqlite3 v1.11.0 h1:LDdKkqtYlom37fkvqs8rMPFKAMe8+SgjbwZ6ex1/A/Q=
 github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
@@ -410,6 +420,10 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0j
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
 github.com/mcuadros/go-version v0.0.0-20190308113854-92cdf37c5b75 h1:Pijfgr7ZuvX7QIQiEwLdRVr3RoMG+i0SbBO1Qu+7yVk=
 github.com/mcuadros/go-version v0.0.0-20190308113854-92cdf37c5b75/go.mod h1:76rfSfYPWj01Z85hUf/ituArm797mNKcvINh1OlsZKo=
+github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81 h1:QASJXOGm2RZ5Ardbc86qNFvby9AqkLDibfChMtAg5QM=
+github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg=
+github.com/mgechev/revive v1.0.2 h1:v0NxxQ7fSFz/u1NQydPo6EGdq7va0J1BtsZmae6kzUg=
+github.com/mgechev/revive v1.0.2/go.mod h1:rb0dQy1LVAxW9SWy5R3LPUjevzUbUS316U5MFySA2lo=
 github.com/microcosm-cc/bluemonday v0.0.0-20161012083705-f77f16ffc87a h1:d18LCO3ctH2kugUqt0pEyKKP8L+IYrocaPqGFilhTKk=
 github.com/microcosm-cc/bluemonday v0.0.0-20161012083705-f77f16ffc87a/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@@ -434,6 +448,8 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLA
 github.com/niklasfasching/go-org v0.1.9 h1:Toz8WMIt+qJb52uYEk1YD/muLuOOmRt1CfkV+bKVMkI=
 github.com/niklasfasching/go-org v0.1.9/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
 github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
+github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=
+github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
 github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
 github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
 github.com/olivere/elastic/v7 v7.0.9 h1:+bTR1xJbfLYD8WnTBt9672mFlKxjfWRJpEQ1y8BMS3g=
@@ -457,6 +473,8 @@ github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi
 github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
@@ -631,6 +649,7 @@ golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTk
 golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee h1:WG0RUwxtNT4qqaXX3DPA8zHFNm/D9xaBpxzHt1WcA/E=
 golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
 golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -690,6 +709,7 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20190907184412-d223b2b6db03/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY=
 golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
 golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -714,9 +734,10 @@ golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgw
 golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 h1:kJQZhwFzSwJS2BxboKjdZzWczQOZx8VuH7Y8hhuGUtM=
-golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200225230052-807dcd883420 h1:4RJNOV+2rLxMEfr6QIpC7GEv9MjD6ApGXTCLrNF9+eA=
+golang.org/x/tools v0.0.0-20200225230052-807dcd883420/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
 google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
@@ -788,6 +809,7 @@ honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWh
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
 mvdan.cc/xurls/v2 v2.1.0 h1:KaMb5GLhlcSX+e+qhbRJODnUUBvlw01jt4yrjFIHAuA=
 mvdan.cc/xurls/v2 v2.1.0/go.mod h1:5GrSd9rOnKOpZaji1OZLYL/yeAAtGDlo/cFe+8K5n8E=
 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
@@ -21,8 +21,8 @@ import (
 	_ "code.gitea.io/gitea/modules/markup/markdown"
 	_ "code.gitea.io/gitea/modules/markup/orgmode"
-	// for embed
-	_ "github.com/shurcooL/vfsgen"
+	// for build
+	_ "code.gitea.io/gitea/build"
 	"github.com/urfave/cli"
 )
@@ -6,4 +6,4 @@
 package options
-//go:generate go run -mod=vendor ../../scripts/generate-bindata.go ../../options options bindata.go
+//go:generate go run -mod=vendor ../../build/generate-bindata.go ../../options options bindata.go
@@ -6,4 +6,4 @@
 package public
-//go:generate go run -mod=vendor ../../scripts/generate-bindata.go ../../public public bindata.go
+//go:generate go run -mod=vendor ../../build/generate-bindata.go ../../public public bindata.go
@@ -6,4 +6,4 @@
 package templates
-//go:generate go run -mod=vendor ../../scripts/generate-bindata.go ../../templates templates bindata.go
+//go:generate go run -mod=vendor ../../build/generate-bindata.go ../../templates templates bindata.go
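All three directives above now point at `build/generate-bindata.go`, which is run through `go generate` rather than being part of the compiled binary. Since `generate-bindata.go` itself is not included in this diff, the following is only a generic, hypothetical illustration of how such a directive drives an ignore-tagged generator; none of these names exist in Gitea.

```go
// A directive such as
//
//	//go:generate go run -mod=vendor ../../build/gen_example.go options bindata_example.go
//
// placed in a normal package file makes `go generate ./...` run the program
// below, which writes a Go source file for the named package.
// Everything here is illustrative only.

// +build ignore

package main

import (
	"fmt"
	"io/ioutil"
	"os"
)

func main() {
	if len(os.Args) != 3 {
		fmt.Fprintln(os.Stderr, "usage: gen_example.go <package> <output.go>")
		os.Exit(1)
	}
	src := fmt.Sprintf("// Code generated by gen_example.go. DO NOT EDIT.\n\npackage %s\n", os.Args[1])
	if err := ioutil.WriteFile(os.Args[2], []byte(src), 0666); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```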
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2013 Fatih Arslan

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,182 @@
# Archived project. No maintenance.

This project is not maintained anymore and is archived. Feel free to fork and
make your own changes if needed. For more detail read my blog post: [Taking an indefinite sabbatical from my projects](https://arslan.io/2018/10/09/taking-an-indefinite-sabbatical-from-my-projects/)

Thanks to everyone for their valuable feedback and contributions.

# Color [GoDoc](https://godoc.org/github.com/fatih/color)

Color lets you use colorized outputs in terms of [ANSI Escape
Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It
has support for Windows too! The API can be used in several ways, pick one that
suits you.

## Install

```bash
go get github.com/fatih/color
```

## Examples

### Standard colors

```go
// Print with default helper functions
color.Cyan("Prints text in cyan.")

// A newline will be appended automatically
color.Blue("Prints %s in blue.", "text")

// These are using the default foreground colors
color.Red("We have red")
color.Magenta("And many others ..")
```

### Mix and reuse colors

```go
// Create a new color object
c := color.New(color.FgCyan).Add(color.Underline)
c.Println("Prints cyan text with an underline.")

// Or just add them to New()
d := color.New(color.FgCyan, color.Bold)
d.Printf("This prints bold cyan %s\n", "too!.")

// Mix up foreground and background colors, create new mixes!
red := color.New(color.FgRed)
boldRed := red.Add(color.Bold)
boldRed.Println("This will print text in bold red.")

whiteBackground := red.Add(color.BgWhite)
whiteBackground.Println("Red text with white background.")
```

### Use your own output (io.Writer)

```go
// Use your own io.Writer output
color.New(color.FgBlue).Fprintln(myWriter, "blue color!")

blue := color.New(color.FgBlue)
blue.Fprint(writer, "This will print text in blue.")
```

### Custom print functions (PrintFunc)

```go
// Create a custom print function for convenience
red := color.New(color.FgRed).PrintfFunc()
red("Warning")
red("Error: %s", err)

// Mix up multiple attributes
notice := color.New(color.Bold, color.FgGreen).PrintlnFunc()
notice("Don't forget this...")
```

### Custom fprint functions (FprintFunc)

```go
blue := color.New(FgBlue).FprintfFunc()
blue(myWriter, "important notice: %s", stars)

// Mix up with multiple attributes
success := color.New(color.Bold, color.FgGreen).FprintlnFunc()
success(myWriter, "Don't forget this...")
```

### Insert into noncolor strings (SprintFunc)

```go
// Create SprintXxx functions to mix strings with other non-colorized strings:
yellow := color.New(color.FgYellow).SprintFunc()
red := color.New(color.FgRed).SprintFunc()
fmt.Printf("This is a %s and this is %s.\n", yellow("warning"), red("error"))

info := color.New(color.FgWhite, color.BgGreen).SprintFunc()
fmt.Printf("This %s rocks!\n", info("package"))

// Use helper functions
fmt.Println("This", color.RedString("warning"), "should be not neglected.")
fmt.Printf("%v %v\n", color.GreenString("Info:"), "an important message.")

// Windows supported too! Just don't forget to change the output to color.Output
fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS"))
```

### Plug into existing code

```go
// Use handy standard colors
color.Set(color.FgYellow)

fmt.Println("Existing text will now be in yellow")
fmt.Printf("This one %s\n", "too")

color.Unset() // Don't forget to unset

// You can mix up parameters
color.Set(color.FgMagenta, color.Bold)
defer color.Unset() // Use it in your function

fmt.Println("All text will now be bold magenta.")
```

### Disable/Enable color

There might be a case where you want to explicitly disable/enable color output. the
`go-isatty` package will automatically disable color output for non-tty output streams
(for example if the output were piped directly to `less`)

`Color` has support to disable/enable colors both globally and for single color
definitions. For example suppose you have a CLI app and a `--no-color` bool flag. You
can easily disable the color output with:

```go
var flagNoColor = flag.Bool("no-color", false, "Disable color output")

if *flagNoColor {
	color.NoColor = true // disables colorized output
}
```

It also has support for single color definitions (local). You can
disable/enable color output on the fly:

```go
c := color.New(color.FgCyan)
c.Println("Prints cyan text")

c.DisableColor()
c.Println("This is printed without any color")

c.EnableColor()
c.Println("This prints again cyan...")
```

## Todo

* Save/Return previous values
* Evaluate fmt.Formatter interface

## Credits

* [Fatih Arslan](https://github.com/fatih)
* Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable)

## License

The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details
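The README fragments above are not complete programs. As a small self-contained complement, using only APIs the README itself documents (the flag name and messages here are chosen purely for illustration), a full example might look like:

```go
package main

import (
	"flag"
	"fmt"

	"github.com/fatih/color"
)

func main() {
	// Hypothetical CLI flag, mirroring the README's --no-color example.
	noColor := flag.Bool("no-color", false, "disable color output")
	flag.Parse()
	if *noColor {
		color.NoColor = true // disables colorized output globally
	}

	// Reusable bold yellow string helper.
	warn := color.New(color.FgYellow, color.Bold).SprintFunc()

	color.Cyan("starting up...")
	fmt.Printf("status: %s\n", warn("degraded"))
}
```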
@@ -0,0 +1,603 @@
package color

import (
	"fmt"
	"io"
	"os"
	"strconv"
	"strings"
	"sync"

	"github.com/mattn/go-colorable"
	"github.com/mattn/go-isatty"
)

var (
	// NoColor defines if the output is colorized or not. It's dynamically set to
	// false or true based on the stdout's file descriptor referring to a terminal
	// or not. This is a global option and affects all colors. For more control
	// over each color block use the methods DisableColor() individually.
	NoColor = os.Getenv("TERM") == "dumb" ||
		(!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd()))

	// Output defines the standard output of the print functions. By default
	// os.Stdout is used.
	Output = colorable.NewColorableStdout()

	// Error defines a color supporting writer for os.Stderr.
	Error = colorable.NewColorableStderr()

	// colorsCache is used to reduce the count of created Color objects and
	// allows to reuse already created objects with required Attribute.
	colorsCache   = make(map[Attribute]*Color)
	colorsCacheMu sync.Mutex // protects colorsCache
)

// Color defines a custom color object which is defined by SGR parameters.
type Color struct {
	params  []Attribute
	noColor *bool
}

// Attribute defines a single SGR Code
type Attribute int

const escape = "\x1b"

// Base attributes
const (
	Reset Attribute = iota
	Bold
	Faint
	Italic
	Underline
	BlinkSlow
	BlinkRapid
	ReverseVideo
	Concealed
	CrossedOut
)

// Foreground text colors
const (
	FgBlack Attribute = iota + 30
	FgRed
	FgGreen
	FgYellow
	FgBlue
	FgMagenta
	FgCyan
	FgWhite
)

// Foreground Hi-Intensity text colors
const (
	FgHiBlack Attribute = iota + 90
	FgHiRed
	FgHiGreen
	FgHiYellow
	FgHiBlue
	FgHiMagenta
	FgHiCyan
	FgHiWhite
)

// Background text colors
const (
	BgBlack Attribute = iota + 40
	BgRed
	BgGreen
	BgYellow
	BgBlue
	BgMagenta
	BgCyan
	BgWhite
)

// Background Hi-Intensity text colors
const (
	BgHiBlack Attribute = iota + 100
	BgHiRed
	BgHiGreen
	BgHiYellow
	BgHiBlue
	BgHiMagenta
	BgHiCyan
	BgHiWhite
)

// New returns a newly created color object.
func New(value ...Attribute) *Color {
	c := &Color{params: make([]Attribute, 0)}
	c.Add(value...)
	return c
}

// Set sets the given parameters immediately. It will change the color of
// output with the given SGR parameters until color.Unset() is called.
func Set(p ...Attribute) *Color {
	c := New(p...)
	c.Set()
	return c
}

// Unset resets all escape attributes and clears the output. Usually should
// be called after Set().
func Unset() {
	if NoColor {
		return
	}

	fmt.Fprintf(Output, "%s[%dm", escape, Reset)
}

// Set sets the SGR sequence.
func (c *Color) Set() *Color {
	if c.isNoColorSet() {
		return c
	}

	fmt.Fprintf(Output, c.format())
	return c
}

func (c *Color) unset() {
	if c.isNoColorSet() {
		return
	}

	Unset()
}

func (c *Color) setWriter(w io.Writer) *Color {
	if c.isNoColorSet() {
		return c
	}

	fmt.Fprintf(w, c.format())
	return c
}

func (c *Color) unsetWriter(w io.Writer) {
	if c.isNoColorSet() {
		return
	}

	if NoColor {
		return
	}

	fmt.Fprintf(w, "%s[%dm", escape, Reset)
}

// Add is used to chain SGR parameters. Use as many as parameters to combine
// and create custom color objects. Example: Add(color.FgRed, color.Underline).
func (c *Color) Add(value ...Attribute) *Color {
	c.params = append(c.params, value...)
	return c
}

func (c *Color) prepend(value Attribute) {
	c.params = append(c.params, 0)
	copy(c.params[1:], c.params[0:])
	c.params[0] = value
}

// Fprint formats using the default formats for its operands and writes to w.
// Spaces are added between operands when neither is a string.
// It returns the number of bytes written and any write error encountered.
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
	c.setWriter(w)
	defer c.unsetWriter(w)

	return fmt.Fprint(w, a...)
}

// Print formats using the default formats for its operands and writes to
// standard output. Spaces are added between operands when neither is a
// string. It returns the number of bytes written and any write error
// encountered. This is the standard fmt.Print() method wrapped with the given
// color.
func (c *Color) Print(a ...interface{}) (n int, err error) {
	c.Set()
	defer c.unset()

	return fmt.Fprint(Output, a...)
}

// Fprintf formats according to a format specifier and writes to w.
// It returns the number of bytes written and any write error encountered.
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
	c.setWriter(w)
	defer c.unsetWriter(w)

	return fmt.Fprintf(w, format, a...)
}

// Printf formats according to a format specifier and writes to standard output.
// It returns the number of bytes written and any write error encountered.
// This is the standard fmt.Printf() method wrapped with the given color.
func (c *Color) Printf(format string, a ...interface{}) (n int, err error) {
	c.Set()
	defer c.unset()

	return fmt.Fprintf(Output, format, a...)
}

// Fprintln formats using the default formats for its operands and writes to w.
// Spaces are always added between operands and a newline is appended.
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
	c.setWriter(w)
	defer c.unsetWriter(w)

	return fmt.Fprintln(w, a...)
}

// Println formats using the default formats for its operands and writes to
// standard output. Spaces are always added between operands and a newline is
// appended. It returns the number of bytes written and any write error
// encountered. This is the standard fmt.Print() method wrapped with the given
// color.
func (c *Color) Println(a ...interface{}) (n int, err error) {
	c.Set()
	defer c.unset()

	return fmt.Fprintln(Output, a...)
}

// Sprint is just like Print, but returns a string instead of printing it.
func (c *Color) Sprint(a ...interface{}) string {
	return c.wrap(fmt.Sprint(a...))
}

// Sprintln is just like Println, but returns a string instead of printing it.
func (c *Color) Sprintln(a ...interface{}) string {
	return c.wrap(fmt.Sprintln(a...))
}

// Sprintf is just like Printf, but returns a string instead of printing it.
func (c *Color) Sprintf(format string, a ...interface{}) string {
	return c.wrap(fmt.Sprintf(format, a...))
}

// FprintFunc returns a new function that prints the passed arguments as
// colorized with color.Fprint().
func (c *Color) FprintFunc() func(w io.Writer, a ...interface{}) {
	return func(w io.Writer, a ...interface{}) {
		c.Fprint(w, a...)
	}
}

// PrintFunc returns a new function that prints the passed arguments as
// colorized with color.Print().
func (c *Color) PrintFunc() func(a ...interface{}) {
	return func(a ...interface{}) {
		c.Print(a...)
	}
}

// FprintfFunc returns a new function that prints the passed arguments as
// colorized with color.Fprintf().
func (c *Color) FprintfFunc() func(w io.Writer, format string, a ...interface{}) {
	return func(w io.Writer, format string, a ...interface{}) {
		c.Fprintf(w, format, a...)
	}
}

// PrintfFunc returns a new function that prints the passed arguments as
// colorized with color.Printf().
func (c *Color) PrintfFunc() func(format string, a ...interface{}) {
	return func(format string, a ...interface{}) {
		c.Printf(format, a...)
	}
}

// FprintlnFunc returns a new function that prints the passed arguments as
// colorized with color.Fprintln().
func (c *Color) FprintlnFunc() func(w io.Writer, a ...interface{}) {
	return func(w io.Writer, a ...interface{}) {
		c.Fprintln(w, a...)
	}
}

// PrintlnFunc returns a new function that prints the passed arguments as
// colorized with color.Println().
func (c *Color) PrintlnFunc() func(a ...interface{}) {
	return func(a ...interface{}) {
		c.Println(a...)
	}
}

// SprintFunc returns a new function that returns colorized strings for the
// given arguments with fmt.Sprint(). Useful to put into or mix into other
// string. Windows users should use this in conjunction with color.Output, example:
//
//	put := New(FgYellow).SprintFunc()
//	fmt.Fprintf(color.Output, "This is a %s", put("warning"))
func (c *Color) SprintFunc() func(a ...interface{}) string {
	return func(a ...interface{}) string {
		return c.wrap(fmt.Sprint(a...))
	}
}

// SprintfFunc returns a new function that returns colorized strings for the
// given arguments with fmt.Sprintf(). Useful to put into or mix into other
// string. Windows users should use this in conjunction with color.Output.
func (c *Color) SprintfFunc() func(format string, a ...interface{}) string {
	return func(format string, a ...interface{}) string {
		return c.wrap(fmt.Sprintf(format, a...))
	}
}

// SprintlnFunc returns a new function that returns colorized strings for the
// given arguments with fmt.Sprintln(). Useful to put into or mix into other
// string. Windows users should use this in conjunction with color.Output.
func (c *Color) SprintlnFunc() func(a ...interface{}) string {
	return func(a ...interface{}) string {
		return c.wrap(fmt.Sprintln(a...))
	}
}

// sequence returns a formatted SGR sequence to be plugged into a "\x1b[...m"
// an example output might be: "1;36" -> bold cyan
func (c *Color) sequence() string {
	format := make([]string, len(c.params))
	for i, v := range c.params {
		format[i] = strconv.Itoa(int(v))
	}

	return strings.Join(format, ";")
}

// wrap wraps the s string with the colors attributes. The string is ready to
// be printed.
func (c *Color) wrap(s string) string {
	if c.isNoColorSet() {
		return s
	}

	return c.format() + s + c.unformat()
}

func (c *Color) format() string {
	return fmt.Sprintf("%s[%sm", escape, c.sequence())
}

func (c *Color) unformat() string {
	return fmt.Sprintf("%s[%dm", escape, Reset)
}

// DisableColor disables the color output. Useful to not change any existing
// code and still being able to output. Can be used for flags like
// "--no-color". To enable back use EnableColor() method.
func (c *Color) DisableColor() {
	c.noColor = boolPtr(true)
}

// EnableColor enables the color output. Use it in conjunction with
// DisableColor(). Otherwise this method has no side effects.
func (c *Color) EnableColor() {
	c.noColor = boolPtr(false)
}

func (c *Color) isNoColorSet() bool {
	// check first if we have user setted action
	if c.noColor != nil {
		return *c.noColor
	}

	// if not return the global option, which is disabled by default
	return NoColor
}

// Equals returns a boolean value indicating whether two colors are equal.
func (c *Color) Equals(c2 *Color) bool {
	if len(c.params) != len(c2.params) {
		return false
	}

	for _, attr := range c.params {
		if !c2.attrExists(attr) {
			return false
		}
	}

	return true
}

func (c *Color) attrExists(a Attribute) bool {
	for _, attr := range c.params {
		if attr == a {
			return true
		}
	}

	return false
}

func boolPtr(v bool) *bool {
	return &v
}

func getCachedColor(p Attribute) *Color {
	colorsCacheMu.Lock()
	defer colorsCacheMu.Unlock()

	c, ok := colorsCache[p]
	if !ok {
		c = New(p)
		colorsCache[p] = c
	}

	return c
}

func colorPrint(format string, p Attribute, a ...interface{}) {
	c := getCachedColor(p)

	if !strings.HasSuffix(format, "\n") {
		format += "\n"
	}

	if len(a) == 0 {
		c.Print(format)
	} else {
		c.Printf(format, a...)
	}
}

func colorString(format string, p Attribute, a ...interface{}) string {
	c := getCachedColor(p)

	if len(a) == 0 {
		return c.SprintFunc()(format)
	}

	return c.SprintfFunc()(format, a...)
}

// Black is a convenient helper function to print with black foreground. A
// newline is appended to format by default.
func Black(format string, a ...interface{}) { colorPrint(format, FgBlack, a...) }

// Red is a convenient helper function to print with red foreground. A
// newline is appended to format by default.
func Red(format string, a ...interface{}) { colorPrint(format, FgRed, a...) }

// Green is a convenient helper function to print with green foreground. A
// newline is appended to format by default.
func Green(format string, a ...interface{}) { colorPrint(format, FgGreen, a...) }

// Yellow is a convenient helper function to print with yellow foreground.
// A newline is appended to format by default.
func Yellow(format string, a ...interface{}) { colorPrint(format, FgYellow, a...) }

// Blue is a convenient helper function to print with blue foreground. A
// newline is appended to format by default.
func Blue(format string, a ...interface{}) { colorPrint(format, FgBlue, a...) }

// Magenta is a convenient helper function to print with magenta foreground.
// A newline is appended to format by default.
func Magenta(format string, a ...interface{}) { colorPrint(format, FgMagenta, a...) }

// Cyan is a convenient helper function to print with cyan foreground. A
// newline is appended to format by default.
func Cyan(format string, a ...interface{}) { colorPrint(format, FgCyan, a...) }

// White is a convenient helper function to print with white foreground. A
// newline is appended to format by default.
func White(format string, a ...interface{}) { colorPrint(format, FgWhite, a...) }

// BlackString is a convenient helper function to return a string with black
// foreground.
func BlackString(format string, a ...interface{}) string { return colorString(format, FgBlack, a...) }

// RedString is a convenient helper function to return a string with red
// foreground.
func RedString(format string, a ...interface{}) string { return colorString(format, FgRed, a...) }

// GreenString is a convenient helper function to return a string with green
// foreground.
func GreenString(format string, a ...interface{}) string { return colorString(format, FgGreen, a...) }

// YellowString is a convenient helper function to return a string with yellow
// foreground.
func YellowString(format string, a ...interface{}) string { return colorString(format, FgYellow, a...) }

// BlueString is a convenient helper function to return a string with blue
// foreground.
func BlueString(format string, a ...interface{}) string { return colorString(format, FgBlue, a...) }

// MagentaString is a convenient helper function to return a string with magenta
// foreground.
func MagentaString(format string, a ...interface{}) string {
	return colorString(format, FgMagenta, a...)
| } | |||||
| // CyanString is a convenient helper function to return a string with cyan | |||||
| // foreground. | |||||
| func CyanString(format string, a ...interface{}) string { return colorString(format, FgCyan, a...) } | |||||
| // WhiteString is a convenient helper function to return a string with white | |||||
| // foreground. | |||||
| func WhiteString(format string, a ...interface{}) string { return colorString(format, FgWhite, a...) } | |||||
| // HiBlack is a convenient helper function to print with hi-intensity black foreground. A | |||||
| // newline is appended to format by default. | |||||
| func HiBlack(format string, a ...interface{}) { colorPrint(format, FgHiBlack, a...) } | |||||
| // HiRed is a convenient helper function to print with hi-intensity red foreground. A | |||||
| // newline is appended to format by default. | |||||
| func HiRed(format string, a ...interface{}) { colorPrint(format, FgHiRed, a...) } | |||||
| // HiGreen is a convenient helper function to print with hi-intensity green foreground. A | |||||
| // newline is appended to format by default. | |||||
| func HiGreen(format string, a ...interface{}) { colorPrint(format, FgHiGreen, a...) } | |||||
| // HiYellow is a convenient helper function to print with hi-intensity yellow foreground. | |||||
| // A newline is appended to format by default. | |||||
| func HiYellow(format string, a ...interface{}) { colorPrint(format, FgHiYellow, a...) } | |||||
| // HiBlue is a convenient helper function to print with hi-intensity blue foreground. A | |||||
| // newline is appended to format by default. | |||||
| func HiBlue(format string, a ...interface{}) { colorPrint(format, FgHiBlue, a...) } | |||||
| // HiMagenta is a convenient helper function to print with hi-intensity magenta foreground. | |||||
| // A newline is appended to format by default. | |||||
| func HiMagenta(format string, a ...interface{}) { colorPrint(format, FgHiMagenta, a...) } | |||||
| // HiCyan is a convenient helper function to print with hi-intensity cyan foreground. A | |||||
| // newline is appended to format by default. | |||||
| func HiCyan(format string, a ...interface{}) { colorPrint(format, FgHiCyan, a...) } | |||||
| // HiWhite is a convenient helper function to print with hi-intensity white foreground. A | |||||
| // newline is appended to format by default. | |||||
| func HiWhite(format string, a ...interface{}) { colorPrint(format, FgHiWhite, a...) } | |||||
| // HiBlackString is a convenient helper function to return a string with hi-intensity black | |||||
| // foreground. | |||||
| func HiBlackString(format string, a ...interface{}) string { | |||||
| return colorString(format, FgHiBlack, a...) | |||||
| } | |||||
| // HiRedString is a convenient helper function to return a string with hi-intensity red | |||||
| // foreground. | |||||
| func HiRedString(format string, a ...interface{}) string { return colorString(format, FgHiRed, a...) } | |||||
| // HiGreenString is a convenient helper function to return a string with hi-intensity green | |||||
| // foreground. | |||||
| func HiGreenString(format string, a ...interface{}) string { | |||||
| return colorString(format, FgHiGreen, a...) | |||||
| } | |||||
| // HiYellowString is a convenient helper function to return a string with hi-intensity yellow | |||||
| // foreground. | |||||
| func HiYellowString(format string, a ...interface{}) string { | |||||
| return colorString(format, FgHiYellow, a...) | |||||
| } | |||||
| // HiBlueString is a convenient helper function to return a string with hi-intensity blue | |||||
| // foreground. | |||||
| func HiBlueString(format string, a ...interface{}) string { return colorString(format, FgHiBlue, a...) } | |||||
| // HiMagentaString is a convenient helper function to return a string with hi-intensity magenta | |||||
| // foreground. | |||||
| func HiMagentaString(format string, a ...interface{}) string { | |||||
| return colorString(format, FgHiMagenta, a...) | |||||
| } | |||||
| // HiCyanString is a convenient helper function to return a string with hi-intensity cyan | |||||
| // foreground. | |||||
| func HiCyanString(format string, a ...interface{}) string { return colorString(format, FgHiCyan, a...) } | |||||
| // HiWhiteString is a convenient helper function to return a string with hi-intensity white | |||||
| // foreground. | |||||
| func HiWhiteString(format string, a ...interface{}) string { | |||||
| return colorString(format, FgHiWhite, a...) | |||||
| } | |||||
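As a quick illustration of how the package-level helpers and the SprintFunc-style helpers above are typically combined, here is a minimal sketch; it assumes only the fatih/color API defined in this file and is not part of the vendored sources.

```go
package main

import (
	"fmt"

	"github.com/fatih/color"
)

func main() {
	// Package-level helpers append a newline and print to the package's output.
	color.Red("build failed: %d errors", 3)
	color.HiGreen("all tests passed")

	// SprintFunc helpers return colorized strings that can be mixed into
	// ordinary fmt output; on Windows, write to color.Output.
	warn := color.New(color.FgYellow, color.Bold).SprintFunc()
	fmt.Fprintf(color.Output, "status: %s\n", warn("degraded"))
}
```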
| @@ -0,0 +1,133 @@ | |||||
| /* | |||||
| Package color is an ANSI color package to output colorized or SGR defined | |||||
| output to the standard output. The API can be used in several ways; pick one | |||||
| that suits you. | |||||
| Use simple and default helper functions with predefined foreground colors: | |||||
| color.Cyan("Prints text in cyan.") | |||||
| // a newline will be appended automatically | |||||
| color.Blue("Prints %s in blue.", "text") | |||||
| // More default foreground colors.. | |||||
| color.Red("We have red") | |||||
| color.Yellow("Yellow color too!") | |||||
| color.Magenta("And many others ..") | |||||
| // Hi-intensity colors | |||||
| color.HiGreen("Bright green color.") | |||||
| color.HiBlack("Bright black means gray..") | |||||
| color.HiWhite("Shiny white color!") | |||||
| However there are times where custom color mixes are required. Below are some | |||||
| examples to create custom color objects and use the print functions of each | |||||
| separate color object. | |||||
| // Create a new color object | |||||
| c := color.New(color.FgCyan).Add(color.Underline) | |||||
| c.Println("Prints cyan text with an underline.") | |||||
| // Or just add them to New() | |||||
| d := color.New(color.FgCyan, color.Bold) | |||||
| d.Printf("This prints bold cyan %s\n", "too!.") | |||||
| // Mix up foreground and background colors, create new mixes! | |||||
| red := color.New(color.FgRed) | |||||
| boldRed := red.Add(color.Bold) | |||||
| boldRed.Println("This will print text in bold red.") | |||||
| whiteBackground := red.Add(color.BgWhite) | |||||
| whiteBackground.Println("Red text with White background.") | |||||
| // Use your own io.Writer output | |||||
| color.New(color.FgBlue).Fprintln(myWriter, "blue color!") | |||||
| blue := color.New(color.FgBlue) | |||||
| blue.Fprint(myWriter, "This will print text in blue.") | |||||
| You can create PrintXxx functions to simplify even more: | |||||
| // Create a custom print function for convenience | |||||
| red := color.New(color.FgRed).PrintfFunc() | |||||
| red("warning") | |||||
| red("error: %s", err) | |||||
| // Mix up multiple attributes | |||||
| notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() | |||||
| notice("don't forget this...") | |||||
| You can also use FprintXxx functions to pass your own io.Writer: | |||||
| blue := color.New(FgBlue).FprintfFunc() | |||||
| blue(myWriter, "important notice: %s", stars) | |||||
| // Mix up with multiple attributes | |||||
| success := color.New(color.Bold, color.FgGreen).FprintlnFunc() | |||||
| success(myWriter, "don't forget this...") | |||||
| Or create SprintXxx functions to mix strings with other non-colorized strings: | |||||
| yellow := New(FgYellow).SprintFunc() | |||||
| red := New(FgRed).SprintFunc() | |||||
| fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error")) | |||||
| info := New(FgWhite, BgGreen).SprintFunc() | |||||
| fmt.Printf("this %s rocks!\n", info("package")) | |||||
| Windows support is enabled by default. All Print functions work as intended. | |||||
| However, for the color.SprintXXX functions alone, users should use fmt.FprintXXX | |||||
| and set the output to color.Output: | |||||
| fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) | |||||
| info := New(FgWhite, BgGreen).SprintFunc() | |||||
| fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) | |||||
| Using it with existing code is possible. Just use the Set() method to set the | |||||
| standard output to the given parameters. That way a rewrite of the existing | |||||
| code is not required. | |||||
| // Use handy standard colors. | |||||
| color.Set(color.FgYellow) | |||||
| fmt.Println("Existing text will be now in Yellow") | |||||
| fmt.Printf("This one %s\n", "too") | |||||
| color.Unset() // don't forget to unset | |||||
| // You can mix up parameters | |||||
| color.Set(color.FgMagenta, color.Bold) | |||||
| defer color.Unset() // use it in your function | |||||
| fmt.Println("All text will be now bold magenta.") | |||||
| There might be a case where you want to disable color output (for example to | |||||
| pipe the standard output of your app to somewhere else). `Color` has support to | |||||
| disable colors both globally and for single color definition. For example | |||||
| suppose you have a CLI app and a `--no-color` bool flag. You can easily disable | |||||
| the color output with: | |||||
| var flagNoColor = flag.Bool("no-color", false, "Disable color output") | |||||
| if *flagNoColor { | |||||
| color.NoColor = true // disables colorized output | |||||
| } | |||||
| It also has support for single color definitions (local). You can | |||||
| disable/enable color output on the fly: | |||||
| c := color.New(color.FgCyan) | |||||
| c.Println("Prints cyan text") | |||||
| c.DisableColor() | |||||
| c.Println("This is printed without any color") | |||||
| c.EnableColor() | |||||
| c.Println("This prints again cyan...") | |||||
| */ | |||||
| package color | |||||
| @@ -0,0 +1,8 @@ | |||||
| module github.com/fatih/color | |||||
| go 1.13 | |||||
| require ( | |||||
| github.com/mattn/go-colorable v0.1.4 | |||||
| github.com/mattn/go-isatty v0.0.11 | |||||
| ) | |||||
| @@ -0,0 +1,8 @@ | |||||
| github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= | |||||
| github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= | |||||
| github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= | |||||
| github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM= | |||||
| github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= | |||||
| golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | |||||
| golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= | |||||
| golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||||
| @@ -0,0 +1,60 @@ | |||||
| Copyright (c) 2017, Fatih Arslan | |||||
| All rights reserved. | |||||
| Redistribution and use in source and binary forms, with or without | |||||
| modification, are permitted provided that the following conditions are met: | |||||
| * Redistributions of source code must retain the above copyright notice, this | |||||
| list of conditions and the following disclaimer. | |||||
| * Redistributions in binary form must reproduce the above copyright notice, | |||||
| this list of conditions and the following disclaimer in the documentation | |||||
| and/or other materials provided with the distribution. | |||||
| * Neither the name of structtag nor the names of its | |||||
| contributors may be used to endorse or promote products derived from | |||||
| this software without specific prior written permission. | |||||
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" | |||||
| AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | |||||
| IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | |||||
| DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE | |||||
| FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL | |||||
| DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | |||||
| SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | |||||
| CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, | |||||
| OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |||||
| OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |||||
| This software includes some portions from Go. Go is used under the terms of the | |||||
| BSD like license. | |||||
| Copyright (c) 2012 The Go Authors. All rights reserved. | |||||
| Redistribution and use in source and binary forms, with or without | |||||
| modification, are permitted provided that the following conditions are | |||||
| met: | |||||
| * Redistributions of source code must retain the above copyright | |||||
| notice, this list of conditions and the following disclaimer. | |||||
| * Redistributions in binary form must reproduce the above | |||||
| copyright notice, this list of conditions and the following disclaimer | |||||
| in the documentation and/or other materials provided with the | |||||
| distribution. | |||||
| * Neither the name of Google Inc. nor the names of its | |||||
| contributors may be used to endorse or promote products derived from | |||||
| this software without specific prior written permission. | |||||
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |||||
| "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |||||
| LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |||||
| A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |||||
| OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |||||
| SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |||||
| LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |||||
| DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |||||
| THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |||||
| (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |||||
| OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |||||
| The Go gopher was designed by Renee French. http://reneefrench.blogspot.com/ The design is licensed under the Creative Commons 3.0 Attributions license. Read this article for more details: https://blog.golang.org/gopher | |||||
| @@ -0,0 +1,73 @@ | |||||
| # structtag [](http://godoc.org/github.com/fatih/structtag) | |||||
| structtag provides an easy way of parsing and manipulating struct tag fields. | |||||
| Please vendor the library as it might change in future versions. | |||||
| # Install | |||||
| ```bash | |||||
| go get github.com/fatih/structtag | |||||
| ``` | |||||
| # Example | |||||
| ```go | |||||
| package main | |||||
| import ( | |||||
| "fmt" | |||||
| "reflect" | |||||
| "sort" | |||||
| "github.com/fatih/structtag" | |||||
| ) | |||||
| func main() { | |||||
| type t struct { | |||||
| t string `json:"foo,omitempty,string" xml:"foo"` | |||||
| } | |||||
| // get field tag | |||||
| tag := reflect.TypeOf(t{}).Field(0).Tag | |||||
| // ... and start using structtag by parsing the tag | |||||
| tags, err := structtag.Parse(string(tag)) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| // iterate over all tags | |||||
| for _, t := range tags.Tags() { | |||||
| fmt.Printf("tag: %+v\n", t) | |||||
| } | |||||
| // get a single tag | |||||
| jsonTag, err := tags.Get("json") | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| fmt.Println(jsonTag) // Output: json:"foo,omitempty,string" | |||||
| fmt.Println(jsonTag.Key) // Output: json | |||||
| fmt.Println(jsonTag.Name) // Output: foo | |||||
| fmt.Println(jsonTag.Options) // Output: [omitempty string] | |||||
| // change existing tag | |||||
| jsonTag.Name = "foo_bar" | |||||
| jsonTag.Options = nil | |||||
| tags.Set(jsonTag) | |||||
| // add new tag | |||||
| tags.Set(&structtag.Tag{ | |||||
| Key: "hcl", | |||||
| Name: "foo", | |||||
| Options: []string{"squash"}, | |||||
| }) | |||||
| // print the tags | |||||
| fmt.Println(tags) // Output: json:"foo_bar" xml:"foo" hcl:"foo,squash" | |||||
| // sort tags according to keys | |||||
| sort.Sort(tags) | |||||
| fmt.Println(tags) // Output: hcl:"foo,squash" json:"foo_bar" xml:"foo" | |||||
| } | |||||
| ``` | |||||
| @@ -0,0 +1,3 @@ | |||||
| module github.com/fatih/structtag | |||||
| go 1.12 | |||||
| @@ -0,0 +1,315 @@ | |||||
| package structtag | |||||
| import ( | |||||
| "bytes" | |||||
| "errors" | |||||
| "fmt" | |||||
| "strconv" | |||||
| "strings" | |||||
| ) | |||||
| var ( | |||||
| errTagSyntax = errors.New("bad syntax for struct tag pair") | |||||
| errTagKeySyntax = errors.New("bad syntax for struct tag key") | |||||
| errTagValueSyntax = errors.New("bad syntax for struct tag value") | |||||
| errKeyNotSet = errors.New("tag key does not exist") | |||||
| errTagNotExist = errors.New("tag does not exist") | |||||
| errTagKeyMismatch = errors.New("mismatch between key and tag.key") | |||||
| ) | |||||
| // Tags represents a set of tags from a single struct field | |||||
| type Tags struct { | |||||
| tags []*Tag | |||||
| } | |||||
| // Tag defines a single struct's string literal tag | |||||
| type Tag struct { | |||||
| // Key is the tag key, such as json, xml, etc. | |||||
| // i.e: `json:"foo,omitempty"`. Here key is: "json" | |||||
| Key string | |||||
| // Name is a part of the value | |||||
| // i.e: `json:"foo,omitempty"`. Here name is: "foo" | |||||
| Name string | |||||
| // Options is a part of the value. It contains a slice of tag options i.e: | |||||
| // `json:"foo,omitempty"`. Here options is: ["omitempty"] | |||||
| Options []string | |||||
| } | |||||
| // Parse parses a single struct field tag and returns the set of tags. | |||||
| func Parse(tag string) (*Tags, error) { | |||||
| var tags []*Tag | |||||
| hasTag := tag != "" | |||||
| // NOTE(arslan) following code is from reflect and vet package with some | |||||
| // modifications to collect all necessary information and extend it with | |||||
| // usable methods | |||||
| for tag != "" { | |||||
| // Skip leading space. | |||||
| i := 0 | |||||
| for i < len(tag) && tag[i] == ' ' { | |||||
| i++ | |||||
| } | |||||
| tag = tag[i:] | |||||
| if tag == "" { | |||||
| break | |||||
| } | |||||
| // Scan to colon. A space, a quote or a control character is a syntax | |||||
| // error. Strictly speaking, control chars include the range [0x7f, | |||||
| // 0x9f], not just [0x00, 0x1f], but in practice, we ignore the | |||||
| // multi-byte control characters as it is simpler to inspect the tag's | |||||
| // bytes than the tag's runes. | |||||
| i = 0 | |||||
| for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f { | |||||
| i++ | |||||
| } | |||||
| if i == 0 { | |||||
| return nil, errTagKeySyntax | |||||
| } | |||||
| if i+1 >= len(tag) || tag[i] != ':' { | |||||
| return nil, errTagSyntax | |||||
| } | |||||
| if tag[i+1] != '"' { | |||||
| return nil, errTagValueSyntax | |||||
| } | |||||
| key := string(tag[:i]) | |||||
| tag = tag[i+1:] | |||||
| // Scan quoted string to find value. | |||||
| i = 1 | |||||
| for i < len(tag) && tag[i] != '"' { | |||||
| if tag[i] == '\\' { | |||||
| i++ | |||||
| } | |||||
| i++ | |||||
| } | |||||
| if i >= len(tag) { | |||||
| return nil, errTagValueSyntax | |||||
| } | |||||
| qvalue := string(tag[:i+1]) | |||||
| tag = tag[i+1:] | |||||
| value, err := strconv.Unquote(qvalue) | |||||
| if err != nil { | |||||
| return nil, errTagValueSyntax | |||||
| } | |||||
| res := strings.Split(value, ",") | |||||
| name := res[0] | |||||
| options := res[1:] | |||||
| if len(options) == 0 { | |||||
| options = nil | |||||
| } | |||||
| tags = append(tags, &Tag{ | |||||
| Key: key, | |||||
| Name: name, | |||||
| Options: options, | |||||
| }) | |||||
| } | |||||
| if hasTag && len(tags) == 0 { | |||||
| return nil, nil | |||||
| } | |||||
| return &Tags{ | |||||
| tags: tags, | |||||
| }, nil | |||||
| } | |||||
| // Get returns the tag associated with the given key. If the key is not | |||||
| // present, a nil Tag and errTagNotExist are returned. | |||||
| func (t *Tags) Get(key string) (*Tag, error) { | |||||
| for _, tag := range t.tags { | |||||
| if tag.Key == key { | |||||
| return tag, nil | |||||
| } | |||||
| } | |||||
| return nil, errTagNotExist | |||||
| } | |||||
| // Set sets the given tag. If a tag with the same key already exists, it is overridden. | |||||
| func (t *Tags) Set(tag *Tag) error { | |||||
| if tag.Key == "" { | |||||
| return errKeyNotSet | |||||
| } | |||||
| added := false | |||||
| for i, tg := range t.tags { | |||||
| if tg.Key == tag.Key { | |||||
| added = true | |||||
| t.tags[i] = tag | |||||
| } | |||||
| } | |||||
| if !added { | |||||
| // this means this is a new tag, add it | |||||
| t.tags = append(t.tags, tag) | |||||
| } | |||||
| return nil | |||||
| } | |||||
| // AddOptions adds the given options for the given key. Options that already | |||||
| // exist are not added again. | |||||
| func (t *Tags) AddOptions(key string, options ...string) { | |||||
| for i, tag := range t.tags { | |||||
| if tag.Key != key { | |||||
| continue | |||||
| } | |||||
| for _, opt := range options { | |||||
| if !tag.HasOption(opt) { | |||||
| tag.Options = append(tag.Options, opt) | |||||
| } | |||||
| } | |||||
| t.tags[i] = tag | |||||
| } | |||||
| } | |||||
| // DeleteOptions deletes the given options for the given key | |||||
| func (t *Tags) DeleteOptions(key string, options ...string) { | |||||
| hasOption := func(option string) bool { | |||||
| for _, opt := range options { | |||||
| if opt == option { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| for i, tag := range t.tags { | |||||
| if tag.Key != key { | |||||
| continue | |||||
| } | |||||
| var updated []string | |||||
| for _, opt := range tag.Options { | |||||
| if !hasOption(opt) { | |||||
| updated = append(updated, opt) | |||||
| } | |||||
| } | |||||
| tag.Options = updated | |||||
| t.tags[i] = tag | |||||
| } | |||||
| } | |||||
| // Delete deletes the tags for the given keys | |||||
| func (t *Tags) Delete(keys ...string) { | |||||
| hasKey := func(key string) bool { | |||||
| for _, k := range keys { | |||||
| if k == key { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| var updated []*Tag | |||||
| for _, tag := range t.tags { | |||||
| if !hasKey(tag.Key) { | |||||
| updated = append(updated, tag) | |||||
| } | |||||
| } | |||||
| t.tags = updated | |||||
| } | |||||
| // Tags returns a slice of tags. The order is the original tag order unless it | |||||
| // was changed. | |||||
| func (t *Tags) Tags() []*Tag { | |||||
| return t.tags | |||||
| } | |||||
| // Keys returns a slice of the tag keys. The order is the original tag order | |||||
| // unless it was changed. | |||||
| func (t *Tags) Keys() []string { | |||||
| var keys []string | |||||
| for _, tag := range t.tags { | |||||
| keys = append(keys, tag.Key) | |||||
| } | |||||
| return keys | |||||
| } | |||||
| // String reassembles the tags into a valid literal tag field representation | |||||
| func (t *Tags) String() string { | |||||
| tags := t.Tags() | |||||
| if len(tags) == 0 { | |||||
| return "" | |||||
| } | |||||
| var buf bytes.Buffer | |||||
| for i, tag := range t.Tags() { | |||||
| buf.WriteString(tag.String()) | |||||
| if i != len(tags)-1 { | |||||
| buf.WriteString(" ") | |||||
| } | |||||
| } | |||||
| return buf.String() | |||||
| } | |||||
| // HasOption returns true if the given option is available in options | |||||
| func (t *Tag) HasOption(opt string) bool { | |||||
| for _, tagOpt := range t.Options { | |||||
| if tagOpt == opt { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| // Value returns the raw value of the tag, i.e. if the tag is | |||||
| // `json:"foo,omitempty"`, the Value is "foo,omitempty" | |||||
| func (t *Tag) Value() string { | |||||
| options := strings.Join(t.Options, ",") | |||||
| if options != "" { | |||||
| return fmt.Sprintf(`%s,%s`, t.Name, options) | |||||
| } | |||||
| return t.Name | |||||
| } | |||||
| // String reassembles the tag into a valid tag field representation | |||||
| func (t *Tag) String() string { | |||||
| return fmt.Sprintf(`%s:%q`, t.Key, t.Value()) | |||||
| } | |||||
| // GoString implements the fmt.GoStringer interface | |||||
| func (t *Tag) GoString() string { | |||||
| template := `{ | |||||
| Key: '%s', | |||||
| Name: '%s', | |||||
| Option: '%s', | |||||
| }` | |||||
| if t.Options == nil { | |||||
| return fmt.Sprintf(template, t.Key, t.Name, "nil") | |||||
| } | |||||
| options := strings.Join(t.Options, ",") | |||||
| return fmt.Sprintf(template, t.Key, t.Name, options) | |||||
| } | |||||
| func (t *Tags) Len() int { | |||||
| return len(t.tags) | |||||
| } | |||||
| func (t *Tags) Less(i int, j int) bool { | |||||
| return t.tags[i].Key < t.tags[j].Key | |||||
| } | |||||
| func (t *Tags) Swap(i int, j int) { | |||||
| t.tags[i], t.tags[j] = t.tags[j], t.tags[i] | |||||
| } | |||||
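The README example earlier in this diff shows Parse, Get and Set; the option-level helpers defined above (AddOptions, DeleteOptions, Delete) can be exercised the same way. A small sketch, assuming only the structtag API shown in this file and not part of the vendored sources:

```go
package main

import (
	"fmt"

	"github.com/fatih/structtag"
)

func main() {
	tags, err := structtag.Parse(`json:"foo,omitempty" xml:"foo" db:"foo_id"`)
	if err != nil {
		panic(err)
	}

	// Add an option only if it is not already present.
	tags.AddOptions("json", "string")

	// Remove a single option without touching the tag name.
	tags.DeleteOptions("json", "omitempty")

	// Drop whole tags by key.
	tags.Delete("db")

	fmt.Println(tags) // Output: json:"foo,string" xml:"foo"
}
```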
| @@ -0,0 +1,9 @@ | |||||
| language: go | |||||
| go: | |||||
| - tip | |||||
| before_install: | |||||
| - go get github.com/mattn/goveralls | |||||
| - go get golang.org/x/tools/cmd/cover | |||||
| script: | |||||
| - $HOME/gopath/bin/goveralls -repotoken xnXqRGwgW3SXIguzxf90ZSK1GPYZPaGrw | |||||
| @@ -0,0 +1,21 @@ | |||||
| The MIT License (MIT) | |||||
| Copyright (c) 2016 Yasuhiro Matsumoto | |||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | |||||
| of this software and associated documentation files (the "Software"), to deal | |||||
| in the Software without restriction, including without limitation the rights | |||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||||
| copies of the Software, and to permit persons to whom the Software is | |||||
| furnished to do so, subject to the following conditions: | |||||
| The above copyright notice and this permission notice shall be included in all | |||||
| copies or substantial portions of the Software. | |||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||||
| SOFTWARE. | |||||
| @@ -0,0 +1,48 @@ | |||||
| # go-colorable | |||||
| [](http://godoc.org/github.com/mattn/go-colorable) | |||||
| [](https://travis-ci.org/mattn/go-colorable) | |||||
| [](https://coveralls.io/github/mattn/go-colorable?branch=master) | |||||
| [](https://goreportcard.com/report/mattn/go-colorable) | |||||
| Colorable writer for Windows. | |||||
| For example, most logger packages don't show colors on Windows. (ANSICON can work around this, but requiring it is not ideal.) | |||||
| This package makes it possible to handle ANSI color escape sequences on Windows. | |||||
| ## Too Bad! | |||||
|  | |||||
| ## So Good! | |||||
|  | |||||
| ## Usage | |||||
| ```go | |||||
| logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true}) | |||||
| logrus.SetOutput(colorable.NewColorableStdout()) | |||||
| logrus.Info("succeeded") | |||||
| logrus.Warn("not correct") | |||||
| logrus.Error("something error") | |||||
| logrus.Fatal("panic") | |||||
| ``` | |||||
| You can compile the above code on non-Windows OSes as well. | |||||
| ## Installation | |||||
| ``` | |||||
| $ go get github.com/mattn/go-colorable | |||||
| ``` | |||||
| # License | |||||
| MIT | |||||
| # Author | |||||
| Yasuhiro Matsumoto (a.k.a mattn) | |||||
| @@ -0,0 +1,29 @@ | |||||
| // +build appengine | |||||
| package colorable | |||||
| import ( | |||||
| "io" | |||||
| "os" | |||||
| _ "github.com/mattn/go-isatty" | |||||
| ) | |||||
| // NewColorable returns a new instance of Writer which handles escape sequences. | |||||
| func NewColorable(file *os.File) io.Writer { | |||||
| if file == nil { | |||||
| panic("nil passed instead of *os.File to NewColorable()") | |||||
| } | |||||
| return file | |||||
| } | |||||
| // NewColorableStdout returns a new instance of Writer which handles escape sequences for stdout. | |||||
| func NewColorableStdout() io.Writer { | |||||
| return os.Stdout | |||||
| } | |||||
| // NewColorableStderr returns a new instance of Writer which handles escape sequences for stderr. | |||||
| func NewColorableStderr() io.Writer { | |||||
| return os.Stderr | |||||
| } | |||||
| @@ -0,0 +1,30 @@ | |||||
| // +build !windows | |||||
| // +build !appengine | |||||
| package colorable | |||||
| import ( | |||||
| "io" | |||||
| "os" | |||||
| _ "github.com/mattn/go-isatty" | |||||
| ) | |||||
| // NewColorable returns a new instance of Writer which handles escape sequences. | |||||
| func NewColorable(file *os.File) io.Writer { | |||||
| if file == nil { | |||||
| panic("nil passed instead of *os.File to NewColorable()") | |||||
| } | |||||
| return file | |||||
| } | |||||
| // NewColorableStdout returns a new instance of Writer which handles escape sequences for stdout. | |||||
| func NewColorableStdout() io.Writer { | |||||
| return os.Stdout | |||||
| } | |||||
| // NewColorableStderr returns a new instance of Writer which handles escape sequences for stderr. | |||||
| func NewColorableStderr() io.Writer { | |||||
| return os.Stderr | |||||
| } | |||||
| @@ -0,0 +1,3 @@ | |||||
| module github.com/mattn/go-colorable | |||||
| require github.com/mattn/go-isatty v0.0.8 | |||||
| @@ -0,0 +1,4 @@ | |||||
| github.com/mattn/go-isatty v0.0.5 h1:tHXDdz1cpzGaovsTB+TVB8q90WEokoVmfMqoVcrLUgw= | |||||
| github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= | |||||
| golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8= | |||||
| golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | |||||
| @@ -0,0 +1,55 @@ | |||||
| package colorable | |||||
| import ( | |||||
| "bytes" | |||||
| "io" | |||||
| ) | |||||
| // NonColorable holds a writer and strips escape sequences from data written to it. | |||||
| type NonColorable struct { | |||||
| out io.Writer | |||||
| } | |||||
| // NewNonColorable returns a new instance of Writer which removes escape sequences before writing to the given Writer. | |||||
| func NewNonColorable(w io.Writer) io.Writer { | |||||
| return &NonColorable{out: w} | |||||
| } | |||||
| // Write writes data to the underlying writer, stripping any escape sequences. | |||||
| func (w *NonColorable) Write(data []byte) (n int, err error) { | |||||
| er := bytes.NewReader(data) | |||||
| var bw [1]byte | |||||
| loop: | |||||
| for { | |||||
| c1, err := er.ReadByte() | |||||
| if err != nil { | |||||
| break loop | |||||
| } | |||||
| if c1 != 0x1b { | |||||
| bw[0] = c1 | |||||
| w.out.Write(bw[:]) | |||||
| continue | |||||
| } | |||||
| c2, err := er.ReadByte() | |||||
| if err != nil { | |||||
| break loop | |||||
| } | |||||
| if c2 != 0x5b { | |||||
| continue | |||||
| } | |||||
| var buf bytes.Buffer | |||||
| for { | |||||
| c, err := er.ReadByte() | |||||
| if err != nil { | |||||
| break loop | |||||
| } | |||||
| if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { | |||||
| break | |||||
| } | |||||
| buf.Write([]byte(string(c))) | |||||
| } | |||||
| } | |||||
| return len(data), nil | |||||
| } | |||||
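The Write loop above forwards ordinary bytes and silently swallows CSI escape sequences (ESC followed by '[' up to the final letter). A minimal usage sketch, not part of the vendored sources, wrapping a bytes.Buffer to capture plain text:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/mattn/go-colorable"
)

func main() {
	// Anything written through the wrapper reaches the buffer with the
	// ANSI escape sequences stripped out.
	var buf bytes.Buffer
	w := colorable.NewNonColorable(&buf)

	fmt.Fprint(w, "\x1b[31mred text\x1b[0m plain text\n")
	fmt.Printf("%q\n", buf.String()) // "red text plain text\n"
}
```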
| @@ -1,3 +1,5 @@ | |||||
| module github.com/mattn/go-isatty | module github.com/mattn/go-isatty | ||||
| require golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 | |||||
| go 1.12 | |||||
| require golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 | |||||
| @@ -1,2 +1,2 @@ | |||||
| golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8= | |||||
| golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | |||||
| golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= | |||||
| golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||||
| @@ -1,4 +1,4 @@ | |||||
| // +build appengine js | |||||
| // +build appengine js nacl | |||||
| package isatty | package isatty | ||||
| @@ -0,0 +1,22 @@ | |||||
| // +build plan9 | |||||
| package isatty | |||||
| import ( | |||||
| "syscall" | |||||
| ) | |||||
| // IsTerminal returns true if the given file descriptor is a terminal. | |||||
| func IsTerminal(fd uintptr) bool { | |||||
| path, err := syscall.Fd2path(int(fd)) | |||||
| if err != nil { | |||||
| return false | |||||
| } | |||||
| return path == "/dev/cons" || path == "/mnt/term/dev/cons" | |||||
| } | |||||
| // IsCygwinTerminal returns true if the file descriptor is a cygwin or msys2 | |||||
| // terminal. On this platform it is always false. | |||||
| func IsCygwinTerminal(fd uintptr) bool { | |||||
| return false | |||||
| } | |||||
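Across all of these platform-specific files the calling pattern is the same; a small illustrative sketch (assuming only the exported isatty API shown in this diff, not part of the vendored sources) of the usual "only colorize a real terminal" check:

```go
package main

import (
	"fmt"
	"os"

	"github.com/mattn/go-isatty"
)

func main() {
	fd := os.Stdout.Fd()

	// IsTerminal covers native terminals; IsCygwinTerminal covers the
	// cygwin/msys2 pty case on Windows (always false elsewhere).
	if isatty.IsTerminal(fd) || isatty.IsCygwinTerminal(fd) {
		fmt.Println("stdout is a terminal")
	} else {
		fmt.Println("stdout is a pipe or a file")
	}
}
```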
| @@ -1,4 +1,4 @@ | |||||
| // +build linux | |||||
| // +build linux aix | |||||
| // +build !appengine | // +build !appengine | ||||
| // +build !android | // +build !android | ||||
| @@ -4,6 +4,7 @@ | |||||
| package isatty | package isatty | ||||
| import ( | import ( | ||||
| "errors" | |||||
| "strings" | "strings" | ||||
| "syscall" | "syscall" | ||||
| "unicode/utf16" | "unicode/utf16" | ||||
| @@ -11,15 +12,18 @@ import ( | |||||
| ) | ) | ||||
| const ( | const ( | ||||
| fileNameInfo uintptr = 2 | |||||
| fileTypePipe = 3 | |||||
| objectNameInfo uintptr = 1 | |||||
| fileNameInfo = 2 | |||||
| fileTypePipe = 3 | |||||
| ) | ) | ||||
| var ( | var ( | ||||
| kernel32 = syscall.NewLazyDLL("kernel32.dll") | kernel32 = syscall.NewLazyDLL("kernel32.dll") | ||||
| ntdll = syscall.NewLazyDLL("ntdll.dll") | |||||
| procGetConsoleMode = kernel32.NewProc("GetConsoleMode") | procGetConsoleMode = kernel32.NewProc("GetConsoleMode") | ||||
| procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx") | procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx") | ||||
| procGetFileType = kernel32.NewProc("GetFileType") | procGetFileType = kernel32.NewProc("GetFileType") | ||||
| procNtQueryObject = ntdll.NewProc("NtQueryObject") | |||||
| ) | ) | ||||
| func init() { | func init() { | ||||
| @@ -45,7 +49,10 @@ func isCygwinPipeName(name string) bool { | |||||
| return false | return false | ||||
| } | } | ||||
| if token[0] != `\msys` && token[0] != `\cygwin` { | |||||
| if token[0] != `\msys` && | |||||
| token[0] != `\cygwin` && | |||||
| token[0] != `\Device\NamedPipe\msys` && | |||||
| token[0] != `\Device\NamedPipe\cygwin` { | |||||
| return false | return false | ||||
| } | } | ||||
| @@ -68,11 +75,35 @@ func isCygwinPipeName(name string) bool { | |||||
| return true | return true | ||||
| } | } | ||||
| // getFileNameByHandle uses the undocumented ntdll NtQueryObject call to get the | |||||
| // full file name from a file handle. GetFileInformationByHandleEx is not | |||||
| // available before Windows Vista, so this serves as a workaround for systems | |||||
| // still running Windows XP; it also works on Windows Vista through 10. | |||||
| // See https://stackoverflow.com/a/18792477 for details. | |||||
| func getFileNameByHandle(fd uintptr) (string, error) { | |||||
| if procNtQueryObject == nil { | |||||
| return "", errors.New("ntdll.dll: NtQueryObject not supported") | |||||
| } | |||||
| var buf [4 + syscall.MAX_PATH]uint16 | |||||
| var result int | |||||
| r, _, e := syscall.Syscall6(procNtQueryObject.Addr(), 5, | |||||
| fd, objectNameInfo, uintptr(unsafe.Pointer(&buf)), uintptr(2*len(buf)), uintptr(unsafe.Pointer(&result)), 0) | |||||
| if r != 0 { | |||||
| return "", e | |||||
| } | |||||
| return string(utf16.Decode(buf[4 : 4+buf[0]/2])), nil | |||||
| } | |||||
| // IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2 | // IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2 | ||||
| // terminal. | // terminal. | ||||
| func IsCygwinTerminal(fd uintptr) bool { | func IsCygwinTerminal(fd uintptr) bool { | ||||
| if procGetFileInformationByHandleEx == nil { | if procGetFileInformationByHandleEx == nil { | ||||
| return false | |||||
| name, err := getFileNameByHandle(fd) | |||||
| if err != nil { | |||||
| return false | |||||
| } | |||||
| return isCygwinPipeName(name) | |||||
| } | } | ||||
| // Cygwin/msys's pty is a pipe. | // Cygwin/msys's pty is a pipe. | ||||
| @@ -0,0 +1,8 @@ | |||||
| language: go | |||||
| go: | |||||
| - tip | |||||
| before_install: | |||||
| - go get github.com/mattn/goveralls | |||||
| - go get golang.org/x/tools/cmd/cover | |||||
| script: | |||||
| - $HOME/gopath/bin/goveralls -repotoken lAKAWPzcGsD3A8yBX3BGGtRUdJ6CaGERL | |||||
| @@ -0,0 +1,21 @@ | |||||
| The MIT License (MIT) | |||||
| Copyright (c) 2016 Yasuhiro Matsumoto | |||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | |||||
| of this software and associated documentation files (the "Software"), to deal | |||||
| in the Software without restriction, including without limitation the rights | |||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||||
| copies of the Software, and to permit persons to whom the Software is | |||||
| furnished to do so, subject to the following conditions: | |||||
| The above copyright notice and this permission notice shall be included in all | |||||
| copies or substantial portions of the Software. | |||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||||
| SOFTWARE. | |||||
| @@ -0,0 +1,27 @@ | |||||
| go-runewidth | |||||
| ============ | |||||
| [](https://travis-ci.org/mattn/go-runewidth) | |||||
| [](https://coveralls.io/r/mattn/go-runewidth?branch=HEAD) | |||||
| [](http://godoc.org/github.com/mattn/go-runewidth) | |||||
| [](https://goreportcard.com/report/github.com/mattn/go-runewidth) | |||||
| Provides functions to get the fixed (display) width of a character or string. | |||||
| Usage | |||||
| ----- | |||||
| ```go | |||||
| runewidth.StringWidth("つのだ☆HIRO") == 12 | |||||
| ``` | |||||
| Author | |||||
| ------ | |||||
| Yasuhiro Matsumoto | |||||
| License | |||||
| ------- | |||||
| under the MIT License: http://mattn.mit-license.org/2013 | |||||
| @@ -0,0 +1,3 @@ | |||||
| module github.com/mattn/go-runewidth | |||||
| go 1.9 | |||||
| @@ -0,0 +1,258 @@ | |||||
| package runewidth | |||||
| import ( | |||||
| "os" | |||||
| ) | |||||
| //go:generate go run script/generate.go | |||||
| var ( | |||||
| // EastAsianWidth will be set true if the current locale is CJK | |||||
| EastAsianWidth bool | |||||
| // ZeroWidthJoiner is a flag; set it to true to handle UTR#51 zero-width joiner (ZWJ) sequences | |||||
| ZeroWidthJoiner bool | |||||
| // DefaultCondition is the Condition for the current locale | |||||
| DefaultCondition = &Condition{} | |||||
| ) | |||||
| func init() { | |||||
| handleEnv() | |||||
| } | |||||
| func handleEnv() { | |||||
| env := os.Getenv("RUNEWIDTH_EASTASIAN") | |||||
| if env == "" { | |||||
| EastAsianWidth = IsEastAsian() | |||||
| } else { | |||||
| EastAsianWidth = env == "1" | |||||
| } | |||||
| // update DefaultCondition | |||||
| DefaultCondition.EastAsianWidth = EastAsianWidth | |||||
| DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner | |||||
| } | |||||
| type interval struct { | |||||
| first rune | |||||
| last rune | |||||
| } | |||||
| type table []interval | |||||
| func inTables(r rune, ts ...table) bool { | |||||
| for _, t := range ts { | |||||
| if inTable(r, t) { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| func inTable(r rune, t table) bool { | |||||
| // func (t table) IncludesRune(r rune) bool { | |||||
| if r < t[0].first { | |||||
| return false | |||||
| } | |||||
| bot := 0 | |||||
| top := len(t) - 1 | |||||
| for top >= bot { | |||||
| mid := (bot + top) >> 1 | |||||
| switch { | |||||
| case t[mid].last < r: | |||||
| bot = mid + 1 | |||||
| case t[mid].first > r: | |||||
| top = mid - 1 | |||||
| default: | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| var private = table{ | |||||
| {0x00E000, 0x00F8FF}, {0x0F0000, 0x0FFFFD}, {0x100000, 0x10FFFD}, | |||||
| } | |||||
| var nonprint = table{ | |||||
| {0x0000, 0x001F}, {0x007F, 0x009F}, {0x00AD, 0x00AD}, | |||||
| {0x070F, 0x070F}, {0x180B, 0x180E}, {0x200B, 0x200F}, | |||||
| {0x2028, 0x202E}, {0x206A, 0x206F}, {0xD800, 0xDFFF}, | |||||
| {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, {0xFFFE, 0xFFFF}, | |||||
| } | |||||
| // Condition has the flag EastAsianWidth, which indicates whether the current locale is CJK or not. | |||||
| type Condition struct { | |||||
| EastAsianWidth bool | |||||
| ZeroWidthJoiner bool | |||||
| } | |||||
| // NewCondition returns a new Condition initialized from the current locale. | |||||
| func NewCondition() *Condition { | |||||
| return &Condition{ | |||||
| EastAsianWidth: EastAsianWidth, | |||||
| ZeroWidthJoiner: ZeroWidthJoiner, | |||||
| } | |||||
| } | |||||
| // RuneWidth returns the number of cells in r. | |||||
| // See http://www.unicode.org/reports/tr11/ | |||||
| func (c *Condition) RuneWidth(r rune) int { | |||||
| switch { | |||||
| case r < 0 || r > 0x10FFFF || inTables(r, nonprint, combining, notassigned): | |||||
| return 0 | |||||
| case (c.EastAsianWidth && IsAmbiguousWidth(r)) || inTables(r, doublewidth): | |||||
| return 2 | |||||
| default: | |||||
| return 1 | |||||
| } | |||||
| } | |||||
| func (c *Condition) stringWidth(s string) (width int) { | |||||
| for _, r := range []rune(s) { | |||||
| width += c.RuneWidth(r) | |||||
| } | |||||
| return width | |||||
| } | |||||
| func (c *Condition) stringWidthZeroJoiner(s string) (width int) { | |||||
| r1, r2 := rune(0), rune(0) | |||||
| for _, r := range []rune(s) { | |||||
| if r == 0xFE0E || r == 0xFE0F { | |||||
| continue | |||||
| } | |||||
| w := c.RuneWidth(r) | |||||
| if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) { | |||||
| if width < w { | |||||
| width = w | |||||
| } | |||||
| } else { | |||||
| width += w | |||||
| } | |||||
| r1, r2 = r2, r | |||||
| } | |||||
| return width | |||||
| } | |||||
| // StringWidth returns the display width of s in cells | |||||
| func (c *Condition) StringWidth(s string) (width int) { | |||||
| if c.ZeroWidthJoiner { | |||||
| return c.stringWidthZeroJoiner(s) | |||||
| } | |||||
| return c.stringWidth(s) | |||||
| } | |||||
| // Truncate returns s truncated to fit within w cells, with tail appended | |||||
| func (c *Condition) Truncate(s string, w int, tail string) string { | |||||
| if c.StringWidth(s) <= w { | |||||
| return s | |||||
| } | |||||
| r := []rune(s) | |||||
| tw := c.StringWidth(tail) | |||||
| w -= tw | |||||
| width := 0 | |||||
| i := 0 | |||||
| for ; i < len(r); i++ { | |||||
| cw := c.RuneWidth(r[i]) | |||||
| if width+cw > w { | |||||
| break | |||||
| } | |||||
| width += cw | |||||
| } | |||||
| return string(r[0:i]) + tail | |||||
| } | |||||
| // Wrap returns s wrapped so that no line exceeds w cells | |||||
| func (c *Condition) Wrap(s string, w int) string { | |||||
| width := 0 | |||||
| out := "" | |||||
| for _, r := range []rune(s) { | |||||
| cw := RuneWidth(r) | |||||
| if r == '\n' { | |||||
| out += string(r) | |||||
| width = 0 | |||||
| continue | |||||
| } else if width+cw > w { | |||||
| out += "\n" | |||||
| width = 0 | |||||
| out += string(r) | |||||
| width += cw | |||||
| continue | |||||
| } | |||||
| out += string(r) | |||||
| width += cw | |||||
| } | |||||
| return out | |||||
| } | |||||
| // FillLeft returns s left-padded with spaces to a width of w cells | |||||
| func (c *Condition) FillLeft(s string, w int) string { | |||||
| width := c.StringWidth(s) | |||||
| count := w - width | |||||
| if count > 0 { | |||||
| b := make([]byte, count) | |||||
| for i := range b { | |||||
| b[i] = ' ' | |||||
| } | |||||
| return string(b) + s | |||||
| } | |||||
| return s | |||||
| } | |||||
| // FillRight returns s right-padded with spaces to a width of w cells | |||||
| func (c *Condition) FillRight(s string, w int) string { | |||||
| width := c.StringWidth(s) | |||||
| count := w - width | |||||
| if count > 0 { | |||||
| b := make([]byte, count) | |||||
| for i := range b { | |||||
| b[i] = ' ' | |||||
| } | |||||
| return s + string(b) | |||||
| } | |||||
| return s | |||||
| } | |||||
| // RuneWidth returns the number of cells in r. | |||||
| // See http://www.unicode.org/reports/tr11/ | |||||
| func RuneWidth(r rune) int { | |||||
| return DefaultCondition.RuneWidth(r) | |||||
| } | |||||
| // IsAmbiguousWidth returns whether r has ambiguous width or not. | |||||
| func IsAmbiguousWidth(r rune) bool { | |||||
| return inTables(r, private, ambiguous) | |||||
| } | |||||
| // IsNeutralWidth returns whether r has neutral width or not. | |||||
| func IsNeutralWidth(r rune) bool { | |||||
| return inTable(r, neutral) | |||||
| } | |||||
| // StringWidth returns the display width of s in cells | |||||
| func StringWidth(s string) (width int) { | |||||
| return DefaultCondition.StringWidth(s) | |||||
| } | |||||
| // Truncate returns s truncated to fit within w cells, with tail appended | |||||
| func Truncate(s string, w int, tail string) string { | |||||
| return DefaultCondition.Truncate(s, w, tail) | |||||
| } | |||||
| // Wrap returns s wrapped so that no line exceeds w cells | |||||
| func Wrap(s string, w int) string { | |||||
| return DefaultCondition.Wrap(s, w) | |||||
| } | |||||
| // FillLeft returns s left-padded with spaces to a width of w cells | |||||
| func FillLeft(s string, w int) string { | |||||
| return DefaultCondition.FillLeft(s, w) | |||||
| } | |||||
| // FillRight returns s right-padded with spaces to a width of w cells | |||||
| func FillRight(s string, w int) string { | |||||
| return DefaultCondition.FillRight(s, w) | |||||
| } | |||||
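The Condition methods above drive both the package-level wrappers and the width-aware formatting helpers. A short usage sketch, assuming only the go-runewidth API defined in this file and not part of the vendored sources (the characters used here avoid locale-dependent "ambiguous" runes):

```go
package main

import (
	"fmt"

	"github.com/mattn/go-runewidth"
)

func main() {
	s := "コンニチハ, World!"

	// Katakana runes occupy two cells, ASCII one: 5*2 + 8 = 18 cells.
	fmt.Println(runewidth.StringWidth(s)) // 18

	// Truncate to 10 cells, appending "..." as the tail.
	fmt.Println(runewidth.Truncate(s, 10, "...")) // "コンニ..."

	// Right-pad with spaces so columns line up at 20 cells.
	fmt.Printf("[%s]\n", runewidth.FillRight(s, 20))
}
```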
| @@ -0,0 +1,8 @@ | |||||
| // +build appengine | |||||
| package runewidth | |||||
| // IsEastAsian returns true if the current locale is CJK | |||||
| func IsEastAsian() bool { | |||||
| return false | |||||
| } | |||||
| @@ -0,0 +1,9 @@ | |||||
| // +build js | |||||
| // +build !appengine | |||||
| package runewidth | |||||
| func IsEastAsian() bool { | |||||
| // TODO: Implement this for the web. Detect east asian in a compatible way, and return true. | |||||
| return false | |||||
| } | |||||
| @@ -0,0 +1,79 @@ | |||||
| // +build !windows | |||||
| // +build !js | |||||
| // +build !appengine | |||||
| package runewidth | |||||
| import ( | |||||
| "os" | |||||
| "regexp" | |||||
| "strings" | |||||
| ) | |||||
| var reLoc = regexp.MustCompile(`^[a-z][a-z][a-z]?(?:_[A-Z][A-Z])?\.(.+)`) | |||||
| var mblenTable = map[string]int{ | |||||
| "utf-8": 6, | |||||
| "utf8": 6, | |||||
| "jis": 8, | |||||
| "eucjp": 3, | |||||
| "euckr": 2, | |||||
| "euccn": 2, | |||||
| "sjis": 2, | |||||
| "cp932": 2, | |||||
| "cp51932": 2, | |||||
| "cp936": 2, | |||||
| "cp949": 2, | |||||
| "cp950": 2, | |||||
| "big5": 2, | |||||
| "gbk": 2, | |||||
| "gb2312": 2, | |||||
| } | |||||
| func isEastAsian(locale string) bool { | |||||
| charset := strings.ToLower(locale) | |||||
| r := reLoc.FindStringSubmatch(locale) | |||||
| if len(r) == 2 { | |||||
| charset = strings.ToLower(r[1]) | |||||
| } | |||||
| if strings.HasSuffix(charset, "@cjk_narrow") { | |||||
| return false | |||||
| } | |||||
| for pos, b := range []byte(charset) { | |||||
| if b == '@' { | |||||
| charset = charset[:pos] | |||||
| break | |||||
| } | |||||
| } | |||||
| max := 1 | |||||
| if m, ok := mblenTable[charset]; ok { | |||||
| max = m | |||||
| } | |||||
| if max > 1 && (charset[0] != 'u' || | |||||
| strings.HasPrefix(locale, "ja") || | |||||
| strings.HasPrefix(locale, "ko") || | |||||
| strings.HasPrefix(locale, "zh")) { | |||||
| return true | |||||
| } | |||||
| return false | |||||
| } | |||||
| // IsEastAsian returns true if the current locale is CJK | |||||
| func IsEastAsian() bool { | |||||
| locale := os.Getenv("LC_CTYPE") | |||||
| if locale == "" { | |||||
| locale = os.Getenv("LANG") | |||||
| } | |||||
| // ignore C locale | |||||
| if locale == "POSIX" || locale == "C" { | |||||
| return false | |||||
| } | |||||
| if len(locale) > 1 && locale[0] == 'C' && (locale[1] == '.' || locale[1] == '-') { | |||||
| return false | |||||
| } | |||||
| return isEastAsian(locale) | |||||
| } | |||||
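IsEastAsian only establishes the default: handleEnv in runewidth.go (shown earlier in this diff) reads the RUNEWIDTH_EASTASIAN environment variable first and falls back to this locale check. Callers can also decide explicitly per Condition; a small sketch of the difference for an "ambiguous-width" rune, assuming only the API shown above and not part of the vendored sources:

```go
package main

import (
	"fmt"

	"github.com/mattn/go-runewidth"
)

func main() {
	// '§' (U+00A7) is in the ambiguous table: 1 cell normally,
	// 2 cells under East Asian width rules.
	narrow := runewidth.NewCondition()
	narrow.EastAsianWidth = false

	wide := runewidth.NewCondition()
	wide.EastAsianWidth = true

	fmt.Println(narrow.RuneWidth('§')) // 1
	fmt.Println(wide.RuneWidth('§'))   // 2
}
```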
| @@ -0,0 +1,427 @@ | |||||
| package runewidth | |||||
| var combining = table{ | |||||
| {0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3}, | |||||
| {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01}, | |||||
| {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1ABE}, | |||||
| {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF}, | |||||
| {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF}, | |||||
| {0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D}, | |||||
| {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1}, | |||||
| {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A}, | |||||
| {0x10F46, 0x10F50}, {0x11300, 0x11301}, {0x1133B, 0x1133C}, | |||||
| {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x16AF0, 0x16AF4}, | |||||
| {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, {0x1D17B, 0x1D182}, | |||||
| {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, {0x1D242, 0x1D244}, | |||||
| {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, | |||||
| {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, {0x1E8D0, 0x1E8D6}, | |||||
| } | |||||
| var doublewidth = table{ | |||||
| {0x1100, 0x115F}, {0x231A, 0x231B}, {0x2329, 0x232A}, | |||||
| {0x23E9, 0x23EC}, {0x23F0, 0x23F0}, {0x23F3, 0x23F3}, | |||||
| {0x25FD, 0x25FE}, {0x2614, 0x2615}, {0x2648, 0x2653}, | |||||
| {0x267F, 0x267F}, {0x2693, 0x2693}, {0x26A1, 0x26A1}, | |||||
| {0x26AA, 0x26AB}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5}, | |||||
| {0x26CE, 0x26CE}, {0x26D4, 0x26D4}, {0x26EA, 0x26EA}, | |||||
| {0x26F2, 0x26F3}, {0x26F5, 0x26F5}, {0x26FA, 0x26FA}, | |||||
| {0x26FD, 0x26FD}, {0x2705, 0x2705}, {0x270A, 0x270B}, | |||||
| {0x2728, 0x2728}, {0x274C, 0x274C}, {0x274E, 0x274E}, | |||||
| {0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797}, | |||||
| {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C}, | |||||
| {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99}, | |||||
| {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB}, | |||||
| {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF}, | |||||
| {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31BA}, | |||||
| {0x31C0, 0x31E3}, {0x31F0, 0x321E}, {0x3220, 0x3247}, | |||||
| {0x3250, 0x4DBF}, {0x4E00, 0xA48C}, {0xA490, 0xA4C6}, | |||||
| {0xA960, 0xA97C}, {0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, | |||||
| {0xFE10, 0xFE19}, {0xFE30, 0xFE52}, {0xFE54, 0xFE66}, | |||||
| {0xFE68, 0xFE6B}, {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, | |||||
| {0x16FE0, 0x16FE3}, {0x17000, 0x187F7}, {0x18800, 0x18AF2}, | |||||
| {0x1B000, 0x1B11E}, {0x1B150, 0x1B152}, {0x1B164, 0x1B167}, | |||||
| {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004}, {0x1F0CF, 0x1F0CF}, | |||||
| {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A}, {0x1F200, 0x1F202}, | |||||
| {0x1F210, 0x1F23B}, {0x1F240, 0x1F248}, {0x1F250, 0x1F251}, | |||||
| {0x1F260, 0x1F265}, {0x1F300, 0x1F320}, {0x1F32D, 0x1F335}, | |||||
| {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393}, {0x1F3A0, 0x1F3CA}, | |||||
| {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0}, {0x1F3F4, 0x1F3F4}, | |||||
| {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440}, {0x1F442, 0x1F4FC}, | |||||
| {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E}, {0x1F550, 0x1F567}, | |||||
| {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596}, {0x1F5A4, 0x1F5A4}, | |||||
| {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5}, {0x1F6CC, 0x1F6CC}, | |||||
| {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D5}, {0x1F6EB, 0x1F6EC}, | |||||
| {0x1F6F4, 0x1F6FA}, {0x1F7E0, 0x1F7EB}, {0x1F90D, 0x1F971}, | |||||
| {0x1F973, 0x1F976}, {0x1F97A, 0x1F9A2}, {0x1F9A5, 0x1F9AA}, | |||||
| {0x1F9AE, 0x1F9CA}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA73}, | |||||
| {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA82}, {0x1FA90, 0x1FA95}, | |||||
| {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD}, | |||||
| } | |||||
| var ambiguous = table{ | |||||
| {0x00A1, 0x00A1}, {0x00A4, 0x00A4}, {0x00A7, 0x00A8}, | |||||
| {0x00AA, 0x00AA}, {0x00AD, 0x00AE}, {0x00B0, 0x00B4}, | |||||
| {0x00B6, 0x00BA}, {0x00BC, 0x00BF}, {0x00C6, 0x00C6}, | |||||
| {0x00D0, 0x00D0}, {0x00D7, 0x00D8}, {0x00DE, 0x00E1}, | |||||
| {0x00E6, 0x00E6}, {0x00E8, 0x00EA}, {0x00EC, 0x00ED}, | |||||
| {0x00F0, 0x00F0}, {0x00F2, 0x00F3}, {0x00F7, 0x00FA}, | |||||
| {0x00FC, 0x00FC}, {0x00FE, 0x00FE}, {0x0101, 0x0101}, | |||||
| {0x0111, 0x0111}, {0x0113, 0x0113}, {0x011B, 0x011B}, | |||||
| {0x0126, 0x0127}, {0x012B, 0x012B}, {0x0131, 0x0133}, | |||||
| {0x0138, 0x0138}, {0x013F, 0x0142}, {0x0144, 0x0144}, | |||||
| {0x0148, 0x014B}, {0x014D, 0x014D}, {0x0152, 0x0153}, | |||||
| {0x0166, 0x0167}, {0x016B, 0x016B}, {0x01CE, 0x01CE}, | |||||
| {0x01D0, 0x01D0}, {0x01D2, 0x01D2}, {0x01D4, 0x01D4}, | |||||
| {0x01D6, 0x01D6}, {0x01D8, 0x01D8}, {0x01DA, 0x01DA}, | |||||
| {0x01DC, 0x01DC}, {0x0251, 0x0251}, {0x0261, 0x0261}, | |||||
| {0x02C4, 0x02C4}, {0x02C7, 0x02C7}, {0x02C9, 0x02CB}, | |||||
| {0x02CD, 0x02CD}, {0x02D0, 0x02D0}, {0x02D8, 0x02DB}, | |||||
| {0x02DD, 0x02DD}, {0x02DF, 0x02DF}, {0x0300, 0x036F}, | |||||
| {0x0391, 0x03A1}, {0x03A3, 0x03A9}, {0x03B1, 0x03C1}, | |||||
| {0x03C3, 0x03C9}, {0x0401, 0x0401}, {0x0410, 0x044F}, | |||||
| {0x0451, 0x0451}, {0x2010, 0x2010}, {0x2013, 0x2016}, | |||||
| {0x2018, 0x2019}, {0x201C, 0x201D}, {0x2020, 0x2022}, | |||||
| {0x2024, 0x2027}, {0x2030, 0x2030}, {0x2032, 0x2033}, | |||||
| {0x2035, 0x2035}, {0x203B, 0x203B}, {0x203E, 0x203E}, | |||||
| {0x2074, 0x2074}, {0x207F, 0x207F}, {0x2081, 0x2084}, | |||||
| {0x20AC, 0x20AC}, {0x2103, 0x2103}, {0x2105, 0x2105}, | |||||
| {0x2109, 0x2109}, {0x2113, 0x2113}, {0x2116, 0x2116}, | |||||
| {0x2121, 0x2122}, {0x2126, 0x2126}, {0x212B, 0x212B}, | |||||
| {0x2153, 0x2154}, {0x215B, 0x215E}, {0x2160, 0x216B}, | |||||
| {0x2170, 0x2179}, {0x2189, 0x2189}, {0x2190, 0x2199}, | |||||
| {0x21B8, 0x21B9}, {0x21D2, 0x21D2}, {0x21D4, 0x21D4}, | |||||
| {0x21E7, 0x21E7}, {0x2200, 0x2200}, {0x2202, 0x2203}, | |||||
| {0x2207, 0x2208}, {0x220B, 0x220B}, {0x220F, 0x220F}, | |||||
| {0x2211, 0x2211}, {0x2215, 0x2215}, {0x221A, 0x221A}, | |||||
| {0x221D, 0x2220}, {0x2223, 0x2223}, {0x2225, 0x2225}, | |||||
| {0x2227, 0x222C}, {0x222E, 0x222E}, {0x2234, 0x2237}, | |||||
| {0x223C, 0x223D}, {0x2248, 0x2248}, {0x224C, 0x224C}, | |||||
| {0x2252, 0x2252}, {0x2260, 0x2261}, {0x2264, 0x2267}, | |||||
| {0x226A, 0x226B}, {0x226E, 0x226F}, {0x2282, 0x2283}, | |||||
| {0x2286, 0x2287}, {0x2295, 0x2295}, {0x2299, 0x2299}, | |||||
| {0x22A5, 0x22A5}, {0x22BF, 0x22BF}, {0x2312, 0x2312}, | |||||
| {0x2460, 0x24E9}, {0x24EB, 0x254B}, {0x2550, 0x2573}, | |||||
| {0x2580, 0x258F}, {0x2592, 0x2595}, {0x25A0, 0x25A1}, | |||||
| {0x25A3, 0x25A9}, {0x25B2, 0x25B3}, {0x25B6, 0x25B7}, | |||||
| {0x25BC, 0x25BD}, {0x25C0, 0x25C1}, {0x25C6, 0x25C8}, | |||||
| {0x25CB, 0x25CB}, {0x25CE, 0x25D1}, {0x25E2, 0x25E5}, | |||||
| {0x25EF, 0x25EF}, {0x2605, 0x2606}, {0x2609, 0x2609}, | |||||
| {0x260E, 0x260F}, {0x261C, 0x261C}, {0x261E, 0x261E}, | |||||
| {0x2640, 0x2640}, {0x2642, 0x2642}, {0x2660, 0x2661}, | |||||
| {0x2663, 0x2665}, {0x2667, 0x266A}, {0x266C, 0x266D}, | |||||
| {0x266F, 0x266F}, {0x269E, 0x269F}, {0x26BF, 0x26BF}, | |||||
| {0x26C6, 0x26CD}, {0x26CF, 0x26D3}, {0x26D5, 0x26E1}, | |||||
| {0x26E3, 0x26E3}, {0x26E8, 0x26E9}, {0x26EB, 0x26F1}, | |||||
| {0x26F4, 0x26F4}, {0x26F6, 0x26F9}, {0x26FB, 0x26FC}, | |||||
| {0x26FE, 0x26FF}, {0x273D, 0x273D}, {0x2776, 0x277F}, | |||||
| {0x2B56, 0x2B59}, {0x3248, 0x324F}, {0xE000, 0xF8FF}, | |||||
| {0xFE00, 0xFE0F}, {0xFFFD, 0xFFFD}, {0x1F100, 0x1F10A}, | |||||
| {0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D}, | |||||
| {0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF}, | |||||
| {0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD}, | |||||
| } | |||||
| var notassigned = table{ | |||||
| {0x27E6, 0x27ED}, {0x2985, 0x2986}, | |||||
| } | |||||
| var neutral = table{ | |||||
| {0x0000, 0x001F}, {0x007F, 0x00A0}, {0x00A9, 0x00A9}, | |||||
| {0x00AB, 0x00AB}, {0x00B5, 0x00B5}, {0x00BB, 0x00BB}, | |||||
| {0x00C0, 0x00C5}, {0x00C7, 0x00CF}, {0x00D1, 0x00D6}, | |||||
| {0x00D9, 0x00DD}, {0x00E2, 0x00E5}, {0x00E7, 0x00E7}, | |||||
| {0x00EB, 0x00EB}, {0x00EE, 0x00EF}, {0x00F1, 0x00F1}, | |||||
| {0x00F4, 0x00F6}, {0x00FB, 0x00FB}, {0x00FD, 0x00FD}, | |||||
| {0x00FF, 0x0100}, {0x0102, 0x0110}, {0x0112, 0x0112}, | |||||
| {0x0114, 0x011A}, {0x011C, 0x0125}, {0x0128, 0x012A}, | |||||
| {0x012C, 0x0130}, {0x0134, 0x0137}, {0x0139, 0x013E}, | |||||
| {0x0143, 0x0143}, {0x0145, 0x0147}, {0x014C, 0x014C}, | |||||
| {0x014E, 0x0151}, {0x0154, 0x0165}, {0x0168, 0x016A}, | |||||
| {0x016C, 0x01CD}, {0x01CF, 0x01CF}, {0x01D1, 0x01D1}, | |||||
| {0x01D3, 0x01D3}, {0x01D5, 0x01D5}, {0x01D7, 0x01D7}, | |||||
| {0x01D9, 0x01D9}, {0x01DB, 0x01DB}, {0x01DD, 0x0250}, | |||||
| {0x0252, 0x0260}, {0x0262, 0x02C3}, {0x02C5, 0x02C6}, | |||||
| {0x02C8, 0x02C8}, {0x02CC, 0x02CC}, {0x02CE, 0x02CF}, | |||||
| {0x02D1, 0x02D7}, {0x02DC, 0x02DC}, {0x02DE, 0x02DE}, | |||||
| {0x02E0, 0x02FF}, {0x0370, 0x0377}, {0x037A, 0x037F}, | |||||
| {0x0384, 0x038A}, {0x038C, 0x038C}, {0x038E, 0x0390}, | |||||
| {0x03AA, 0x03B0}, {0x03C2, 0x03C2}, {0x03CA, 0x0400}, | |||||
| {0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F}, | |||||
| {0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F}, | |||||
| {0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4}, | |||||
| {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A}, | |||||
| {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D}, | |||||
| {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E}, | |||||
| {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08BD}, | |||||
| {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990}, | |||||
| {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2}, | |||||
| {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8}, | |||||
| {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD}, | |||||
| {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03}, | |||||
| {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28}, | |||||
| {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36}, | |||||
| {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42}, | |||||
| {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51}, | |||||
| {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76}, | |||||
| {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91}, | |||||
| {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3}, | |||||
| {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9}, | |||||
| {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3}, | |||||
| {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03}, | |||||
| {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28}, | |||||
| {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39}, | |||||
| {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D}, | |||||
| {0x0B56, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63}, | |||||
| {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A}, | |||||
| {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A}, | |||||
| {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4}, | |||||
| {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2}, | |||||
| {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0}, | |||||
| {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C}, | |||||
| {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, | |||||
| {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, | |||||
| {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63}, | |||||
| {0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90}, | |||||
| {0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9}, | |||||
| {0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD}, | |||||
| {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3}, | |||||
| {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D03}, | |||||
| {0x0D05, 0x0D0C}, {0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, | |||||
| {0x0D46, 0x0D48}, {0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, | |||||
| {0x0D66, 0x0D7F}, {0x0D82, 0x0D83}, {0x0D85, 0x0D96}, | |||||
| {0x0D9A, 0x0DB1}, {0x0DB3, 0x0DBB}, {0x0DBD, 0x0DBD}, | |||||
| {0x0DC0, 0x0DC6}, {0x0DCA, 0x0DCA}, {0x0DCF, 0x0DD4}, | |||||
| {0x0DD6, 0x0DD6}, {0x0DD8, 0x0DDF}, {0x0DE6, 0x0DEF}, | |||||
| {0x0DF2, 0x0DF4}, {0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, | |||||
| {0x0E81, 0x0E82}, {0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, | |||||
| {0x0E8C, 0x0EA3}, {0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, | |||||
| {0x0EC0, 0x0EC4}, {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, | |||||
| {0x0ED0, 0x0ED9}, {0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, | |||||
| {0x0F49, 0x0F6C}, {0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, | |||||
| {0x0FBE, 0x0FCC}, {0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, | |||||
| {0x10C7, 0x10C7}, {0x10CD, 0x10CD}, {0x10D0, 0x10FF}, | |||||
| {0x1160, 0x1248}, {0x124A, 0x124D}, {0x1250, 0x1256}, | |||||
| {0x1258, 0x1258}, {0x125A, 0x125D}, {0x1260, 0x1288}, | |||||
| {0x128A, 0x128D}, {0x1290, 0x12B0}, {0x12B2, 0x12B5}, | |||||
| {0x12B8, 0x12BE}, {0x12C0, 0x12C0}, {0x12C2, 0x12C5}, | |||||
| {0x12C8, 0x12D6}, {0x12D8, 0x1310}, {0x1312, 0x1315}, | |||||
| {0x1318, 0x135A}, {0x135D, 0x137C}, {0x1380, 0x1399}, | |||||
| {0x13A0, 0x13F5}, {0x13F8, 0x13FD}, {0x1400, 0x169C}, | |||||
| {0x16A0, 0x16F8}, {0x1700, 0x170C}, {0x170E, 0x1714}, | |||||
| {0x1720, 0x1736}, {0x1740, 0x1753}, {0x1760, 0x176C}, | |||||
| {0x176E, 0x1770}, {0x1772, 0x1773}, {0x1780, 0x17DD}, | |||||
| {0x17E0, 0x17E9}, {0x17F0, 0x17F9}, {0x1800, 0x180E}, | |||||
| {0x1810, 0x1819}, {0x1820, 0x1878}, {0x1880, 0x18AA}, | |||||
| {0x18B0, 0x18F5}, {0x1900, 0x191E}, {0x1920, 0x192B}, | |||||
| {0x1930, 0x193B}, {0x1940, 0x1940}, {0x1944, 0x196D}, | |||||
| {0x1970, 0x1974}, {0x1980, 0x19AB}, {0x19B0, 0x19C9}, | |||||
| {0x19D0, 0x19DA}, {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, | |||||
| {0x1A60, 0x1A7C}, {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, | |||||
| {0x1AA0, 0x1AAD}, {0x1AB0, 0x1ABE}, {0x1B00, 0x1B4B}, | |||||
| {0x1B50, 0x1B7C}, {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, | |||||
| {0x1C3B, 0x1C49}, {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, | |||||
| {0x1CBD, 0x1CC7}, {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, | |||||
| {0x1DFB, 0x1F15}, {0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, | |||||
| {0x1F48, 0x1F4D}, {0x1F50, 0x1F57}, {0x1F59, 0x1F59}, | |||||
| {0x1F5B, 0x1F5B}, {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, | |||||
| {0x1F80, 0x1FB4}, {0x1FB6, 0x1FC4}, {0x1FC6, 0x1FD3}, | |||||
| {0x1FD6, 0x1FDB}, {0x1FDD, 0x1FEF}, {0x1FF2, 0x1FF4}, | |||||
| {0x1FF6, 0x1FFE}, {0x2000, 0x200F}, {0x2011, 0x2012}, | |||||
| {0x2017, 0x2017}, {0x201A, 0x201B}, {0x201E, 0x201F}, | |||||
| {0x2023, 0x2023}, {0x2028, 0x202F}, {0x2031, 0x2031}, | |||||
| {0x2034, 0x2034}, {0x2036, 0x203A}, {0x203C, 0x203D}, | |||||
| {0x203F, 0x2064}, {0x2066, 0x2071}, {0x2075, 0x207E}, | |||||
| {0x2080, 0x2080}, {0x2085, 0x208E}, {0x2090, 0x209C}, | |||||
| {0x20A0, 0x20A8}, {0x20AA, 0x20AB}, {0x20AD, 0x20BF}, | |||||
| {0x20D0, 0x20F0}, {0x2100, 0x2102}, {0x2104, 0x2104}, | |||||
| {0x2106, 0x2108}, {0x210A, 0x2112}, {0x2114, 0x2115}, | |||||
| {0x2117, 0x2120}, {0x2123, 0x2125}, {0x2127, 0x212A}, | |||||
| {0x212C, 0x2152}, {0x2155, 0x215A}, {0x215F, 0x215F}, | |||||
| {0x216C, 0x216F}, {0x217A, 0x2188}, {0x218A, 0x218B}, | |||||
| {0x219A, 0x21B7}, {0x21BA, 0x21D1}, {0x21D3, 0x21D3}, | |||||
| {0x21D5, 0x21E6}, {0x21E8, 0x21FF}, {0x2201, 0x2201}, | |||||
| {0x2204, 0x2206}, {0x2209, 0x220A}, {0x220C, 0x220E}, | |||||
| {0x2210, 0x2210}, {0x2212, 0x2214}, {0x2216, 0x2219}, | |||||
| {0x221B, 0x221C}, {0x2221, 0x2222}, {0x2224, 0x2224}, | |||||
| {0x2226, 0x2226}, {0x222D, 0x222D}, {0x222F, 0x2233}, | |||||
| {0x2238, 0x223B}, {0x223E, 0x2247}, {0x2249, 0x224B}, | |||||
| {0x224D, 0x2251}, {0x2253, 0x225F}, {0x2262, 0x2263}, | |||||
| {0x2268, 0x2269}, {0x226C, 0x226D}, {0x2270, 0x2281}, | |||||
| {0x2284, 0x2285}, {0x2288, 0x2294}, {0x2296, 0x2298}, | |||||
| {0x229A, 0x22A4}, {0x22A6, 0x22BE}, {0x22C0, 0x2311}, | |||||
| {0x2313, 0x2319}, {0x231C, 0x2328}, {0x232B, 0x23E8}, | |||||
| {0x23ED, 0x23EF}, {0x23F1, 0x23F2}, {0x23F4, 0x2426}, | |||||
| {0x2440, 0x244A}, {0x24EA, 0x24EA}, {0x254C, 0x254F}, | |||||
| {0x2574, 0x257F}, {0x2590, 0x2591}, {0x2596, 0x259F}, | |||||
| {0x25A2, 0x25A2}, {0x25AA, 0x25B1}, {0x25B4, 0x25B5}, | |||||
| {0x25B8, 0x25BB}, {0x25BE, 0x25BF}, {0x25C2, 0x25C5}, | |||||
| {0x25C9, 0x25CA}, {0x25CC, 0x25CD}, {0x25D2, 0x25E1}, | |||||
| {0x25E6, 0x25EE}, {0x25F0, 0x25FC}, {0x25FF, 0x2604}, | |||||
| {0x2607, 0x2608}, {0x260A, 0x260D}, {0x2610, 0x2613}, | |||||
| {0x2616, 0x261B}, {0x261D, 0x261D}, {0x261F, 0x263F}, | |||||
| {0x2641, 0x2641}, {0x2643, 0x2647}, {0x2654, 0x265F}, | |||||
| {0x2662, 0x2662}, {0x2666, 0x2666}, {0x266B, 0x266B}, | |||||
| {0x266E, 0x266E}, {0x2670, 0x267E}, {0x2680, 0x2692}, | |||||
| {0x2694, 0x269D}, {0x26A0, 0x26A0}, {0x26A2, 0x26A9}, | |||||
| {0x26AC, 0x26BC}, {0x26C0, 0x26C3}, {0x26E2, 0x26E2}, | |||||
| {0x26E4, 0x26E7}, {0x2700, 0x2704}, {0x2706, 0x2709}, | |||||
| {0x270C, 0x2727}, {0x2729, 0x273C}, {0x273E, 0x274B}, | |||||
| {0x274D, 0x274D}, {0x274F, 0x2752}, {0x2756, 0x2756}, | |||||
| {0x2758, 0x2775}, {0x2780, 0x2794}, {0x2798, 0x27AF}, | |||||
| {0x27B1, 0x27BE}, {0x27C0, 0x27E5}, {0x27EE, 0x2984}, | |||||
| {0x2987, 0x2B1A}, {0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, | |||||
| {0x2B5A, 0x2B73}, {0x2B76, 0x2B95}, {0x2B98, 0x2C2E}, | |||||
| {0x2C30, 0x2C5E}, {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, | |||||
| {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, | |||||
| {0x2D6F, 0x2D70}, {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, | |||||
| {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, | |||||
| {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, | |||||
| {0x2DD8, 0x2DDE}, {0x2DE0, 0x2E4F}, {0x303F, 0x303F}, | |||||
| {0x4DC0, 0x4DFF}, {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, | |||||
| {0xA700, 0xA7BF}, {0xA7C2, 0xA7C6}, {0xA7F7, 0xA82B}, | |||||
| {0xA830, 0xA839}, {0xA840, 0xA877}, {0xA880, 0xA8C5}, | |||||
| {0xA8CE, 0xA8D9}, {0xA8E0, 0xA953}, {0xA95F, 0xA95F}, | |||||
| {0xA980, 0xA9CD}, {0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, | |||||
| {0xAA00, 0xAA36}, {0xAA40, 0xAA4D}, {0xAA50, 0xAA59}, | |||||
| {0xAA5C, 0xAAC2}, {0xAADB, 0xAAF6}, {0xAB01, 0xAB06}, | |||||
| {0xAB09, 0xAB0E}, {0xAB11, 0xAB16}, {0xAB20, 0xAB26}, | |||||
| {0xAB28, 0xAB2E}, {0xAB30, 0xAB67}, {0xAB70, 0xABED}, | |||||
| {0xABF0, 0xABF9}, {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, | |||||
| {0xD800, 0xDFFF}, {0xFB00, 0xFB06}, {0xFB13, 0xFB17}, | |||||
| {0xFB1D, 0xFB36}, {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, | |||||
| {0xFB40, 0xFB41}, {0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, | |||||
| {0xFBD3, 0xFD3F}, {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, | |||||
| {0xFDF0, 0xFDFD}, {0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, | |||||
| {0xFE76, 0xFEFC}, {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, | |||||
| {0x10000, 0x1000B}, {0x1000D, 0x10026}, {0x10028, 0x1003A}, | |||||
| {0x1003C, 0x1003D}, {0x1003F, 0x1004D}, {0x10050, 0x1005D}, | |||||
| {0x10080, 0x100FA}, {0x10100, 0x10102}, {0x10107, 0x10133}, | |||||
| {0x10137, 0x1018E}, {0x10190, 0x1019B}, {0x101A0, 0x101A0}, | |||||
| {0x101D0, 0x101FD}, {0x10280, 0x1029C}, {0x102A0, 0x102D0}, | |||||
| {0x102E0, 0x102FB}, {0x10300, 0x10323}, {0x1032D, 0x1034A}, | |||||
| {0x10350, 0x1037A}, {0x10380, 0x1039D}, {0x1039F, 0x103C3}, | |||||
| {0x103C8, 0x103D5}, {0x10400, 0x1049D}, {0x104A0, 0x104A9}, | |||||
| {0x104B0, 0x104D3}, {0x104D8, 0x104FB}, {0x10500, 0x10527}, | |||||
| {0x10530, 0x10563}, {0x1056F, 0x1056F}, {0x10600, 0x10736}, | |||||
| {0x10740, 0x10755}, {0x10760, 0x10767}, {0x10800, 0x10805}, | |||||
| {0x10808, 0x10808}, {0x1080A, 0x10835}, {0x10837, 0x10838}, | |||||
| {0x1083C, 0x1083C}, {0x1083F, 0x10855}, {0x10857, 0x1089E}, | |||||
| {0x108A7, 0x108AF}, {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, | |||||
| {0x108FB, 0x1091B}, {0x1091F, 0x10939}, {0x1093F, 0x1093F}, | |||||
| {0x10980, 0x109B7}, {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, | |||||
| {0x10A05, 0x10A06}, {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, | |||||
| {0x10A19, 0x10A35}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, | |||||
| {0x10A50, 0x10A58}, {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, | |||||
| {0x10AEB, 0x10AF6}, {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, | |||||
| {0x10B58, 0x10B72}, {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, | |||||
| {0x10BA9, 0x10BAF}, {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, | |||||
| {0x10CC0, 0x10CF2}, {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, | |||||
| {0x10E60, 0x10E7E}, {0x10F00, 0x10F27}, {0x10F30, 0x10F59}, | |||||
| {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F}, | |||||
| {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8}, | |||||
| {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11146}, | |||||
| {0x11150, 0x11176}, {0x11180, 0x111CD}, {0x111D0, 0x111DF}, | |||||
| {0x111E1, 0x111F4}, {0x11200, 0x11211}, {0x11213, 0x1123E}, | |||||
| {0x11280, 0x11286}, {0x11288, 0x11288}, {0x1128A, 0x1128D}, | |||||
| {0x1128F, 0x1129D}, {0x1129F, 0x112A9}, {0x112B0, 0x112EA}, | |||||
| {0x112F0, 0x112F9}, {0x11300, 0x11303}, {0x11305, 0x1130C}, | |||||
| {0x1130F, 0x11310}, {0x11313, 0x11328}, {0x1132A, 0x11330}, | |||||
| {0x11332, 0x11333}, {0x11335, 0x11339}, {0x1133B, 0x11344}, | |||||
| {0x11347, 0x11348}, {0x1134B, 0x1134D}, {0x11350, 0x11350}, | |||||
| {0x11357, 0x11357}, {0x1135D, 0x11363}, {0x11366, 0x1136C}, | |||||
| {0x11370, 0x11374}, {0x11400, 0x11459}, {0x1145B, 0x1145B}, | |||||
| {0x1145D, 0x1145F}, {0x11480, 0x114C7}, {0x114D0, 0x114D9}, | |||||
| {0x11580, 0x115B5}, {0x115B8, 0x115DD}, {0x11600, 0x11644}, | |||||
| {0x11650, 0x11659}, {0x11660, 0x1166C}, {0x11680, 0x116B8}, | |||||
| {0x116C0, 0x116C9}, {0x11700, 0x1171A}, {0x1171D, 0x1172B}, | |||||
| {0x11730, 0x1173F}, {0x11800, 0x1183B}, {0x118A0, 0x118F2}, | |||||
| {0x118FF, 0x118FF}, {0x119A0, 0x119A7}, {0x119AA, 0x119D7}, | |||||
| {0x119DA, 0x119E4}, {0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, | |||||
| {0x11AC0, 0x11AF8}, {0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, | |||||
| {0x11C38, 0x11C45}, {0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, | |||||
| {0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, | |||||
| {0x11D08, 0x11D09}, {0x11D0B, 0x11D36}, {0x11D3A, 0x11D3A}, | |||||
| {0x11D3C, 0x11D3D}, {0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, | |||||
| {0x11D60, 0x11D65}, {0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, | |||||
| {0x11D90, 0x11D91}, {0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, | |||||
| {0x11EE0, 0x11EF8}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399}, | |||||
| {0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543}, | |||||
| {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646}, | |||||
| {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69}, | |||||
| {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5}, | |||||
| {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61}, | |||||
| {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A}, | |||||
| {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F}, | |||||
| {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88}, | |||||
| {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5}, | |||||
| {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245}, | |||||
| {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378}, | |||||
| {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F}, | |||||
| {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, | |||||
| {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, | |||||
| {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, | |||||
| {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, | |||||
| {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, | |||||
| {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B}, | |||||
| {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006}, | |||||
| {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, | |||||
| {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D}, | |||||
| {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9}, | |||||
| {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6}, | |||||
| {0x1E900, 0x1E94B}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F}, | |||||
| {0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03}, | |||||
| {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24}, | |||||
| {0x1EE27, 0x1EE27}, {0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37}, | |||||
| {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42}, | |||||
| {0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49}, {0x1EE4B, 0x1EE4B}, | |||||
| {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, {0x1EE54, 0x1EE54}, | |||||
| {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B}, | |||||
| {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62}, | |||||
| {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72}, | |||||
| {0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E}, | |||||
| {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3}, | |||||
| {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x1EEF0, 0x1EEF1}, | |||||
| {0x1F000, 0x1F003}, {0x1F005, 0x1F02B}, {0x1F030, 0x1F093}, | |||||
| {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CE}, | |||||
| {0x1F0D1, 0x1F0F5}, {0x1F10B, 0x1F10C}, {0x1F12E, 0x1F12F}, | |||||
| {0x1F16A, 0x1F16C}, {0x1F1E6, 0x1F1FF}, {0x1F321, 0x1F32C}, | |||||
| {0x1F336, 0x1F336}, {0x1F37D, 0x1F37D}, {0x1F394, 0x1F39F}, | |||||
| {0x1F3CB, 0x1F3CE}, {0x1F3D4, 0x1F3DF}, {0x1F3F1, 0x1F3F3}, | |||||
| {0x1F3F5, 0x1F3F7}, {0x1F43F, 0x1F43F}, {0x1F441, 0x1F441}, | |||||
| {0x1F4FD, 0x1F4FE}, {0x1F53E, 0x1F54A}, {0x1F54F, 0x1F54F}, | |||||
| {0x1F568, 0x1F579}, {0x1F57B, 0x1F594}, {0x1F597, 0x1F5A3}, | |||||
| {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F}, {0x1F6C6, 0x1F6CB}, | |||||
| {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4}, {0x1F6E0, 0x1F6EA}, | |||||
| {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773}, {0x1F780, 0x1F7D8}, | |||||
| {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, {0x1F850, 0x1F859}, | |||||
| {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, {0x1F900, 0x1F90B}, | |||||
| {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D}, {0xE0001, 0xE0001}, | |||||
| {0xE0020, 0xE007F}, | |||||
| } | |||||
| var emoji = table{ | |||||
| {0x203C, 0x203C}, {0x2049, 0x2049}, {0x2122, 0x2122}, | |||||
| {0x2139, 0x2139}, {0x2194, 0x2199}, {0x21A9, 0x21AA}, | |||||
| {0x231A, 0x231B}, {0x2328, 0x2328}, {0x2388, 0x2388}, | |||||
| {0x23CF, 0x23CF}, {0x23E9, 0x23F3}, {0x23F8, 0x23FA}, | |||||
| {0x24C2, 0x24C2}, {0x25AA, 0x25AB}, {0x25B6, 0x25B6}, | |||||
| {0x25C0, 0x25C0}, {0x25FB, 0x25FE}, {0x2600, 0x2605}, | |||||
| {0x2607, 0x2612}, {0x2614, 0x2685}, {0x2690, 0x2705}, | |||||
| {0x2708, 0x2712}, {0x2714, 0x2714}, {0x2716, 0x2716}, | |||||
| {0x271D, 0x271D}, {0x2721, 0x2721}, {0x2728, 0x2728}, | |||||
| {0x2733, 0x2734}, {0x2744, 0x2744}, {0x2747, 0x2747}, | |||||
| {0x274C, 0x274C}, {0x274E, 0x274E}, {0x2753, 0x2755}, | |||||
| {0x2757, 0x2757}, {0x2763, 0x2767}, {0x2795, 0x2797}, | |||||
| {0x27A1, 0x27A1}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, | |||||
| {0x2934, 0x2935}, {0x2B05, 0x2B07}, {0x2B1B, 0x2B1C}, | |||||
| {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x3030, 0x3030}, | |||||
| {0x303D, 0x303D}, {0x3297, 0x3297}, {0x3299, 0x3299}, | |||||
| {0x1F000, 0x1F0FF}, {0x1F10D, 0x1F10F}, {0x1F12F, 0x1F12F}, | |||||
| {0x1F16C, 0x1F171}, {0x1F17E, 0x1F17F}, {0x1F18E, 0x1F18E}, | |||||
| {0x1F191, 0x1F19A}, {0x1F1AD, 0x1F1E5}, {0x1F201, 0x1F20F}, | |||||
| {0x1F21A, 0x1F21A}, {0x1F22F, 0x1F22F}, {0x1F232, 0x1F23A}, | |||||
| {0x1F23C, 0x1F23F}, {0x1F249, 0x1F3FA}, {0x1F400, 0x1F53D}, | |||||
| {0x1F546, 0x1F64F}, {0x1F680, 0x1F6FF}, {0x1F774, 0x1F77F}, | |||||
| {0x1F7D5, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F}, | |||||
| {0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8FF}, | |||||
| {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1FFFD}, | |||||
| } | |||||
| @@ -0,0 +1,28 @@ | |||||
| // +build windows | |||||
| // +build !appengine | |||||
| package runewidth | |||||
| import ( | |||||
| "syscall" | |||||
| ) | |||||
| var ( | |||||
| kernel32 = syscall.NewLazyDLL("kernel32") | |||||
| procGetConsoleOutputCP = kernel32.NewProc("GetConsoleOutputCP") | |||||
| ) | |||||
| // IsEastAsian returns true if the current locale is CJK | |||||
| func IsEastAsian() bool { | |||||
| r1, _, _ := procGetConsoleOutputCP.Call() | |||||
| if r1 == 0 { | |||||
| return false | |||||
| } | |||||
| switch int(r1) { | |||||
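| // Windows code pages for CJK locales: 932 (Shift-JIS), 51932 (EUC-JP), 936 (GBK), 949 (Unified Hangul Code), 950 (Big5). | |||||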
| case 932, 51932, 936, 949, 950: | |||||
| return true | |||||
| } | |||||
| return false | |||||
| } | |||||
| @@ -0,0 +1,2 @@ | |||||
| language: go | |||||
| go: master | |||||
| @@ -0,0 +1,21 @@ | |||||
| MIT License | |||||
| Copyright (c) 2018 Minko Gechev | |||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | |||||
| of this software and associated documentation files (the "Software"), to deal | |||||
| in the Software without restriction, including without limitation the rights | |||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||||
| copies of the Software, and to permit persons to whom the Software is | |||||
| furnished to do so, subject to the following conditions: | |||||
| The above copyright notice and this permission notice shall be included in all | |||||
| copies or substantial portions of the Software. | |||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||||
| SOFTWARE. | |||||
| @@ -0,0 +1,100 @@ | |||||
| [Build Status](https://travis-ci.org/mgechev/dots) | |||||
| # Dots | |||||
| Implements the wildcard file matching in Go used by golint, go test, etc. | |||||
| ## Usage | |||||
| ```go | |||||
| import "github.com/mgechev/dots" | |||||
| func main() { | |||||
| result, err := dots.Resolve([]string{"./fixtures/..."}, []string{"./fixtures/foo"}) | |||||
| for _, f := range result { | |||||
| fmt.Println(f); | |||||
| } | |||||
| } | |||||
| ``` | |||||
| Suppose we have the following directory structure: | |||||
| ```text | |||||
| ├── README.md | |||||
| ├── fixtures | |||||
| │ ├── bar | |||||
| │ │ ├── bar1.go | |||||
| │ │ └── bar2.go | |||||
| │ ├── baz | |||||
| │ │ ├── baz1.go | |||||
| │ │ ├── baz2.go | |||||
| │ │ └── baz3.go | |||||
| │ └── foo | |||||
| │ ├── foo1.go | |||||
| │ ├── foo2.go | |||||
| │ └── foo3.go | |||||
| └── main.go | |||||
| ``` | |||||
| The result will be: | |||||
| ```text | |||||
| fixtures/bar/bar1.go | |||||
| fixtures/bar/bar2.go | |||||
| fixtures/baz/baz1.go | |||||
| fixtures/baz/baz2.go | |||||
| fixtures/baz/baz3.go | |||||
| ``` | |||||
| `dots` supports wildcards in both the first and the second argument of `Resolve`, which means that you can ignore files based on a wildcard: | |||||
| ```go | |||||
| dots.Resolve([]string{"github.com/mgechev/dots"}, []string{"./..."}) // empty list | |||||
| dots.Resolve([]string{"./fixtures/bar/..."}, []string{"./fixture/foo/...", "./fixtures/baz/..."}) // bar1.go, bar2.go | |||||
| ``` | |||||
| ## Preserve package structure | |||||
| `dots` allows you to receive a slice of slices where each nested slice represents an individual package: | |||||
| ```go | |||||
| dots.ResolvePackages([]string{"github.com/mgechev/dots/..."}, []string{}) | |||||
| ``` | |||||
| So we will get the result: | |||||
| ```text | |||||
| [ | |||||
| [ | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/bar/bar1.go", | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/bar/bar2.go" | |||||
| ], | |||||
| [ | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz1.go", | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz2.go", | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz3.go" | |||||
| ], | |||||
| [ | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo1.go", | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo2.go", | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo3.go" | |||||
| ], | |||||
| [ | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/baz/baz1.go", | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/baz/baz2.go" | |||||
| ], | |||||
| [ | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/foo1.go", | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/foo2.go" | |||||
| ], | |||||
| [ | |||||
| "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/bar/bar1.go" | |||||
| ] | |||||
| ] | |||||
| ``` | |||||
| This method is especially useful when you want to perform type checking over a given package from the result. | |||||
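| A minimal sketch of that use case, assuming the `./fixtures` layout above and a checked package with only standard-library imports, could look like this: | |||||
| ```go | |||||
| package main | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "go/importer" | |||||
| "go/parser" | |||||
| "go/token" | |||||
| "go/types" | |||||
| "log" | |||||
| "github.com/mgechev/dots" | |||||
| ) | |||||
| func main() { | |||||
| // Resolve packages under ./fixtures; each nested slice is the file list of one package. | |||||
| pkgs, err := dots.ResolvePackages([]string{"./fixtures/..."}, []string{}) | |||||
| if err != nil || len(pkgs) == 0 { | |||||
| log.Fatalf("resolve failed: %v", err) | |||||
| } | |||||
| // Parse the files of the first resolved package. | |||||
| fset := token.NewFileSet() | |||||
| var files []*ast.File | |||||
| for _, path := range pkgs[0] { | |||||
| f, parseErr := parser.ParseFile(fset, path, nil, parser.ParseComments) | |||||
| if parseErr != nil { | |||||
| log.Fatal(parseErr) | |||||
| } | |||||
| files = append(files, f) | |||||
| } | |||||
| // Type-check the parsed files; the first argument is only a package path label. | |||||
| conf := types.Config{Importer: importer.Default()} | |||||
| pkg, err := conf.Check("fixtures", fset, files, nil) | |||||
| if err != nil { | |||||
| log.Fatal(err) | |||||
| } | |||||
| fmt.Println("type-checked package:", pkg.Name()) | |||||
| } | |||||
| ``` | |||||
| Each nested slice maps onto a single `types.Config.Check` call, which is the main reason to prefer `ResolvePackages` over `Resolve` for this kind of analysis. | |||||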
| ## License | |||||
| MIT | |||||
| @@ -0,0 +1,456 @@ | |||||
| package dots | |||||
| import ( | |||||
| "go/build" | |||||
| "log" | |||||
| "os" | |||||
| "path" | |||||
| "path/filepath" | |||||
| "regexp" | |||||
| "runtime" | |||||
| "strings" | |||||
| ) | |||||
| var ( | |||||
| buildContext = build.Default | |||||
| goroot = filepath.Clean(runtime.GOROOT()) | |||||
| gorootSrc = filepath.Join(goroot, "src") | |||||
| ) | |||||
| func flatten(arr [][]string) []string { | |||||
| var res []string | |||||
| for _, e := range arr { | |||||
| res = append(res, e...) | |||||
| } | |||||
| return res | |||||
| } | |||||
| // Resolve accepts a slice of paths with optional "..." placeholder and a slice with paths to be skipped. | |||||
| // The final result is the set of all files from the selected directories minus | |||||
| // the files in the skip slice. | |||||
| func Resolve(includePatterns, skipPatterns []string) ([]string, error) { | |||||
| skip, err := resolvePatterns(skipPatterns) | |||||
| filter := newPathFilter(flatten(skip)) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| pathSet := map[string]bool{} | |||||
| includePackages, err := resolvePatterns(includePatterns) | |||||
| include := flatten(includePackages) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| var result []string | |||||
| for _, i := range include { | |||||
| if _, ok := pathSet[i]; !ok && !filter(i) { | |||||
| pathSet[i] = true | |||||
| result = append(result, i) | |||||
| } | |||||
| } | |||||
| return result, err | |||||
| } | |||||
| // ResolvePackages accepts a slice of paths with optional "..." placeholder and a slice with paths to be skipped. | |||||
| // The final result is the set of all files from the selected directories minus | |||||
| // the files in the skip slice. The difference between `Resolve` and `ResolvePackages` | |||||
| // is that `ResolvePackages` preserves the package structure in the nested slices. | |||||
| func ResolvePackages(includePatterns, skipPatterns []string) ([][]string, error) { | |||||
| skip, err := resolvePatterns(skipPatterns) | |||||
| filter := newPathFilter(flatten(skip)) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| pathSet := map[string]bool{} | |||||
| include, err := resolvePatterns(includePatterns) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| var result [][]string | |||||
| for _, p := range include { | |||||
| var packageFiles []string | |||||
| for _, f := range p { | |||||
| if _, ok := pathSet[f]; !ok && !filter(f) { | |||||
| pathSet[f] = true | |||||
| packageFiles = append(packageFiles, f) | |||||
| } | |||||
| } | |||||
| result = append(result, packageFiles) | |||||
| } | |||||
| return result, err | |||||
| } | |||||
| func isDir(filename string) bool { | |||||
| fi, err := os.Stat(filename) | |||||
| return err == nil && fi.IsDir() | |||||
| } | |||||
| func exists(filename string) bool { | |||||
| _, err := os.Stat(filename) | |||||
| return err == nil | |||||
| } | |||||
| func resolveDir(dirname string) ([]string, error) { | |||||
| pkg, err := build.ImportDir(dirname, 0) | |||||
| return resolveImportedPackage(pkg, err) | |||||
| } | |||||
| func resolvePackage(pkgname string) ([]string, error) { | |||||
| pkg, err := build.Import(pkgname, ".", 0) | |||||
| return resolveImportedPackage(pkg, err) | |||||
| } | |||||
| func resolveImportedPackage(pkg *build.Package, err error) ([]string, error) { | |||||
| if err != nil { | |||||
| if _, nogo := err.(*build.NoGoError); nogo { | |||||
| // Don't complain if the failure is due to no Go source files. | |||||
| return nil, nil | |||||
| } | |||||
| return nil, err | |||||
| } | |||||
| var files []string | |||||
| files = append(files, pkg.GoFiles...) | |||||
| files = append(files, pkg.CgoFiles...) | |||||
| files = append(files, pkg.TestGoFiles...) | |||||
| if pkg.Dir != "." { | |||||
| for i, f := range files { | |||||
| files[i] = filepath.Join(pkg.Dir, f) | |||||
| } | |||||
| } | |||||
| return files, nil | |||||
| } | |||||
| func resolvePatterns(patterns []string) ([][]string, error) { | |||||
| var files [][]string | |||||
| for _, pattern := range patterns { | |||||
| f, err := resolvePattern(pattern) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| files = append(files, f...) | |||||
| } | |||||
| return files, nil | |||||
| } | |||||
| func resolvePattern(pattern string) ([][]string, error) { | |||||
| // dirsRun, filesRun, and pkgsRun indicate whether golint is applied to | |||||
| // directory, file or package targets. The distinction affects which | |||||
| // checks are run. It is not valid to mix target types. | |||||
| var dirsRun, filesRun, pkgsRun int | |||||
| var matches []string | |||||
| if strings.HasSuffix(pattern, "/...") && isDir(pattern[:len(pattern)-len("/...")]) { | |||||
| dirsRun = 1 | |||||
| for _, dirname := range matchPackagesInFS(pattern) { | |||||
| matches = append(matches, dirname) | |||||
| } | |||||
| } else if isDir(pattern) { | |||||
| dirsRun = 1 | |||||
| matches = append(matches, pattern) | |||||
| } else if exists(pattern) { | |||||
| filesRun = 1 | |||||
| matches = append(matches, pattern) | |||||
| } else { | |||||
| pkgsRun = 1 | |||||
| matches = append(matches, pattern) | |||||
| } | |||||
| result := [][]string{} | |||||
| switch { | |||||
| case dirsRun == 1: | |||||
| for _, dir := range matches { | |||||
| res, err := resolveDir(dir) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| result = append(result, res) | |||||
| } | |||||
| case filesRun == 1: | |||||
| return [][]string{matches}, nil | |||||
| case pkgsRun == 1: | |||||
| for _, pkg := range importPaths(matches) { | |||||
| res, err := resolvePackage(pkg) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| result = append(result, res) | |||||
| } | |||||
| } | |||||
| return result, nil | |||||
| } | |||||
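| // newPathFilter returns a predicate that reports whether a path should be skipped: it skips | |||||
| // names present in the skip list and any path whose base name starts with "." or "_". | |||||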
| func newPathFilter(skip []string) func(string) bool { | |||||
| filter := map[string]bool{} | |||||
| for _, name := range skip { | |||||
| filter[name] = true | |||||
| } | |||||
| return func(path string) bool { | |||||
| base := filepath.Base(path) | |||||
| if filter[base] || filter[path] { | |||||
| return true | |||||
| } | |||||
| return base != "." && base != ".." && strings.ContainsAny(base[0:1], "_.") | |||||
| } | |||||
| } | |||||
| // importPathsNoDotExpansion returns the import paths to use for the given | |||||
| // command line, but it does no ... expansion. | |||||
| func importPathsNoDotExpansion(args []string) []string { | |||||
| if len(args) == 0 { | |||||
| return []string{"."} | |||||
| } | |||||
| var out []string | |||||
| for _, a := range args { | |||||
| // Arguments are supposed to be import paths, but | |||||
| // as a courtesy to Windows developers, rewrite \ to / | |||||
| // in command-line arguments. Handles .\... and so on. | |||||
| if filepath.Separator == '\\' { | |||||
| a = strings.Replace(a, `\`, `/`, -1) | |||||
| } | |||||
| // Put argument in canonical form, but preserve leading ./. | |||||
| if strings.HasPrefix(a, "./") { | |||||
| a = "./" + path.Clean(a) | |||||
| if a == "./." { | |||||
| a = "." | |||||
| } | |||||
| } else { | |||||
| a = path.Clean(a) | |||||
| } | |||||
| if a == "all" || a == "std" { | |||||
| out = append(out, matchPackages(a)...) | |||||
| continue | |||||
| } | |||||
| out = append(out, a) | |||||
| } | |||||
| return out | |||||
| } | |||||
| // importPaths returns the import paths to use for the given command line. | |||||
| func importPaths(args []string) []string { | |||||
| args = importPathsNoDotExpansion(args) | |||||
| var out []string | |||||
| for _, a := range args { | |||||
| if strings.Contains(a, "...") { | |||||
| if build.IsLocalImport(a) { | |||||
| out = append(out, matchPackagesInFS(a)...) | |||||
| } else { | |||||
| out = append(out, matchPackages(a)...) | |||||
| } | |||||
| continue | |||||
| } | |||||
| out = append(out, a) | |||||
| } | |||||
| return out | |||||
| } | |||||
| // matchPattern(pattern)(name) reports whether | |||||
| // name matches pattern. Pattern is a limited glob | |||||
| // pattern in which '...' means 'any string' and there | |||||
| // is no other special syntax. | |||||
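| // For example, matchPattern("foo/...") matches "foo", "foo/bar", and "foo/bar/baz". | |||||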
| func matchPattern(pattern string) func(name string) bool { | |||||
| re := regexp.QuoteMeta(pattern) | |||||
| re = strings.Replace(re, `\.\.\.`, `.*`, -1) | |||||
| // Special case: foo/... matches foo too. | |||||
| if strings.HasSuffix(re, `/.*`) { | |||||
| re = re[:len(re)-len(`/.*`)] + `(/.*)?` | |||||
| } | |||||
| reg := regexp.MustCompile(`^` + re + `$`) | |||||
| return func(name string) bool { | |||||
| return reg.MatchString(name) | |||||
| } | |||||
| } | |||||
| // hasPathPrefix reports whether the path s begins with the | |||||
| // elements in prefix. | |||||
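| // For example, hasPathPrefix("a/b/c", "a/b") is true, while hasPathPrefix("a/bc", "a/b") is false. | |||||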
| func hasPathPrefix(s, prefix string) bool { | |||||
| switch { | |||||
| default: | |||||
| return false | |||||
| case len(s) == len(prefix): | |||||
| return s == prefix | |||||
| case len(s) > len(prefix): | |||||
| if prefix != "" && prefix[len(prefix)-1] == '/' { | |||||
| return strings.HasPrefix(s, prefix) | |||||
| } | |||||
| return s[len(prefix)] == '/' && s[:len(prefix)] == prefix | |||||
| } | |||||
| } | |||||
| // treeCanMatchPattern(pattern)(name) reports whether | |||||
| // name or children of name can possibly match pattern. | |||||
| // Pattern is the same limited glob accepted by matchPattern. | |||||
| func treeCanMatchPattern(pattern string) func(name string) bool { | |||||
| wildCard := false | |||||
| if i := strings.Index(pattern, "..."); i >= 0 { | |||||
| wildCard = true | |||||
| pattern = pattern[:i] | |||||
| } | |||||
| return func(name string) bool { | |||||
| return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || | |||||
| wildCard && strings.HasPrefix(name, pattern) | |||||
| } | |||||
| } | |||||
| func matchPackages(pattern string) []string { | |||||
| match := func(string) bool { return true } | |||||
| treeCanMatch := func(string) bool { return true } | |||||
| if pattern != "all" && pattern != "std" { | |||||
| match = matchPattern(pattern) | |||||
| treeCanMatch = treeCanMatchPattern(pattern) | |||||
| } | |||||
| have := map[string]bool{ | |||||
| "builtin": true, // ignore pseudo-package that exists only for documentation | |||||
| } | |||||
| if !buildContext.CgoEnabled { | |||||
| have["runtime/cgo"] = true // ignore during walk | |||||
| } | |||||
| var pkgs []string | |||||
| // Commands | |||||
| cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator) | |||||
| filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error { | |||||
| if err != nil || !fi.IsDir() || path == cmd { | |||||
| return nil | |||||
| } | |||||
| name := path[len(cmd):] | |||||
| if !treeCanMatch(name) { | |||||
| return filepath.SkipDir | |||||
| } | |||||
| // Commands are all in cmd/, not in subdirectories. | |||||
| if strings.Contains(name, string(filepath.Separator)) { | |||||
| return filepath.SkipDir | |||||
| } | |||||
| // We use, e.g., cmd/gofmt as the pseudo import path for gofmt. | |||||
| name = "cmd/" + name | |||||
| if have[name] { | |||||
| return nil | |||||
| } | |||||
| have[name] = true | |||||
| if !match(name) { | |||||
| return nil | |||||
| } | |||||
| _, err = buildContext.ImportDir(path, 0) | |||||
| if err != nil { | |||||
| if _, noGo := err.(*build.NoGoError); !noGo { | |||||
| log.Print(err) | |||||
| } | |||||
| return nil | |||||
| } | |||||
| pkgs = append(pkgs, name) | |||||
| return nil | |||||
| }) | |||||
| for _, src := range buildContext.SrcDirs() { | |||||
| if (pattern == "std" || pattern == "cmd") && src != gorootSrc { | |||||
| continue | |||||
| } | |||||
| src = filepath.Clean(src) + string(filepath.Separator) | |||||
| root := src | |||||
| if pattern == "cmd" { | |||||
| root += "cmd" + string(filepath.Separator) | |||||
| } | |||||
| filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { | |||||
| if err != nil || !fi.IsDir() || path == src { | |||||
| return nil | |||||
| } | |||||
| // Avoid .foo, _foo, and testdata directory trees. | |||||
| _, elem := filepath.Split(path) | |||||
| if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { | |||||
| return filepath.SkipDir | |||||
| } | |||||
| name := filepath.ToSlash(path[len(src):]) | |||||
| if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") { | |||||
| // The name "std" is only the standard library. | |||||
| // If the name is cmd, it's the root of the command tree. | |||||
| return filepath.SkipDir | |||||
| } | |||||
| if !treeCanMatch(name) { | |||||
| return filepath.SkipDir | |||||
| } | |||||
| if have[name] { | |||||
| return nil | |||||
| } | |||||
| have[name] = true | |||||
| if !match(name) { | |||||
| return nil | |||||
| } | |||||
| _, err = buildContext.ImportDir(path, 0) | |||||
| if err != nil { | |||||
| if _, noGo := err.(*build.NoGoError); noGo { | |||||
| return nil | |||||
| } | |||||
| } | |||||
| pkgs = append(pkgs, name) | |||||
| return nil | |||||
| }) | |||||
| } | |||||
| return pkgs | |||||
| } | |||||
| func matchPackagesInFS(pattern string) []string { | |||||
| // Find directory to begin the scan. | |||||
| // Could be smarter but this one optimization | |||||
| // is enough for now, since ... is usually at the | |||||
| // end of a path. | |||||
| i := strings.Index(pattern, "...") | |||||
| dir, _ := path.Split(pattern[:i]) | |||||
| // pattern begins with ./ or ../. | |||||
| // path.Clean will discard the ./ but not the ../. | |||||
| // We need to preserve the ./ for pattern matching | |||||
| // and in the returned import paths. | |||||
| prefix := "" | |||||
| if strings.HasPrefix(pattern, "./") { | |||||
| prefix = "./" | |||||
| } | |||||
| match := matchPattern(pattern) | |||||
| var pkgs []string | |||||
| filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { | |||||
| if err != nil || !fi.IsDir() { | |||||
| return nil | |||||
| } | |||||
| if path == dir { | |||||
| // filepath.Walk starts at dir and recurses. For the recursive case, | |||||
| // the path is the result of filepath.Join, which calls filepath.Clean. | |||||
| // The initial case is not Cleaned, though, so we do this explicitly. | |||||
| // | |||||
| // This converts a path like "./io/" to "io". Without this step, running | |||||
| // "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io | |||||
| // package, because prepending the prefix "./" to the unclean path would | |||||
| // result in "././io", and match("././io") returns false. | |||||
| path = filepath.Clean(path) | |||||
| } | |||||
| // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". | |||||
| _, elem := filepath.Split(path) | |||||
| dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." | |||||
| if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { | |||||
| return filepath.SkipDir | |||||
| } | |||||
| name := prefix + filepath.ToSlash(path) | |||||
| if !match(name) { | |||||
| return nil | |||||
| } | |||||
| if _, err = build.ImportDir(path, 0); err != nil { | |||||
| if _, noGo := err.(*build.NoGoError); !noGo { | |||||
| log.Print(err) | |||||
| } | |||||
| return nil | |||||
| } | |||||
| pkgs = append(pkgs, name) | |||||
| return nil | |||||
| }) | |||||
| return pkgs | |||||
| } | |||||
| @@ -0,0 +1,21 @@ | |||||
| MIT License | |||||
| Copyright (c) 2018 Minko Gechev | |||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | |||||
| of this software and associated documentation files (the "Software"), to deal | |||||
| in the Software without restriction, including without limitation the rights | |||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||||
| copies of the Software, and to permit persons to whom the Software is | |||||
| furnished to do so, subject to the following conditions: | |||||
| The above copyright notice and this permission notice shall be included in all | |||||
| copies or substantial portions of the Software. | |||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||||
| SOFTWARE. | |||||
| @@ -0,0 +1,76 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "bytes" | |||||
| "encoding/xml" | |||||
| "github.com/mgechev/revive/lint" | |||||
| plainTemplate "text/template" | |||||
| ) | |||||
| // Checkstyle is an implementation of the Formatter interface | |||||
| // which formats the errors to a Checkstyle-like format. | |||||
| type Checkstyle struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *Checkstyle) Name() string { | |||||
| return "checkstyle" | |||||
| } | |||||
| type issue struct { | |||||
| Line int | |||||
| Col int | |||||
| What string | |||||
| Confidence float64 | |||||
| Severity lint.Severity | |||||
| RuleName string | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *Checkstyle) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { | |||||
| var issues = map[string][]issue{} | |||||
| for failure := range failures { | |||||
| buf := new(bytes.Buffer) | |||||
| xml.Escape(buf, []byte(failure.Failure)) | |||||
| what := buf.String() | |||||
| iss := issue{ | |||||
| Line: failure.Position.Start.Line, | |||||
| Col: failure.Position.Start.Column, | |||||
| What: what, | |||||
| Confidence: failure.Confidence, | |||||
| Severity: severity(config, failure), | |||||
| RuleName: failure.RuleName, | |||||
| } | |||||
| fn := failure.GetFilename() | |||||
| if issues[fn] == nil { | |||||
| issues[fn] = make([]issue, 0) | |||||
| } | |||||
| issues[fn] = append(issues[fn], iss) | |||||
| } | |||||
| t, err := plainTemplate.New("revive").Parse(checkstyleTemplate) | |||||
| if err != nil { | |||||
| return "", err | |||||
| } | |||||
| buf := new(bytes.Buffer) | |||||
| err = t.Execute(buf, issues) | |||||
| if err != nil { | |||||
| return "", err | |||||
| } | |||||
| return buf.String(), nil | |||||
| } | |||||
| const checkstyleTemplate = `<?xml version='1.0' encoding='UTF-8'?> | |||||
| <checkstyle version="5.0"> | |||||
| {{- range $k, $v := . }} | |||||
| <file name="{{ $k }}"> | |||||
| {{- range $i, $issue := $v }} | |||||
| <error line="{{ $issue.Line }}" column="{{ $issue.Col }}" message="{{ $issue.What }} (confidence {{ $issue.Confidence}})" severity="{{ $issue.Severity }}" source="revive/{{ $issue.RuleName }}"/> | |||||
| {{- end }} | |||||
| </file> | |||||
| {{- end }} | |||||
| </checkstyle>` | |||||
| @@ -0,0 +1,26 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // Default is an implementation of the Formatter interface | |||||
| // which formats the errors to text. | |||||
| type Default struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *Default) Name() string { | |||||
| return "default" | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *Default) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) { | |||||
| for failure := range failures { | |||||
| fmt.Printf("%v: %s\n", failure.Position.Start, failure.Failure) | |||||
| } | |||||
| return "", nil | |||||
| } | |||||
| @@ -0,0 +1,146 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "bytes" | |||||
| "fmt" | |||||
| "sort" | |||||
| "github.com/fatih/color" | |||||
| "github.com/mgechev/revive/lint" | |||||
| "github.com/olekukonko/tablewriter" | |||||
| ) | |||||
| var ( | |||||
| errorEmoji = color.RedString("✘") | |||||
| warningEmoji = color.YellowString("⚠") | |||||
| ) | |||||
| var newLines = map[rune]bool{ | |||||
| 0x000A: true, | |||||
| 0x000B: true, | |||||
| 0x000C: true, | |||||
| 0x000D: true, | |||||
| 0x0085: true, | |||||
| 0x2028: true, | |||||
| 0x2029: true, | |||||
| } | |||||
| // Friendly is an implementation of the Formatter interface | |||||
| // which formats the errors in a human-friendly, colorized format. | |||||
| type Friendly struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *Friendly) Name() string { | |||||
| return "friendly" | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { | |||||
| errorMap := map[string]int{} | |||||
| warningMap := map[string]int{} | |||||
| totalErrors := 0 | |||||
| totalWarnings := 0 | |||||
| for failure := range failures { | |||||
| sev := severity(config, failure) | |||||
| f.printFriendlyFailure(failure, sev) | |||||
| if sev == lint.SeverityWarning { | |||||
| warningMap[failure.RuleName] = warningMap[failure.RuleName] + 1 | |||||
| totalWarnings++ | |||||
| } | |||||
| if sev == lint.SeverityError { | |||||
| errorMap[failure.RuleName] = errorMap[failure.RuleName] + 1 | |||||
| totalErrors++ | |||||
| } | |||||
| } | |||||
| f.printSummary(totalErrors, totalWarnings) | |||||
| f.printStatistics(color.RedString("Errors:"), errorMap) | |||||
| f.printStatistics(color.YellowString("Warnings:"), warningMap) | |||||
| return "", nil | |||||
| } | |||||
| func (f *Friendly) printFriendlyFailure(failure lint.Failure, severity lint.Severity) { | |||||
| f.printHeaderRow(failure, severity) | |||||
| f.printFilePosition(failure) | |||||
| fmt.Println() | |||||
| fmt.Println() | |||||
| } | |||||
| func (f *Friendly) printHeaderRow(failure lint.Failure, severity lint.Severity) { | |||||
| emoji := warningEmoji | |||||
| if severity == lint.SeverityError { | |||||
| emoji = errorEmoji | |||||
| } | |||||
| fmt.Print(f.table([][]string{{emoji, "https://revive.run/r#" + failure.RuleName, color.GreenString(failure.Failure)}})) | |||||
| } | |||||
| func (f *Friendly) printFilePosition(failure lint.Failure) { | |||||
| fmt.Printf(" %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column) | |||||
| } | |||||
| type statEntry struct { | |||||
| name string | |||||
| failures int | |||||
| } | |||||
| func (f *Friendly) printSummary(errors, warnings int) { | |||||
| emoji := warningEmoji | |||||
| if errors > 0 { | |||||
| emoji = errorEmoji | |||||
| } | |||||
| problemsLabel := "problems" | |||||
| if errors+warnings == 1 { | |||||
| problemsLabel = "problem" | |||||
| } | |||||
| warningsLabel := "warnings" | |||||
| if warnings == 1 { | |||||
| warningsLabel = "warning" | |||||
| } | |||||
| errorsLabel := "errors" | |||||
| if errors == 1 { | |||||
| errorsLabel = "error" | |||||
| } | |||||
| str := fmt.Sprintf("%d %s (%d %s, %d %s)", errors+warnings, problemsLabel, errors, errorsLabel, warnings, warningsLabel) | |||||
| if errors > 0 { | |||||
| fmt.Printf("%s %s\n", emoji, color.RedString(str)) | |||||
| fmt.Println() | |||||
| return | |||||
| } | |||||
| if warnings > 0 { | |||||
| fmt.Printf("%s %s\n", emoji, color.YellowString(str)) | |||||
| fmt.Println() | |||||
| return | |||||
| } | |||||
| } | |||||
| func (f *Friendly) printStatistics(header string, stats map[string]int) { | |||||
| if len(stats) == 0 { | |||||
| return | |||||
| } | |||||
| var data []statEntry | |||||
| for name, total := range stats { | |||||
| data = append(data, statEntry{name, total}) | |||||
| } | |||||
| sort.Slice(data, func(i, j int) bool { | |||||
| return data[i].failures > data[j].failures | |||||
| }) | |||||
| formatted := [][]string{} | |||||
| for _, entry := range data { | |||||
| formatted = append(formatted, []string{color.GreenString(fmt.Sprintf("%d", entry.failures)), entry.name}) | |||||
| } | |||||
| fmt.Println(header) | |||||
| fmt.Println(f.table(formatted)) | |||||
| } | |||||
| func (f *Friendly) table(rows [][]string) string { | |||||
| buf := new(bytes.Buffer) | |||||
| table := tablewriter.NewWriter(buf) | |||||
| table.SetBorder(false) | |||||
| table.SetColumnSeparator("") | |||||
| table.SetRowSeparator("") | |||||
| table.SetAutoWrapText(false) | |||||
| table.AppendBulk(rows) | |||||
| table.Render() | |||||
| return buf.String() | |||||
| } | |||||
| @@ -0,0 +1,40 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "encoding/json" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // JSON is an implementation of the Formatter interface | |||||
| // which formats the errors to JSON. | |||||
| type JSON struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *JSON) Name() string { | |||||
| return "json" | |||||
| } | |||||
| // jsonObject defines a JSON object of a failure | |||||
| type jsonObject struct { | |||||
| Severity lint.Severity | |||||
| lint.Failure `json:",inline"` | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *JSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { | |||||
| var slice []jsonObject | |||||
| for failure := range failures { | |||||
| obj := jsonObject{} | |||||
| obj.Severity = severity(config, failure) | |||||
| obj.Failure = failure | |||||
| slice = append(slice, obj) | |||||
| } | |||||
| result, err := json.Marshal(slice) | |||||
| if err != nil { | |||||
| return "", err | |||||
| } | |||||
| return string(result), err | |||||
| } | |||||
| @@ -0,0 +1,34 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "encoding/json" | |||||
| "os" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // NDJSON is an implementation of the Formatter interface | |||||
| // which formats the errors to an NDJSON stream. | |||||
| type NDJSON struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *NDJSON) Name() string { | |||||
| return "ndjson" | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *NDJSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { | |||||
| enc := json.NewEncoder(os.Stdout) | |||||
| for failure := range failures { | |||||
| obj := jsonObject{} | |||||
| obj.Severity = severity(config, failure) | |||||
| obj.Failure = failure | |||||
| err := enc.Encode(obj) | |||||
| if err != nil { | |||||
| return "", err | |||||
| } | |||||
| } | |||||
| return "", nil | |||||
| } | |||||
| @@ -0,0 +1,26 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // Plain is an implementation of the Formatter interface | |||||
| // which formats the errors as plain text, one failure per line. | |||||
| type Plain struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *Plain) Name() string { | |||||
| return "plain" | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *Plain) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) { | |||||
| for failure := range failures { | |||||
| fmt.Printf("%v: %s %s\n", failure.Position.Start, failure.Failure, "https://revive.run/r#"+failure.RuleName) | |||||
| } | |||||
| return "", nil | |||||
| } | |||||
| @@ -0,0 +1,13 @@ | |||||
| package formatter | |||||
| import "github.com/mgechev/revive/lint" | |||||
| func severity(config lint.Config, failure lint.Failure) lint.Severity { | |||||
| if config, ok := config.Rules[failure.RuleName]; ok && config.Severity == lint.SeverityError { | |||||
| return lint.SeverityError | |||||
| } | |||||
| if config, ok := config.Directives[failure.RuleName]; ok && config.Severity == lint.SeverityError { | |||||
| return lint.SeverityError | |||||
| } | |||||
| return lint.SeverityWarning | |||||
| } | |||||
| @@ -0,0 +1,89 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "bytes" | |||||
| "fmt" | |||||
| "github.com/fatih/color" | |||||
| "github.com/mgechev/revive/lint" | |||||
| "github.com/olekukonko/tablewriter" | |||||
| ) | |||||
| // Stylish is an implementation of the Formatter interface | |||||
| // which formats the failures into a per-file, stylish table. | |||||
| type Stylish struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *Stylish) Name() string { | |||||
| return "stylish" | |||||
| } | |||||
| func formatFailure(failure lint.Failure, severity lint.Severity) []string { | |||||
| fString := color.CyanString(failure.Failure) | |||||
| fName := color.RedString("https://revive.run/r#" + failure.RuleName) | |||||
| lineColumn := failure.Position | |||||
| pos := fmt.Sprintf("(%d, %d)", lineColumn.Start.Line, lineColumn.Start.Column) | |||||
| if severity == lint.SeverityWarning { | |||||
| fName = color.YellowString("https://revive.run/r#" + failure.RuleName) | |||||
| } | |||||
| return []string{failure.GetFilename(), pos, fName, fString} | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *Stylish) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { | |||||
| var result [][]string | |||||
| var totalErrors = 0 | |||||
| var total = 0 | |||||
| for f := range failures { | |||||
| total++ | |||||
| currentType := severity(config, f) | |||||
| if currentType == lint.SeverityError { | |||||
| totalErrors++ | |||||
| } | |||||
| result = append(result, formatFailure(f, lint.Severity(currentType))) | |||||
| } | |||||
| ps := "problems" | |||||
| if total == 1 { | |||||
| ps = "problem" | |||||
| } | |||||
| fileReport := make(map[string][][]string) | |||||
| for _, row := range result { | |||||
| if _, ok := fileReport[row[0]]; !ok { | |||||
| fileReport[row[0]] = [][]string{} | |||||
| } | |||||
| fileReport[row[0]] = append(fileReport[row[0]], []string{row[1], row[2], row[3]}) | |||||
| } | |||||
| output := "" | |||||
| for filename, val := range fileReport { | |||||
| buf := new(bytes.Buffer) | |||||
| table := tablewriter.NewWriter(buf) | |||||
| table.SetBorder(false) | |||||
| table.SetColumnSeparator("") | |||||
| table.SetRowSeparator("") | |||||
| table.SetAutoWrapText(false) | |||||
| table.AppendBulk(val) | |||||
| table.Render() | |||||
| c := color.New(color.Underline) | |||||
| output += c.SprintfFunc()(filename + "\n") | |||||
| output += buf.String() + "\n" | |||||
| } | |||||
| suffix := fmt.Sprintf(" %d %s (%d errors) (%d warnings)", total, ps, totalErrors, total-totalErrors) | |||||
| if total > 0 && totalErrors > 0 { | |||||
| suffix = color.RedString("\n ✖" + suffix) | |||||
| } else if total > 0 && totalErrors == 0 { | |||||
| suffix = color.YellowString("\n ✖" + suffix) | |||||
| } else { | |||||
| suffix, output = "", "" | |||||
| } | |||||
| return output + suffix, nil | |||||
| } | |||||
| @@ -0,0 +1,27 @@ | |||||
| package formatter | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // Unix is an implementation of the Formatter interface | |||||
| // which formats the errors to a simple line-based error format: | |||||
| // main.go:24:9: [errorf] should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...) | |||||
| type Unix struct { | |||||
| Metadata lint.FormatterMetadata | |||||
| } | |||||
| // Name returns the name of the formatter | |||||
| func (f *Unix) Name() string { | |||||
| return "unix" | |||||
| } | |||||
| // Format formats the failures received from the linter. | |||||
| func (f *Unix) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) { | |||||
| for failure := range failures { | |||||
| fmt.Printf("%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure) | |||||
| } | |||||
| return "", nil | |||||
| } | |||||
| @@ -0,0 +1,32 @@ | |||||
| package lint | |||||
| // Arguments is the type used for the arguments of a rule. | |||||
| type Arguments = []interface{} | |||||
| // RuleConfig is the type used for the rule configuration. | |||||
| type RuleConfig struct { | |||||
| Arguments Arguments | |||||
| Severity Severity | |||||
| } | |||||
| // RulesConfig defines the config for all rules. | |||||
| type RulesConfig = map[string]RuleConfig | |||||
| // DirectiveConfig is the type used for the linter directive configuration. | |||||
| type DirectiveConfig struct { | |||||
| Severity Severity | |||||
| } | |||||
| // DirectivesConfig defines the config for all directives. | |||||
| type DirectivesConfig = map[string]DirectiveConfig | |||||
| // Config defines the config of the linter. | |||||
| type Config struct { | |||||
| IgnoreGeneratedHeader bool `toml:"ignoreGeneratedHeader"` | |||||
| Confidence float64 | |||||
| Severity Severity | |||||
| Rules RulesConfig `toml:"rule"` | |||||
| ErrorCode int `toml:"errorCode"` | |||||
| WarningCode int `toml:"warningCode"` | |||||
| Directives DirectivesConfig `toml:"directive"` | |||||
| } | |||||
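| // For illustration, a .revive.toml that decodes into this struct could look like | |||||
| // (the rule and directive names below are examples): | |||||
| // | |||||
| //   ignoreGeneratedHeader = false | |||||
| //   severity = "warning" | |||||
| //   confidence = 0.8 | |||||
| //   errorCode = 1 | |||||
| //   warningCode = 1 | |||||
| // | |||||
| //   [rule.argument-limit] | |||||
| //     arguments = [4] | |||||
| //     severity = "error" | |||||
| // | |||||
| //   [directive.specify-disable-reason] | |||||
| //     severity = "error" | |||||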
| @@ -0,0 +1,39 @@ | |||||
| package lint | |||||
| import ( | |||||
| "go/ast" | |||||
| "go/token" | |||||
| ) | |||||
| const ( | |||||
| // SeverityWarning declares failures of type warning | |||||
| SeverityWarning = "warning" | |||||
| // SeverityError declares failures of type error. | |||||
| SeverityError = "error" | |||||
| ) | |||||
| // Severity is the type for the failure types. | |||||
| type Severity string | |||||
| // FailurePosition holds the start and end positions of a failure. | |||||
| type FailurePosition struct { | |||||
| Start token.Position | |||||
| End token.Position | |||||
| } | |||||
| // Failure defines a struct for a linting failure. | |||||
| type Failure struct { | |||||
| Failure string | |||||
| RuleName string | |||||
| Category string | |||||
| Position FailurePosition | |||||
| Node ast.Node `json:"-"` | |||||
| Confidence float64 | |||||
| // For future use | |||||
| ReplacementLine string | |||||
| } | |||||
| // GetFilename returns the filename. | |||||
| func (f *Failure) GetFilename() string { | |||||
| return f.Position.Start.Filename | |||||
| } | |||||
| @@ -0,0 +1,278 @@ | |||||
| package lint | |||||
| import ( | |||||
| "bytes" | |||||
| "go/ast" | |||||
| "go/parser" | |||||
| "go/printer" | |||||
| "go/token" | |||||
| "go/types" | |||||
| "math" | |||||
| "regexp" | |||||
| "strings" | |||||
| ) | |||||
| // File is an abstraction used for representing files. | |||||
| type File struct { | |||||
| Name string | |||||
| Pkg *Package | |||||
| content []byte | |||||
| AST *ast.File | |||||
| } | |||||
| // IsTest reports whether the file is a test file. | |||||
| func (f *File) IsTest() bool { return strings.HasSuffix(f.Name, "_test.go") } | |||||
| // Content returns the file's content. | |||||
| func (f *File) Content() []byte { | |||||
| return f.content | |||||
| } | |||||
| // NewFile creates a new file | |||||
| func NewFile(name string, content []byte, pkg *Package) (*File, error) { | |||||
| f, err := parser.ParseFile(pkg.fset, name, content, parser.ParseComments) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return &File{ | |||||
| Name: name, | |||||
| content: content, | |||||
| Pkg: pkg, | |||||
| AST: f, | |||||
| }, nil | |||||
| } | |||||
| // ToPosition returns line and column for given position. | |||||
| func (f *File) ToPosition(pos token.Pos) token.Position { | |||||
| return f.Pkg.fset.Position(pos) | |||||
| } | |||||
| // Render renders a node. | |||||
| func (f *File) Render(x interface{}) string { | |||||
| var buf bytes.Buffer | |||||
| if err := printer.Fprint(&buf, f.Pkg.fset, x); err != nil { | |||||
| panic(err) | |||||
| } | |||||
| return buf.String() | |||||
| } | |||||
| // CommentMap builds a comment map for the file. | |||||
| func (f *File) CommentMap() ast.CommentMap { | |||||
| return ast.NewCommentMap(f.Pkg.fset, f.AST, f.AST.Comments) | |||||
| } | |||||
| var basicTypeKinds = map[types.BasicKind]string{ | |||||
| types.UntypedBool: "bool", | |||||
| types.UntypedInt: "int", | |||||
| types.UntypedRune: "rune", | |||||
| types.UntypedFloat: "float64", | |||||
| types.UntypedComplex: "complex128", | |||||
| types.UntypedString: "string", | |||||
| } | |||||
| // IsUntypedConst reports whether expr is an untyped constant, | |||||
| // and indicates what its default type is. | |||||
| // scope may be nil. | |||||
| func (f *File) IsUntypedConst(expr ast.Expr) (defType string, ok bool) { | |||||
| // Re-evaluate expr outside of its context to see if it's untyped. | |||||
| // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) | |||||
| exprStr := f.Render(expr) | |||||
| tv, err := types.Eval(f.Pkg.fset, f.Pkg.TypesPkg, expr.Pos(), exprStr) | |||||
| if err != nil { | |||||
| return "", false | |||||
| } | |||||
| if b, ok := tv.Type.(*types.Basic); ok { | |||||
| if dt, ok := basicTypeKinds[b.Kind()]; ok { | |||||
| return dt, true | |||||
| } | |||||
| } | |||||
| return "", false | |||||
| } | |||||
| func (f *File) isMain() bool { | |||||
| if f.AST.Name.Name == "main" { | |||||
| return true | |||||
| } | |||||
| return false | |||||
| } | |||||
| const directiveSpecifyDisableReason = "specify-disable-reason" | |||||
| func (f *File) lint(rules []Rule, config Config, failures chan Failure) { | |||||
| rulesConfig := config.Rules | |||||
| _, mustSpecifyDisableReason := config.Directives[directiveSpecifyDisableReason] | |||||
| disabledIntervals := f.disabledIntervals(rules, mustSpecifyDisableReason, failures) | |||||
| for _, currentRule := range rules { | |||||
| ruleConfig := rulesConfig[currentRule.Name()] | |||||
| currentFailures := currentRule.Apply(f, ruleConfig.Arguments) | |||||
| for idx, failure := range currentFailures { | |||||
| if failure.RuleName == "" { | |||||
| failure.RuleName = currentRule.Name() | |||||
| } | |||||
| if failure.Node != nil { | |||||
| failure.Position = ToFailurePosition(failure.Node.Pos(), failure.Node.End(), f) | |||||
| } | |||||
| currentFailures[idx] = failure | |||||
| } | |||||
| currentFailures = f.filterFailures(currentFailures, disabledIntervals) | |||||
| for _, failure := range currentFailures { | |||||
| if failure.Confidence >= config.Confidence { | |||||
| failures <- failure | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| type enableDisableConfig struct { | |||||
| enabled bool | |||||
| position int | |||||
| } | |||||
| const directiveRE = `^//[\s]*revive:(enable|disable)(?:-(line|next-line))?(?::([^\s]+))?[\s]*(?: (.+))?$` | |||||
| const directivePos = 1 | |||||
| const modifierPos = 2 | |||||
| const rulesPos = 3 | |||||
| const reasonPos = 4 | |||||
| var re = regexp.MustCompile(directiveRE) | |||||
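| // For illustration, comments of the following forms match directiveRE | |||||
| // (the rule names and reasons are examples): | |||||
| //   //revive:disable | |||||
| //   //revive:enable | |||||
| //   //revive:disable-next-line:unexported-return | |||||
| //   //revive:disable-line:add-constant,var-naming legacy code | |||||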
| func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, failures chan Failure) disabledIntervalsMap { | |||||
| enabledDisabledRulesMap := make(map[string][]enableDisableConfig) | |||||
| getEnabledDisabledIntervals := func() disabledIntervalsMap { | |||||
| result := make(disabledIntervalsMap) | |||||
| for ruleName, disabledArr := range enabledDisabledRulesMap { | |||||
| ruleResult := []DisabledInterval{} | |||||
| for i := 0; i < len(disabledArr); i++ { | |||||
| interval := DisabledInterval{ | |||||
| RuleName: ruleName, | |||||
| From: token.Position{ | |||||
| Filename: f.Name, | |||||
| Line: disabledArr[i].position, | |||||
| }, | |||||
| To: token.Position{ | |||||
| Filename: f.Name, | |||||
| Line: math.MaxInt32, | |||||
| }, | |||||
| } | |||||
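| // Each even-indexed entry opens a disabled interval (initially unbounded, | |||||
| // i.e. To = MaxInt32); the following odd-indexed entry, if any, closes it. | |||||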
| if i%2 == 0 { | |||||
| ruleResult = append(ruleResult, interval) | |||||
| } else { | |||||
| ruleResult[len(ruleResult)-1].To.Line = disabledArr[i].position | |||||
| } | |||||
| } | |||||
| result[ruleName] = ruleResult | |||||
| } | |||||
| return result | |||||
| } | |||||
| handleConfig := func(isEnabled bool, line int, name string) { | |||||
| existing, ok := enabledDisabledRulesMap[name] | |||||
| if !ok { | |||||
| existing = []enableDisableConfig{} | |||||
| enabledDisabledRulesMap[name] = existing | |||||
| } | |||||
| if (len(existing) > 1 && existing[len(existing)-1].enabled == isEnabled) || | |||||
| (len(existing) == 0 && isEnabled) { | |||||
| return | |||||
| } | |||||
| existing = append(existing, enableDisableConfig{ | |||||
| enabled: isEnabled, | |||||
| position: line, | |||||
| }) | |||||
| enabledDisabledRulesMap[name] = existing | |||||
| } | |||||
| handleRules := func(filename, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval { | |||||
| var result []DisabledInterval | |||||
| for _, name := range ruleNames { | |||||
| if modifier == "line" { | |||||
| handleConfig(isEnabled, line, name) | |||||
| handleConfig(!isEnabled, line, name) | |||||
| } else if modifier == "next-line" { | |||||
| handleConfig(isEnabled, line+1, name) | |||||
| handleConfig(!isEnabled, line+1, name) | |||||
| } else { | |||||
| handleConfig(isEnabled, line, name) | |||||
| } | |||||
| } | |||||
| return result | |||||
| } | |||||
| handleComment := func(filename string, c *ast.CommentGroup, line int) { | |||||
| comments := c.List | |||||
| for _, c := range comments { | |||||
| match := re.FindStringSubmatch(c.Text) | |||||
| if len(match) == 0 { | |||||
| return | |||||
| } | |||||
| ruleNames := []string{} | |||||
| tempNames := strings.Split(match[rulesPos], ",") | |||||
| for _, name := range tempNames { | |||||
| name = strings.Trim(name, "\n") | |||||
| if len(name) > 0 { | |||||
| ruleNames = append(ruleNames, name) | |||||
| } | |||||
| } | |||||
| mustCheckDisablingReason := mustSpecifyDisableReason && match[directivePos] == "disable" | |||||
| if mustCheckDisablingReason && strings.Trim(match[reasonPos], " ") == "" { | |||||
| failures <- Failure{ | |||||
| Confidence: 1, | |||||
| RuleName: directiveSpecifyDisableReason, | |||||
| Failure: "reason of lint disabling not found", | |||||
| Position: ToFailurePosition(c.Pos(), c.End(), f), | |||||
| Node: c, | |||||
| } | |||||
| continue // skip this linter disabling directive | |||||
| } | |||||
| // TODO: optimize | |||||
| if len(ruleNames) == 0 { | |||||
| for _, rule := range rules { | |||||
| ruleNames = append(ruleNames, rule.Name()) | |||||
| } | |||||
| } | |||||
| handleRules(filename, match[modifierPos], match[directivePos] == "enable", line, ruleNames) | |||||
| } | |||||
| } | |||||
| comments := f.AST.Comments | |||||
| for _, c := range comments { | |||||
| handleComment(f.Name, c, f.ToPosition(c.End()).Line) | |||||
| } | |||||
| return getEnabledDisabledIntervals() | |||||
| } | |||||
| func (f *File) filterFailures(failures []Failure, disabledIntervals disabledIntervalsMap) []Failure { | |||||
| result := []Failure{} | |||||
| for _, failure := range failures { | |||||
| fStart := failure.Position.Start.Line | |||||
| fEnd := failure.Position.End.Line | |||||
| intervals, ok := disabledIntervals[failure.RuleName] | |||||
| if !ok { | |||||
| result = append(result, failure) | |||||
| } else { | |||||
| include := true | |||||
| for _, interval := range intervals { | |||||
| intStart := interval.From.Line | |||||
| intEnd := interval.To.Line | |||||
| if (fStart >= intStart && fStart <= intEnd) || | |||||
| (fEnd >= intStart && fEnd <= intEnd) { | |||||
| include = false | |||||
| break | |||||
| } | |||||
| } | |||||
| if include { | |||||
| result = append(result, failure) | |||||
| } | |||||
| } | |||||
| } | |||||
| return result | |||||
| } | |||||
| @@ -0,0 +1,14 @@ | |||||
| package lint | |||||
| // FormatterMetadata holds the configuration of a formatter | |||||
| type FormatterMetadata struct { | |||||
| Name string | |||||
| Description string | |||||
| Sample string | |||||
| } | |||||
| // Formatter defines an interface for failure formatters | |||||
| type Formatter interface { | |||||
| Format(<-chan Failure, Config) (string, error) | |||||
| Name() string | |||||
| } | |||||
| @@ -0,0 +1,99 @@ | |||||
| package lint | |||||
| import ( | |||||
| "bufio" | |||||
| "bytes" | |||||
| "fmt" | |||||
| "go/token" | |||||
| "os" | |||||
| "sync" | |||||
| ) | |||||
| // ReadFile defines an abstraction for reading files. | |||||
| type ReadFile func(path string) (result []byte, err error) | |||||
| type disabledIntervalsMap = map[string][]DisabledInterval | |||||
| // Linter is used for linting set of files. | |||||
| type Linter struct { | |||||
| reader ReadFile | |||||
| } | |||||
| // New creates a new Linter | |||||
| func New(reader ReadFile) Linter { | |||||
| return Linter{reader: reader} | |||||
| } | |||||
| var ( | |||||
| genHdr = []byte("// Code generated ") | |||||
| genFtr = []byte(" DO NOT EDIT.") | |||||
| ) | |||||
| // Lint lints a set of files with the specified rule. | |||||
| func (l *Linter) Lint(packages [][]string, ruleSet []Rule, config Config) (<-chan Failure, error) { | |||||
| failures := make(chan Failure) | |||||
| var wg sync.WaitGroup | |||||
| for _, pkg := range packages { | |||||
| wg.Add(1) | |||||
| go func(pkg []string) { | |||||
| if err := l.lintPackage(pkg, ruleSet, config, failures); err != nil { | |||||
| fmt.Fprintln(os.Stderr, err) | |||||
| os.Exit(1) | |||||
| } | |||||
| defer wg.Done() | |||||
| }(pkg) | |||||
| } | |||||
| go func() { | |||||
| wg.Wait() | |||||
| close(failures) | |||||
| }() | |||||
| return failures, nil | |||||
| } | |||||
| func (l *Linter) lintPackage(filenames []string, ruleSet []Rule, config Config, failures chan Failure) error { | |||||
| pkg := &Package{ | |||||
| fset: token.NewFileSet(), | |||||
| files: map[string]*File{}, | |||||
| mu: sync.Mutex{}, | |||||
| } | |||||
| for _, filename := range filenames { | |||||
| content, err := l.reader(filename) | |||||
| if err != nil { | |||||
| return err | |||||
| } | |||||
| if isGenerated(content) && !config.IgnoreGeneratedHeader { | |||||
| continue | |||||
| } | |||||
| file, err := NewFile(filename, content, pkg) | |||||
| if err != nil { | |||||
| return err | |||||
| } | |||||
| pkg.files[filename] = file | |||||
| } | |||||
| if len(pkg.files) == 0 { | |||||
| return nil | |||||
| } | |||||
| pkg.lint(ruleSet, config, failures) | |||||
| return nil | |||||
| } | |||||
| // isGenerated reports whether the source file is generated code | |||||
| // according to the rules from https://golang.org/s/generatedcode. | |||||
| // This is inherited from the original go lint. | |||||
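| // For example, a file containing a line such as | |||||
| //   // Code generated by protoc-gen-go. DO NOT EDIT. | |||||
| // is considered generated. | |||||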
| func isGenerated(src []byte) bool { | |||||
| sc := bufio.NewScanner(bytes.NewReader(src)) | |||||
| for sc.Scan() { | |||||
| b := sc.Bytes() | |||||
| if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| @@ -0,0 +1,178 @@ | |||||
| package lint | |||||
| import ( | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "go/types" | |||||
| "sync" | |||||
| "golang.org/x/tools/go/gcexportdata" | |||||
| ) | |||||
| // Package represents a package in the project. | |||||
| type Package struct { | |||||
| fset *token.FileSet | |||||
| files map[string]*File | |||||
| TypesPkg *types.Package | |||||
| TypesInfo *types.Info | |||||
| // Sortable is the set of types in the package that implement sort.Interface. | |||||
| Sortable map[string]bool | |||||
| // main is whether this is a "main" package. | |||||
| main int | |||||
| mu sync.Mutex | |||||
| } | |||||
| var newImporter = func(fset *token.FileSet) types.ImporterFrom { | |||||
| return gcexportdata.NewImporter(fset, make(map[string]*types.Package)) | |||||
| } | |||||
| var ( | |||||
| trueValue = 1 | |||||
| falseValue = 2 | |||||
| notSet = 3 | |||||
| ) | |||||
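| // These values act as a lazily computed tri-state cache for Package.main; | |||||
| // the zero value of the field means IsMain has not been evaluated yet. | |||||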
| // IsMain reports whether this is the main package. | |||||
| func (p *Package) IsMain() bool { | |||||
| if p.main == trueValue { | |||||
| return true | |||||
| } else if p.main == falseValue { | |||||
| return false | |||||
| } | |||||
| for _, f := range p.files { | |||||
| if f.isMain() { | |||||
| p.main = trueValue | |||||
| return true | |||||
| } | |||||
| } | |||||
| p.main = falseValue | |||||
| return false | |||||
| } | |||||
| // TypeCheck performs type checking for given package. | |||||
| func (p *Package) TypeCheck() error { | |||||
| p.mu.Lock() | |||||
| // If type checking has already been performed | |||||
| // skip it. | |||||
| if p.TypesInfo != nil || p.TypesPkg != nil { | |||||
| p.mu.Unlock() | |||||
| return nil | |||||
| } | |||||
| config := &types.Config{ | |||||
| // By setting a no-op error reporter, the type checker does as much work as possible. | |||||
| Error: func(error) {}, | |||||
| Importer: newImporter(p.fset), | |||||
| } | |||||
| info := &types.Info{ | |||||
| Types: make(map[ast.Expr]types.TypeAndValue), | |||||
| Defs: make(map[*ast.Ident]types.Object), | |||||
| Uses: make(map[*ast.Ident]types.Object), | |||||
| Scopes: make(map[ast.Node]*types.Scope), | |||||
| } | |||||
| var anyFile *File | |||||
| var astFiles []*ast.File | |||||
| for _, f := range p.files { | |||||
| anyFile = f | |||||
| astFiles = append(astFiles, f.AST) | |||||
| } | |||||
| typesPkg, err := check(config, anyFile.AST.Name.Name, p.fset, astFiles, info) | |||||
| // Remember the typechecking info, even if config.Check failed, | |||||
| // since we will get partial information. | |||||
| p.TypesPkg = typesPkg | |||||
| p.TypesInfo = info | |||||
| p.mu.Unlock() | |||||
| return err | |||||
| } | |||||
| // check function encapsulates the call to go/types.Config.Check method and | |||||
| // recovers if the called method panics (see issue #59) | |||||
| func check(config *types.Config, n string, fset *token.FileSet, astFiles []*ast.File, info *types.Info) (p *types.Package, err error) { | |||||
| defer func() { | |||||
| if r := recover(); r != nil { | |||||
| err, _ = r.(error) | |||||
| p = nil | |||||
| return | |||||
| } | |||||
| }() | |||||
| return config.Check(n, fset, astFiles, info) | |||||
| } | |||||
| // TypeOf returns the type of an expression. | |||||
| func (p *Package) TypeOf(expr ast.Expr) types.Type { | |||||
| if p.TypesInfo == nil { | |||||
| return nil | |||||
| } | |||||
| return p.TypesInfo.TypeOf(expr) | |||||
| } | |||||
| type walker struct { | |||||
| nmap map[string]int | |||||
| has map[string]int | |||||
| } | |||||
| func (w *walker) Visit(n ast.Node) ast.Visitor { | |||||
| fn, ok := n.(*ast.FuncDecl) | |||||
| if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { | |||||
| return w | |||||
| } | |||||
| // TODO(dsymonds): We could check the signature to be more precise. | |||||
| recv := receiverType(fn) | |||||
| if i, ok := w.nmap[fn.Name.Name]; ok { | |||||
| w.has[recv] |= i | |||||
| } | |||||
| return w | |||||
| } | |||||
| func (p *Package) scanSortable() { | |||||
| p.Sortable = make(map[string]bool) | |||||
| // bitfield for which methods exist on each type. | |||||
| const ( | |||||
| Len = 1 << iota | |||||
| Less | |||||
| Swap | |||||
| ) | |||||
| nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} | |||||
| has := make(map[string]int) | |||||
| for _, f := range p.files { | |||||
| ast.Walk(&walker{nmap, has}, f.AST) | |||||
| } | |||||
| for typ, ms := range has { | |||||
| if ms == Len|Less|Swap { | |||||
| p.Sortable[typ] = true | |||||
| } | |||||
| } | |||||
| } | |||||
| // receiverType returns the named type of the method receiver, sans "*", | |||||
| // or "invalid-type" if fn.Recv is ill formed. | |||||
| func receiverType(fn *ast.FuncDecl) string { | |||||
| switch e := fn.Recv.List[0].Type.(type) { | |||||
| case *ast.Ident: | |||||
| return e.Name | |||||
| case *ast.StarExpr: | |||||
| if id, ok := e.X.(*ast.Ident); ok { | |||||
| return id.Name | |||||
| } | |||||
| } | |||||
| // The parser accepts much more than just the legal forms. | |||||
| return "invalid-type" | |||||
| } | |||||
| func (p *Package) lint(rules []Rule, config Config, failures chan Failure) { | |||||
| p.scanSortable() | |||||
| var wg sync.WaitGroup | |||||
| for _, file := range p.files { | |||||
| wg.Add(1) | |||||
| go (func(file *File) { | |||||
| file.lint(rules, config, failures) | |||||
| defer wg.Done() | |||||
| })(file) | |||||
| } | |||||
| wg.Wait() | |||||
| } | |||||
| @@ -0,0 +1,31 @@ | |||||
| package lint | |||||
| import ( | |||||
| "go/token" | |||||
| ) | |||||
| // DisabledInterval contains a single disabled interval and the associated rule name. | |||||
| type DisabledInterval struct { | |||||
| From token.Position | |||||
| To token.Position | |||||
| RuleName string | |||||
| } | |||||
| // Rule defines an abstract rule interface. | |||||
| type Rule interface { | |||||
| Name() string | |||||
| Apply(*File, Arguments) []Failure | |||||
| } | |||||
| // AbstractRule defines an abstract rule. | |||||
| type AbstractRule struct { | |||||
| Failures []Failure | |||||
| } | |||||
| // ToFailurePosition returns the failure position. | |||||
| func ToFailurePosition(start token.Pos, end token.Pos, file *File) FailurePosition { | |||||
| return FailurePosition{ | |||||
| Start: file.ToPosition(start), | |||||
| End: file.ToPosition(end), | |||||
| } | |||||
| } | |||||
| @@ -0,0 +1,128 @@ | |||||
| package lint | |||||
| import ( | |||||
| "strings" | |||||
| "unicode" | |||||
| ) | |||||
| // Name returns a corrected form of the given name if it should be different (normalizing initialisms and underscores). | |||||
| func Name(name string, whitelist, blacklist []string) (should string) { | |||||
| // Fast path for simple cases: "_" and all lowercase. | |||||
| if name == "_" { | |||||
| return name | |||||
| } | |||||
| allLower := true | |||||
| for _, r := range name { | |||||
| if !unicode.IsLower(r) { | |||||
| allLower = false | |||||
| break | |||||
| } | |||||
| } | |||||
| if allLower { | |||||
| return name | |||||
| } | |||||
| // Split camelCase at any lower->upper transition, and split on underscores. | |||||
| // Check each word for common initialisms. | |||||
| runes := []rune(name) | |||||
| w, i := 0, 0 // index of start of word, scan | |||||
| for i+1 <= len(runes) { | |||||
| eow := false // whether we hit the end of a word | |||||
| if i+1 == len(runes) { | |||||
| eow = true | |||||
| } else if runes[i+1] == '_' { | |||||
| // underscore; shift the remainder forward over any run of underscores | |||||
| eow = true | |||||
| n := 1 | |||||
| for i+n+1 < len(runes) && runes[i+n+1] == '_' { | |||||
| n++ | |||||
| } | |||||
| // Leave at most one underscore if the underscore is between two digits | |||||
| if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { | |||||
| n-- | |||||
| } | |||||
| copy(runes[i+1:], runes[i+n+1:]) | |||||
| runes = runes[:len(runes)-n] | |||||
| } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { | |||||
| // lower->non-lower | |||||
| eow = true | |||||
| } | |||||
| i++ | |||||
| if !eow { | |||||
| continue | |||||
| } | |||||
| // [w,i) is a word. | |||||
| word := string(runes[w:i]) | |||||
| ignoreInitWarnings := map[string]bool{} | |||||
| for _, i := range whitelist { | |||||
| ignoreInitWarnings[i] = true | |||||
| } | |||||
| extraInits := map[string]bool{} | |||||
| for _, i := range blacklist { | |||||
| extraInits[i] = true | |||||
| } | |||||
| if u := strings.ToUpper(word); (commonInitialisms[u] || extraInits[u]) && !ignoreInitWarnings[u] { | |||||
| // Keep consistent case, which is lowercase only at the start. | |||||
| if w == 0 && unicode.IsLower(runes[w]) { | |||||
| u = strings.ToLower(u) | |||||
| } | |||||
| // All the common initialisms are ASCII, | |||||
| // so we can replace the bytes exactly. | |||||
| copy(runes[w:], []rune(u)) | |||||
| } else if w > 0 && strings.ToLower(word) == word { | |||||
| // already all lowercase, and not the first word, so uppercase the first character. | |||||
| runes[w] = unicode.ToUpper(runes[w]) | |||||
| } | |||||
| w = i | |||||
| } | |||||
| return string(runes) | |||||
| } | |||||
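| // Illustrative behaviour (with empty whitelist and blacklist): | |||||
| //   Name("user_id", nil, nil)    // "userID" | |||||
| //   Name("parse_http", nil, nil) // "parseHTTP" | |||||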
| // commonInitialisms is a set of common initialisms. | |||||
| // Only add entries that are highly unlikely to be non-initialisms. | |||||
| // For instance, "ID" is fine (Freudian code is rare), but "AND" is not. | |||||
| var commonInitialisms = map[string]bool{ | |||||
| "ACL": true, | |||||
| "API": true, | |||||
| "ASCII": true, | |||||
| "CPU": true, | |||||
| "CSS": true, | |||||
| "DNS": true, | |||||
| "EOF": true, | |||||
| "GUID": true, | |||||
| "HTML": true, | |||||
| "HTTP": true, | |||||
| "HTTPS": true, | |||||
| "ID": true, | |||||
| "IP": true, | |||||
| "JSON": true, | |||||
| "LHS": true, | |||||
| "QPS": true, | |||||
| "RAM": true, | |||||
| "RHS": true, | |||||
| "RPC": true, | |||||
| "SLA": true, | |||||
| "SMTP": true, | |||||
| "SQL": true, | |||||
| "SSH": true, | |||||
| "TCP": true, | |||||
| "TLS": true, | |||||
| "TTL": true, | |||||
| "UDP": true, | |||||
| "UI": true, | |||||
| "UID": true, | |||||
| "UUID": true, | |||||
| "URI": true, | |||||
| "URL": true, | |||||
| "UTF8": true, | |||||
| "VM": true, | |||||
| "XML": true, | |||||
| "XMPP": true, | |||||
| "XSRF": true, | |||||
| "XSS": true, | |||||
| } | |||||
| @@ -0,0 +1,151 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/mgechev/revive/lint" | |||||
| "go/ast" | |||||
| "strconv" | |||||
| "strings" | |||||
| ) | |||||
| const ( | |||||
| defaultStrLitLimit = 2 | |||||
| kindFLOAT = "FLOAT" | |||||
| kindINT = "INT" | |||||
| kindSTRING = "STRING" | |||||
| ) | |||||
| type whiteList map[string]map[string]bool | |||||
| func newWhiteList() whiteList { | |||||
| return map[string]map[string]bool{kindINT: map[string]bool{}, kindFLOAT: map[string]bool{}, kindSTRING: map[string]bool{}} | |||||
| } | |||||
| func (wl whiteList) add(kind string, list string) { | |||||
| elems := strings.Split(list, ",") | |||||
| for _, e := range elems { | |||||
| wl[kind][e] = true | |||||
| } | |||||
| } | |||||
| // AddConstantRule warns on repeated string literals and magic numbers that could be replaced by named constants. | |||||
| type AddConstantRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { | |||||
| strLitLimit := defaultStrLitLimit | |||||
| var whiteList = newWhiteList() | |||||
| if len(arguments) > 0 { | |||||
| args, ok := arguments[0].(map[string]interface{}) | |||||
| if !ok { | |||||
| panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0])) | |||||
| } | |||||
| for k, v := range args { | |||||
| kind := "" | |||||
| switch k { | |||||
| case "allowFloats": | |||||
| kind = kindFLOAT | |||||
| fallthrough | |||||
| case "allowInts": | |||||
| if kind == "" { | |||||
| kind = kindINT | |||||
| } | |||||
| fallthrough | |||||
| case "allowStrs": | |||||
| if kind == "" { | |||||
| kind = kindSTRING | |||||
| } | |||||
| list, ok := v.(string) | |||||
| if !ok { | |||||
| panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v)) | |||||
| } | |||||
| whiteList.add(kind, list) | |||||
| case "maxLitCount": | |||||
| sl, ok := v.(string) | |||||
| if !ok { | |||||
| panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v)) | |||||
| } | |||||
| limit, err := strconv.Atoi(sl) | |||||
| if err != nil { | |||||
| panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v)) | |||||
| } | |||||
| strLitLimit = limit | |||||
| } | |||||
| } | |||||
| } | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| w := lintAddConstantRule{onFailure: onFailure, strLits: make(map[string]int, 0), strLitLimit: strLitLimit, whiteLst: whiteList} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *AddConstantRule) Name() string { | |||||
| return "add-constant" | |||||
| } | |||||
| type lintAddConstantRule struct { | |||||
| onFailure func(lint.Failure) | |||||
| strLits map[string]int | |||||
| strLitLimit int | |||||
| whiteLst whiteList | |||||
| } | |||||
| func (w lintAddConstantRule) Visit(node ast.Node) ast.Visitor { | |||||
| switch n := node.(type) { | |||||
| case *ast.GenDecl: | |||||
| return nil // skip declarations | |||||
| case *ast.BasicLit: | |||||
| switch kind := n.Kind.String(); kind { | |||||
| case kindFLOAT, kindINT: | |||||
| w.checkNumLit(kind, n) | |||||
| case kindSTRING: | |||||
| w.checkStrLit(n) | |||||
| } | |||||
| } | |||||
| return w | |||||
| } | |||||
| func (w lintAddConstantRule) checkStrLit(n *ast.BasicLit) { | |||||
| if w.whiteLst[kindSTRING][n.Value] { | |||||
| return | |||||
| } | |||||
| count := w.strLits[n.Value] | |||||
| if count >= 0 { | |||||
| w.strLits[n.Value] = count + 1 | |||||
| if w.strLits[n.Value] > w.strLitLimit { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: n, | |||||
| Category: "style", | |||||
| Failure: fmt.Sprintf("string literal %s appears, at least, %d times, create a named constant for it", n.Value, w.strLits[n.Value]), | |||||
| }) | |||||
| w.strLits[n.Value] = -1 // mark it to avoid failing again on the same literal | |||||
| } | |||||
| } | |||||
| } | |||||
| func (w lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) { | |||||
| if w.whiteLst[kind][n.Value] { | |||||
| return | |||||
| } | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: n, | |||||
| Category: "style", | |||||
| Failure: fmt.Sprintf("avoid magic numbers like '%s', create a named constant for it", n.Value), | |||||
| }) | |||||
| } | |||||
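| // For illustration, this rule could be configured in .revive.toml with | |||||
| // arguments along these lines (all values are examples): | |||||
| //   [rule.add-constant] | |||||
| //     arguments = [{maxLitCount = "3", allowStrs = "\"\"", allowInts = "0,1,2", allowFloats = "0.0,1.0"}] | |||||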
| @@ -0,0 +1,67 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ArgumentsLimitRule warns on functions that take more than a configured maximum number of arguments. | |||||
| type ArgumentsLimitRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { | |||||
| if len(arguments) != 1 { | |||||
| panic(`invalid configuration for "argument-limit"`) | |||||
| } | |||||
| total, ok := arguments[0].(int64) // Alt. non panicking version | |||||
| if !ok { | |||||
| panic(`invalid value passed as argument number to the "argument-list" rule`) | |||||
| } | |||||
| var failures []lint.Failure | |||||
| walker := lintArgsNum{ | |||||
| total: int(total), | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ArgumentsLimitRule) Name() string { | |||||
| return "argument-limit" | |||||
| } | |||||
| type lintArgsNum struct { | |||||
| total int | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintArgsNum) Visit(n ast.Node) ast.Visitor { | |||||
| node, ok := n.(*ast.FuncDecl) | |||||
| if ok { | |||||
| num := 0 | |||||
| for _, l := range node.Type.Params.List { | |||||
| for range l.Names { | |||||
| num++ | |||||
| } | |||||
| } | |||||
| if num > w.total { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.total, num), | |||||
| Node: node.Type, | |||||
| }) | |||||
| return w | |||||
| } | |||||
| } | |||||
| return w | |||||
| } | |||||
| @@ -0,0 +1,94 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "go/types" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // AtomicRule warns on direct assignment of sync/atomic call results back to the updated value. | |||||
| type AtomicRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *AtomicRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| walker := atomic{ | |||||
| pkgTypesInfo: file.Pkg.TypesInfo, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *AtomicRule) Name() string { | |||||
| return "atomic" | |||||
| } | |||||
| type atomic struct { | |||||
| pkgTypesInfo *types.Info | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w atomic) Visit(node ast.Node) ast.Visitor { | |||||
| n, ok := node.(*ast.AssignStmt) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| if len(n.Lhs) != len(n.Rhs) { | |||||
| return nil // skip assignment sub-tree | |||||
| } | |||||
| if len(n.Lhs) == 1 && n.Tok == token.DEFINE { | |||||
| return nil // skip assignment sub-tree | |||||
| } | |||||
| for i, right := range n.Rhs { | |||||
| call, ok := right.(*ast.CallExpr) | |||||
| if !ok { | |||||
| continue | |||||
| } | |||||
| sel, ok := call.Fun.(*ast.SelectorExpr) | |||||
| if !ok { | |||||
| continue | |||||
| } | |||||
| pkgIdent, _ := sel.X.(*ast.Ident) | |||||
| if w.pkgTypesInfo != nil { | |||||
| pkgName, ok := w.pkgTypesInfo.Uses[pkgIdent].(*types.PkgName) | |||||
| if !ok || pkgName.Imported().Path() != "sync/atomic" { | |||||
| continue | |||||
| } | |||||
| } | |||||
| switch sel.Sel.Name { | |||||
| case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr": | |||||
| left := n.Lhs[i] | |||||
| if len(call.Args) != 2 { | |||||
| continue | |||||
| } | |||||
| arg := call.Args[0] | |||||
| broken := false | |||||
| if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND { | |||||
| broken = gofmt(left) == gofmt(uarg.X) | |||||
| } else if star, ok := left.(*ast.StarExpr); ok { | |||||
| broken = gofmt(star.X) == gofmt(arg) | |||||
| } | |||||
| if broken { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Failure: "direct assignment to atomic value", | |||||
| Node: n, | |||||
| }) | |||||
| } | |||||
| } | |||||
| } | |||||
| return w | |||||
| } | |||||
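| // Illustrative example of an assignment this rule reports: | |||||
| //   counter = atomic.AddUint64(&counter, 1) // direct assignment to atomic value | |||||
| // The result should be discarded instead: atomic.AddUint64(&counter, 1) | |||||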
| @@ -0,0 +1,84 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // BareReturnRule warns on bare returns in functions with named results. | |||||
| type BareReturnRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *BareReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| w := lintBareReturnRule{onFailure: onFailure} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *BareReturnRule) Name() string { | |||||
| return "bare-return" | |||||
| } | |||||
| type lintBareReturnRule struct { | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintBareReturnRule) Visit(node ast.Node) ast.Visitor { | |||||
| switch n := node.(type) { | |||||
| case *ast.FuncDecl: | |||||
| w.checkFunc(n.Type.Results, n.Body) | |||||
| case *ast.FuncLit: // to cope with deferred functions and go-routines | |||||
| w.checkFunc(n.Type.Results, n.Body) | |||||
| } | |||||
| return w | |||||
| } | |||||
| // checkFunc verifies whether the given function has named results and bare returns | |||||
| func (w lintBareReturnRule) checkFunc(results *ast.FieldList, body *ast.BlockStmt) { | |||||
| hasNamedResults := results != nil && len(results.List) > 0 && results.List[0].Names != nil | |||||
| if !hasNamedResults || body == nil { | |||||
| return // nothing to do | |||||
| } | |||||
| brf := bareReturnFinder{w.onFailure} | |||||
| ast.Walk(brf, body) | |||||
| } | |||||
| type bareReturnFinder struct { | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w bareReturnFinder) Visit(node ast.Node) ast.Visitor { | |||||
| _, ok := node.(*ast.FuncLit) | |||||
| if ok { | |||||
| // skip analysing function literals | |||||
| // they will be analyzed by the lintBareReturnRule.Visit method | |||||
| return nil | |||||
| } | |||||
| rs, ok := node.(*ast.ReturnStmt) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| if len(rs.Results) > 0 { | |||||
| return w | |||||
| } | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: rs, | |||||
| Failure: "avoid using bare returns, please add return expressions", | |||||
| }) | |||||
| return w | |||||
| } | |||||
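| // Illustrative example of a function this rule reports: | |||||
| //   func split(s string) (head, tail string) { | |||||
| //       head, tail = s[:1], s[1:] | |||||
| //       return // bare return with named results | |||||
| //   } | |||||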
| @@ -0,0 +1,74 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // BlankImportsRule warns on undocumented blank imports outside main and test packages. | |||||
| type BlankImportsRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintBlankImports{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *BlankImportsRule) Name() string { | |||||
| return "blank-imports" | |||||
| } | |||||
| type lintBlankImports struct { | |||||
| fileAst *ast.File | |||||
| file *lint.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintBlankImports) Visit(_ ast.Node) ast.Visitor { | |||||
| // In package main and in tests, we don't complain about blank imports. | |||||
| if w.file.Pkg.IsMain() || w.file.IsTest() { | |||||
| return nil | |||||
| } | |||||
| // The first element of each contiguous group of blank imports should have | |||||
| // an explanatory comment of some kind. | |||||
| for i, imp := range w.fileAst.Imports { | |||||
| pos := w.file.ToPosition(imp.Pos()) | |||||
| if !isBlank(imp.Name) { | |||||
| continue // Ignore non-blank imports. | |||||
| } | |||||
| if i > 0 { | |||||
| prev := w.fileAst.Imports[i-1] | |||||
| prevPos := w.file.ToPosition(prev.Pos()) | |||||
| if isBlank(prev.Name) && prevPos.Line+1 == pos.Line { | |||||
| continue // A subsequent blank in a group. | |||||
| } | |||||
| } | |||||
| // This is the first blank import of a group. | |||||
| if imp.Doc == nil && imp.Comment == nil { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: imp, | |||||
| Failure: "a blank import should be only in a main or test package, or have a comment justifying it", | |||||
| Confidence: 1, | |||||
| Category: "imports", | |||||
| }) | |||||
| } | |||||
| } | |||||
| return nil | |||||
| } | |||||
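| // Illustrative example: outside main and test packages, a blank import such as | |||||
| //   import _ "image/png" | |||||
| // is reported unless it (or the first of its group) carries an explanatory comment. | |||||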
| @@ -0,0 +1,73 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // BoolLiteralRule warns when logic expressions contain Boolean literals. | |||||
| type BoolLiteralRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *BoolLiteralRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| astFile := file.AST | |||||
| w := &lintBoolLiteral{astFile, onFailure} | |||||
| ast.Walk(w, astFile) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *BoolLiteralRule) Name() string { | |||||
| return "bool-literal-in-expr" | |||||
| } | |||||
| type lintBoolLiteral struct { | |||||
| file *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor { | |||||
| switch n := node.(type) { | |||||
| case *ast.BinaryExpr: | |||||
| if !isBoolOp(n.Op) { | |||||
| return w | |||||
| } | |||||
| lexeme, ok := isExprABooleanLit(n.X) | |||||
| if !ok { | |||||
| lexeme, ok = isExprABooleanLit(n.Y) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| } | |||||
| isConstant := (n.Op == token.LAND && lexeme == "false") || (n.Op == token.LOR && lexeme == "true") | |||||
| if isConstant { | |||||
| w.addFailure(n, "Boolean expression seems to always evaluate to "+lexeme, "logic") | |||||
| } else { | |||||
| w.addFailure(n, "omit Boolean literal in expression", "style") | |||||
| } | |||||
| } | |||||
| return w | |||||
| } | |||||
| func (w lintBoolLiteral) addFailure(node ast.Node, msg string, cat string) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: node, | |||||
| Category: cat, | |||||
| Failure: msg, | |||||
| }) | |||||
| } | |||||
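| // Illustrative examples of expressions this rule reports: | |||||
| //   if flag && true { ... }  // style: omit the Boolean literal | |||||
| //   if flag && false { ... } // logic: seems to always evaluate to false | |||||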
| @@ -0,0 +1,70 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // CallToGCRule lints calls to the garbage collector. | |||||
| type CallToGCRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *CallToGCRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| var gcTriggeringFunctions = map[string]map[string]bool{ | |||||
| "runtime": map[string]bool{"GC": true}, | |||||
| } | |||||
| w := lintCallToGC{onFailure, gcTriggeringFunctions} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *CallToGCRule) Name() string { | |||||
| return "call-to-gc" | |||||
| } | |||||
| type lintCallToGC struct { | |||||
| onFailure func(lint.Failure) | |||||
| gcTriggeringFunctions map[string]map[string]bool | |||||
| } | |||||
| func (w lintCallToGC) Visit(node ast.Node) ast.Visitor { | |||||
| ce, ok := node.(*ast.CallExpr) | |||||
| if !ok { | |||||
| return w // nothing to do, the node is not a call | |||||
| } | |||||
| fc, ok := ce.Fun.(*ast.SelectorExpr) | |||||
| if !ok { | |||||
| return nil // nothing to do, the call is not of the form pkg.func(...) | |||||
| } | |||||
| id, ok := fc.X.(*ast.Ident) | |||||
| if !ok { | |||||
| return nil // in case X is not an id (it should be!) | |||||
| } | |||||
| fn := fc.Sel.Name | |||||
| pkg := id.Name | |||||
| if !w.gcTriggeringFunctions[pkg][fn] { | |||||
| return nil // it isn't a call to a GC triggering function | |||||
| } | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: node, | |||||
| Category: "bad practice", | |||||
| Failure: "explicit call to the garbage collector", | |||||
| }) | |||||
| return w | |||||
| } | |||||
| @@ -0,0 +1,195 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "github.com/mgechev/revive/lint" | |||||
| "golang.org/x/tools/go/ast/astutil" | |||||
| ) | |||||
| // CognitiveComplexityRule warns on functions whose cognitive complexity exceeds a configured maximum. | |||||
| type CognitiveComplexityRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *CognitiveComplexityRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| const expectedArgumentsCount = 1 | |||||
| if len(arguments) < expectedArgumentsCount { | |||||
| panic(fmt.Sprintf("not enough arguments for cognitive-complexity, expected %d, got %d", expectedArgumentsCount, len(arguments))) | |||||
| } | |||||
| complexity, ok := arguments[0].(int64) | |||||
| if !ok { | |||||
| panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])) | |||||
| } | |||||
| linter := cognitiveComplexityLinter{ | |||||
| file: file, | |||||
| maxComplexity: int(complexity), | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| linter.lint() | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *CognitiveComplexityRule) Name() string { | |||||
| return "cognitive-complexity" | |||||
| } | |||||
| type cognitiveComplexityLinter struct { | |||||
| file *lint.File | |||||
| maxComplexity int | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w cognitiveComplexityLinter) lint() { | |||||
| f := w.file | |||||
| for _, decl := range f.AST.Decls { | |||||
| if fn, ok := decl.(*ast.FuncDecl); ok { | |||||
| v := cognitiveComplexityVisitor{} | |||||
| c := v.subTreeComplexity(fn.Body) | |||||
| if c > w.maxComplexity { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Category: "maintenance", | |||||
| Failure: fmt.Sprintf("function %s has cognitive complexity %d (> max enabled %d)", funcName(fn), c, w.maxComplexity), | |||||
| Node: fn, | |||||
| }) | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| type cognitiveComplexityVisitor struct { | |||||
| complexity int | |||||
| nestingLevel int | |||||
| } | |||||
| // subTreeComplexity calculates the cognitive complexity of an AST-subtree. | |||||
| func (v cognitiveComplexityVisitor) subTreeComplexity(n ast.Node) int { | |||||
| ast.Walk(&v, n) | |||||
| return v.complexity | |||||
| } | |||||
| // Visit implements the ast.Visitor interface. | |||||
| func (v *cognitiveComplexityVisitor) Visit(n ast.Node) ast.Visitor { | |||||
| switch n := n.(type) { | |||||
| case *ast.IfStmt: | |||||
| targets := []ast.Node{n.Cond, n.Body, n.Else} | |||||
| v.walk(1, targets...) | |||||
| return nil | |||||
| case *ast.ForStmt: | |||||
| targets := []ast.Node{n.Cond, n.Body} | |||||
| v.walk(1, targets...) | |||||
| return nil | |||||
| case *ast.RangeStmt: | |||||
| v.walk(1, n.Body) | |||||
| return nil | |||||
| case *ast.SelectStmt: | |||||
| v.walk(1, n.Body) | |||||
| return nil | |||||
| case *ast.SwitchStmt: | |||||
| v.walk(1, n.Body) | |||||
| return nil | |||||
| case *ast.TypeSwitchStmt: | |||||
| v.walk(1, n.Body) | |||||
| return nil | |||||
| case *ast.FuncLit: | |||||
| v.walk(0, n.Body) // do not increment the complexity, just do the nesting | |||||
| return nil | |||||
| case *ast.BinaryExpr: | |||||
| v.complexity += v.binExpComplexity(n) | |||||
| return nil // skip visiting binexp sub-tree (already visited by binExpComplexity) | |||||
| case *ast.BranchStmt: | |||||
| if n.Label != nil { | |||||
| v.complexity += 1 | |||||
| } | |||||
| } | |||||
| // TODO handle (at least) direct recursion | |||||
| return v | |||||
| } | |||||
| func (v *cognitiveComplexityVisitor) walk(complexityIncrement int, targets ...ast.Node) { | |||||
| v.complexity += complexityIncrement + v.nestingLevel | |||||
| nesting := v.nestingLevel | |||||
| v.nestingLevel++ | |||||
| for _, t := range targets { | |||||
| if t == nil { | |||||
| continue | |||||
| } | |||||
| ast.Walk(v, t) | |||||
| } | |||||
| v.nestingLevel = nesting | |||||
| } | |||||
| func (cognitiveComplexityVisitor) binExpComplexity(n *ast.BinaryExpr) int { | |||||
| calculator := binExprComplexityCalculator{opsStack: []token.Token{}} | |||||
| astutil.Apply(n, calculator.pre, calculator.post) | |||||
| return calculator.complexity | |||||
| } | |||||
| type binExprComplexityCalculator struct { | |||||
| complexity int | |||||
| opsStack []token.Token // stack of bool operators | |||||
| subexpStarted bool | |||||
| } | |||||
| func (becc *binExprComplexityCalculator) pre(c *astutil.Cursor) bool { | |||||
| switch n := c.Node().(type) { | |||||
| case *ast.BinaryExpr: | |||||
| isBoolOp := n.Op == token.LAND || n.Op == token.LOR | |||||
| if !isBoolOp { | |||||
| break | |||||
| } | |||||
| ops := len(becc.opsStack) | |||||
| // if | |||||
| // is the first boolop in the expression OR | |||||
| // is the first boolop inside a subexpression (...) OR | |||||
| // is not the same to the previous one | |||||
| // then | |||||
| // increment complexity | |||||
| if ops == 0 || becc.subexpStarted || n.Op != becc.opsStack[ops-1] { | |||||
| becc.complexity++ | |||||
| becc.subexpStarted = false | |||||
| } | |||||
| becc.opsStack = append(becc.opsStack, n.Op) | |||||
| case *ast.ParenExpr: | |||||
| becc.subexpStarted = true | |||||
| } | |||||
| return true | |||||
| } | |||||
| func (becc *binExprComplexityCalculator) post(c *astutil.Cursor) bool { | |||||
| switch n := c.Node().(type) { | |||||
| case *ast.BinaryExpr: | |||||
| isBoolOp := n.Op == token.LAND || n.Op == token.LOR | |||||
| if !isBoolOp { | |||||
| break | |||||
| } | |||||
| ops := len(becc.opsStack) | |||||
| if ops > 0 { | |||||
| becc.opsStack = becc.opsStack[:ops-1] | |||||
| } | |||||
| case *ast.ParenExpr: | |||||
| becc.subexpStarted = false | |||||
| } | |||||
| return true | |||||
| } | |||||
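| // Illustrative scoring under the rules above (each increment is 1 + current nesting level): | |||||
| //   func f(a, b bool) { | |||||
| //       if a {        // +1 | |||||
| //           if b {    // +2 (one level of nesting) | |||||
| //               println("nested") | |||||
| //           } | |||||
| //       } | |||||
| //   }                 // cognitive complexity: 3 | |||||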
| @@ -0,0 +1,190 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "strings" | |||||
| "sync" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| type referenceMethod struct { | |||||
| fileName string | |||||
| id *ast.Ident | |||||
| } | |||||
| type pkgMethods struct { | |||||
| pkg *lint.Package | |||||
| methods map[string]map[string]*referenceMethod | |||||
| mu *sync.Mutex | |||||
| } | |||||
| type packages struct { | |||||
| pkgs []pkgMethods | |||||
| mu sync.Mutex | |||||
| } | |||||
| func (ps *packages) methodNames(lp *lint.Package) pkgMethods { | |||||
| ps.mu.Lock() | |||||
| for _, pkg := range ps.pkgs { | |||||
| if pkg.pkg == lp { | |||||
| ps.mu.Unlock() | |||||
| return pkg | |||||
| } | |||||
| } | |||||
| pkgm := pkgMethods{pkg: lp, methods: make(map[string]map[string]*referenceMethod), mu: &sync.Mutex{}} | |||||
| ps.pkgs = append(ps.pkgs, pkgm) | |||||
| ps.mu.Unlock() | |||||
| return pkgm | |||||
| } | |||||
| var allPkgs = packages{pkgs: make([]pkgMethods, 1)} | |||||
| // ConfusingNamingRule lints method names that differ only by capitalization | |||||
| type ConfusingNamingRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ConfusingNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| pkgm := allPkgs.methodNames(file.Pkg) | |||||
| walker := lintConfusingNames{ | |||||
| fileName: file.Name, | |||||
| pkgm: pkgm, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(&walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ConfusingNamingRule) Name() string { | |||||
| return "confusing-naming" | |||||
| } | |||||
| // checkMethodName checks whether a given method/function name differs only by capitalization from another method/function of the same struct/file. | |||||
| func checkMethodName(holder string, id *ast.Ident, w *lintConfusingNames) { | |||||
| if id.Name == "init" && holder == defaultStructName { | |||||
| // ignore init functions | |||||
| return | |||||
| } | |||||
| pkgm := w.pkgm | |||||
| name := strings.ToUpper(id.Name) | |||||
| pkgm.mu.Lock() | |||||
| defer pkgm.mu.Unlock() | |||||
| if pkgm.methods[holder] != nil { | |||||
| if pkgm.methods[holder][name] != nil { | |||||
| refMethod := pkgm.methods[holder][name] | |||||
| // confusing names | |||||
| var kind string | |||||
| if holder == defaultStructName { | |||||
| kind = "function" | |||||
| } else { | |||||
| kind = "method" | |||||
| } | |||||
| var fileName string | |||||
| if w.fileName == refMethod.fileName { | |||||
| fileName = "the same source file" | |||||
| } else { | |||||
| fileName = refMethod.fileName | |||||
| } | |||||
| w.onFailure(lint.Failure{ | |||||
| Failure: fmt.Sprintf("Method '%s' differs only by capitalization to %s '%s' in %s", id.Name, kind, refMethod.id.Name, fileName), | |||||
| Confidence: 1, | |||||
| Node: id, | |||||
| Category: "naming", | |||||
| }) | |||||
| return | |||||
| } | |||||
| } else { | |||||
| pkgm.methods[holder] = make(map[string]*referenceMethod, 1) | |||||
| } | |||||
| // update the black list | |||||
| if pkgm.methods[holder] == nil { | |||||
| println("no entry for '", holder, "'") | |||||
| } | |||||
| pkgm.methods[holder][name] = &referenceMethod{fileName: w.fileName, id: id} | |||||
| } | |||||
| type lintConfusingNames struct { | |||||
| fileName string | |||||
| pkgm pkgMethods | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| const defaultStructName = "_" // used to map functions | |||||
| // getStructName returns the struct name of a function receiver; defaults to defaultStructName | |||||
| func getStructName(r *ast.FieldList) string { | |||||
| result := defaultStructName | |||||
| if r == nil || len(r.List) < 1 { | |||||
| return result | |||||
| } | |||||
| t := r.List[0].Type | |||||
| if p, _ := t.(*ast.StarExpr); p != nil { // if a pointer receiver => dereference pointer receiver types | |||||
| t = p.X | |||||
| } | |||||
| if p, _ := t.(*ast.Ident); p != nil { | |||||
| result = p.Name | |||||
| } | |||||
| return result | |||||
| } | |||||
| func checkStructFields(fields *ast.FieldList, structName string, w *lintConfusingNames) { | |||||
| bl := make(map[string]bool, len(fields.List)) | |||||
| for _, f := range fields.List { | |||||
| for _, id := range f.Names { | |||||
| normName := strings.ToUpper(id.Name) | |||||
| if bl[normName] { | |||||
| w.onFailure(lint.Failure{ | |||||
| Failure: fmt.Sprintf("Field '%s' differs only by capitalization to other field in the struct type %s", id.Name, structName), | |||||
| Confidence: 1, | |||||
| Node: id, | |||||
| Category: "naming", | |||||
| }) | |||||
| } else { | |||||
| bl[normName] = true | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| func (w *lintConfusingNames) Visit(n ast.Node) ast.Visitor { | |||||
| switch v := n.(type) { | |||||
| case *ast.FuncDecl: | |||||
| // Exclude naming warnings for functions that are exported to C but | |||||
| // not exported in the Go API. | |||||
| // See https://github.com/golang/lint/issues/144. | |||||
| if ast.IsExported(v.Name.Name) || !isCgoExported(v) { | |||||
| checkMethodName(getStructName(v.Recv), v.Name, w) | |||||
| } | |||||
| case *ast.TypeSpec: | |||||
| if s, ok := v.Type.(*ast.StructType); ok { | |||||
| checkStructFields(s.Fields, v.Name.Name, w) | |||||
| } | |||||
| default: | |||||
| // will add other checks like field names, struct names, etc. | |||||
| } | |||||
| return w | |||||
| } | |||||
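| // Illustrative example (hypothetical code, not from this repository): | |||||
| //   func (s *Store) flush() error { ... } | |||||
| //   func (s *Store) Flush() error { ... } | |||||
| // The second declaration encountered is reported because the two names differ only by | |||||
| // capitalization on the same receiver type; struct fields are checked the same way. | |||||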
| @@ -0,0 +1,67 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ConfusingResultsRule lints functions that return two or more consecutive unnamed results of the same type. | |||||
| type ConfusingResultsRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ConfusingResultsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintConfusingResults{ | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ConfusingResultsRule) Name() string { | |||||
| return "confusing-results" | |||||
| } | |||||
| type lintConfusingResults struct { | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintConfusingResults) Visit(n ast.Node) ast.Visitor { | |||||
| fn, ok := n.(*ast.FuncDecl) | |||||
| if !ok || fn.Type.Results == nil || len(fn.Type.Results.List) < 2 { | |||||
| return w | |||||
| } | |||||
| lastType := "" | |||||
| for _, result := range fn.Type.Results.List { | |||||
| if len(result.Names) > 0 { | |||||
| return w | |||||
| } | |||||
| t, ok := result.Type.(*ast.Ident) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| if t.Name == lastType { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: n, | |||||
| Confidence: 1, | |||||
| Category: "naming", | |||||
| Failure: "unnamed results of the same type may be confusing, consider using named results", | |||||
| }) | |||||
| break | |||||
| } | |||||
| lastType = t.Name | |||||
| } | |||||
| return w | |||||
| } | |||||
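| // Illustrative example (hypothetical): a declaration such as | |||||
| //   func bounds() (int, int) { ... } | |||||
| // is reported because two consecutive unnamed results share the same type, while | |||||
| //   func bounds() (min, max int) { ... } | |||||
| // passes, since the walker above skips functions with named results. | |||||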
| @@ -0,0 +1,88 @@ | |||||
| package rule | |||||
| import ( | |||||
| "github.com/mgechev/revive/lint" | |||||
| "go/ast" | |||||
| "go/token" | |||||
| ) | |||||
| // ConstantLogicalExprRule warns on constant logical expressions. | |||||
| type ConstantLogicalExprRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ConstantLogicalExprRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| astFile := file.AST | |||||
| w := &lintConstantLogicalExpr{astFile, onFailure} | |||||
| ast.Walk(w, astFile) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ConstantLogicalExprRule) Name() string { | |||||
| return "constant-logical-expr" | |||||
| } | |||||
| type lintConstantLogicalExpr struct { | |||||
| file *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor { | |||||
| switch n := node.(type) { | |||||
| case *ast.BinaryExpr: | |||||
| if !w.isOperatorWithLogicalResult(n.Op) { | |||||
| return w | |||||
| } | |||||
| if gofmt(n.X) != gofmt(n.Y) { // check if subexpressions are the same | |||||
| return w | |||||
| } | |||||
| if n.Op == token.EQL { | |||||
| w.newFailure(n, "expression always evaluates to true") | |||||
| return w | |||||
| } | |||||
| if w.isInequalityOperator(n.Op) { | |||||
| w.newFailure(n, "expression always evaluates to false") | |||||
| return w | |||||
| } | |||||
| w.newFailure(n, "left and right hand-side sub-expressions are the same") | |||||
| } | |||||
| return w | |||||
| } | |||||
| func (w *lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) bool { | |||||
| switch t { | |||||
| case token.LAND, token.LOR, token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: | |||||
| return true | |||||
| } | |||||
| return false | |||||
| } | |||||
| func (w *lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool { | |||||
| switch t { | |||||
| case token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: | |||||
| return true | |||||
| } | |||||
| return false | |||||
| } | |||||
| func (w lintConstantLogicalExpr) newFailure(node ast.Node, msg string) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: node, | |||||
| Category: "logic", | |||||
| Failure: msg, | |||||
| }) | |||||
| } | |||||
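| // Illustrative example (hypothetical): with identical sub-expressions on both sides, | |||||
| //   if x == x { ... }   // "expression always evaluates to true" | |||||
| //   if x != x { ... }   // "expression always evaluates to false" | |||||
| // are both reported, since gofmt(n.X) equals gofmt(n.Y) in the check above. | |||||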
| @@ -0,0 +1,60 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ContextAsArgumentRule checks that context.Context, when present, is the first parameter of a function. | |||||
| type ContextAsArgumentRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ContextAsArgumentRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintContextArguments{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ContextAsArgumentRule) Name() string { | |||||
| return "context-as-argument" | |||||
| } | |||||
| type lintContextArguments struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintContextArguments) Visit(n ast.Node) ast.Visitor { | |||||
| fn, ok := n.(*ast.FuncDecl) | |||||
| if !ok || len(fn.Type.Params.List) <= 1 { | |||||
| return w | |||||
| } | |||||
| // A context.Context should be the first parameter of a function. | |||||
| // Flag any that show up after the first. | |||||
| for _, arg := range fn.Type.Params.List[1:] { | |||||
| if isPkgDot(arg.Type, "context", "Context") { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: fn, | |||||
| Category: "arg-order", | |||||
| Failure: "context.Context should be the first parameter of a function", | |||||
| Confidence: 0.9, | |||||
| }) | |||||
| break // only flag one | |||||
| } | |||||
| } | |||||
| return w | |||||
| } | |||||
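| // Illustrative example (hypothetical): the signature | |||||
| //   func load(id int, ctx context.Context) error | |||||
| // is reported because context.Context appears after the first parameter; | |||||
| //   func load(ctx context.Context, id int) error | |||||
| // satisfies the rule. | |||||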
| @@ -0,0 +1,81 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "go/types" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ContextKeysType lints the types of keys passed to context.WithValue. | |||||
| type ContextKeysType struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ContextKeysType) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintContextKeyTypes{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| file.Pkg.TypeCheck() | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ContextKeysType) Name() string { | |||||
| return "context-keys-type" | |||||
| } | |||||
| type lintContextKeyTypes struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintContextKeyTypes) Visit(n ast.Node) ast.Visitor { | |||||
| switch n := n.(type) { | |||||
| case *ast.CallExpr: | |||||
| checkContextKeyType(w, n) | |||||
| } | |||||
| return w | |||||
| } | |||||
| func checkContextKeyType(w lintContextKeyTypes, x *ast.CallExpr) { | |||||
| f := w.file | |||||
| sel, ok := x.Fun.(*ast.SelectorExpr) | |||||
| if !ok { | |||||
| return | |||||
| } | |||||
| pkg, ok := sel.X.(*ast.Ident) | |||||
| if !ok || pkg.Name != "context" { | |||||
| return | |||||
| } | |||||
| if sel.Sel.Name != "WithValue" { | |||||
| return | |||||
| } | |||||
| // key is second argument to context.WithValue | |||||
| if len(x.Args) != 3 { | |||||
| return | |||||
| } | |||||
| key := f.Pkg.TypesInfo.Types[x.Args[1]] | |||||
| if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: x, | |||||
| Category: "content", | |||||
| Failure: fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type), | |||||
| }) | |||||
| } | |||||
| } | |||||
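| // Illustrative example (hypothetical): a call like | |||||
| //   ctx = context.WithValue(ctx, "requestID", id) | |||||
| // is reported because the key is a plain string; a dedicated key type, e.g. | |||||
| //   type ctxKey string  // then context.WithValue(ctx, ctxKey("requestID"), id) | |||||
| // is not a *types.Basic and so does not match the check above. | |||||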
| @@ -0,0 +1,115 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // Based on https://github.com/fzipp/gocyclo | |||||
| // CyclomaticRule lints the cyclomatic complexity of functions. | |||||
| type CyclomaticRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *CyclomaticRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| complexity, ok := arguments[0].(int64) // comma-ok assertion so a misconfigured argument is reported by the panic below | |||||
| if !ok { | |||||
| panic("invalid argument for cyclomatic complexity") | |||||
| } | |||||
| fileAst := file.AST | |||||
| walker := lintCyclomatic{ | |||||
| file: file, | |||||
| complexity: int(complexity), | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *CyclomaticRule) Name() string { | |||||
| return "cyclomatic" | |||||
| } | |||||
| type lintCyclomatic struct { | |||||
| file *lint.File | |||||
| complexity int | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor { | |||||
| f := w.file | |||||
| for _, decl := range f.AST.Decls { | |||||
| if fn, ok := decl.(*ast.FuncDecl); ok { | |||||
| c := complexity(fn) | |||||
| if c > w.complexity { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Category: "maintenance", | |||||
| Failure: fmt.Sprintf("function %s has cyclomatic complexity %d", funcName(fn), c), | |||||
| Node: fn, | |||||
| }) | |||||
| } | |||||
| } | |||||
| } | |||||
| return nil | |||||
| } | |||||
| // funcName returns the name representation of a function or method: | |||||
| // "(Type).Name" for methods or simply "Name" for functions. | |||||
| func funcName(fn *ast.FuncDecl) string { | |||||
| if fn.Recv != nil { | |||||
| if fn.Recv.NumFields() > 0 { | |||||
| typ := fn.Recv.List[0].Type | |||||
| return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) | |||||
| } | |||||
| } | |||||
| return fn.Name.Name | |||||
| } | |||||
| // recvString returns a string representation of recv of the | |||||
| // form "T", "*T", or "BADRECV" (if not a proper receiver type). | |||||
| func recvString(recv ast.Expr) string { | |||||
| switch t := recv.(type) { | |||||
| case *ast.Ident: | |||||
| return t.Name | |||||
| case *ast.StarExpr: | |||||
| return "*" + recvString(t.X) | |||||
| } | |||||
| return "BADRECV" | |||||
| } | |||||
| // complexity calculates the cyclomatic complexity of a function. | |||||
| func complexity(fn *ast.FuncDecl) int { | |||||
| v := complexityVisitor{} | |||||
| ast.Walk(&v, fn) | |||||
| return v.Complexity | |||||
| } | |||||
| type complexityVisitor struct { | |||||
| // Complexity is the cyclomatic complexity | |||||
| Complexity int | |||||
| } | |||||
| // Visit implements the ast.Visitor interface. | |||||
| func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { | |||||
| switch n := n.(type) { | |||||
| case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause: | |||||
| v.Complexity++ | |||||
| case *ast.BinaryExpr: | |||||
| if n.Op == token.LAND || n.Op == token.LOR { | |||||
| v.Complexity++ | |||||
| } | |||||
| } | |||||
| return v | |||||
| } | |||||
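| // Illustrative example (hypothetical): complexity starts at 1 for the function itself and | |||||
| // grows by one for each if, for, range, switch case, select case, && and ||, so | |||||
| //   func classify(n int) string { if n < 0 || n > 100 { ... }; if n%2 == 0 { ... }; ... } | |||||
| // counts 1 (func) + 1 (if) + 1 (||) + 1 (if) = 4 and is reported once the configured limit is below that. | |||||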
| @@ -0,0 +1,94 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // DeepExitRule lints program exit at functions other than main or init. | |||||
| type DeepExitRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *DeepExitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| var exitFunctions = map[string]map[string]bool{ | |||||
| "os": map[string]bool{"Exit": true}, | |||||
| "syscall": map[string]bool{"Exit": true}, | |||||
| "log": map[string]bool{ | |||||
| "Fatal": true, | |||||
| "Fatalf": true, | |||||
| "Fatalln": true, | |||||
| "Panic": true, | |||||
| "Panicf": true, | |||||
| "Panicln": true, | |||||
| }, | |||||
| } | |||||
| w := lintDeepExit{onFailure, exitFunctions, file.IsTest()} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *DeepExitRule) Name() string { | |||||
| return "deep-exit" | |||||
| } | |||||
| type lintDeepExit struct { | |||||
| onFailure func(lint.Failure) | |||||
| exitFunctions map[string]map[string]bool | |||||
| isTestFile bool | |||||
| } | |||||
| func (w lintDeepExit) Visit(node ast.Node) ast.Visitor { | |||||
| if fd, ok := node.(*ast.FuncDecl); ok { | |||||
| if w.mustIgnore(fd) { | |||||
| return nil // skip analysis of this function | |||||
| } | |||||
| return w | |||||
| } | |||||
| se, ok := node.(*ast.ExprStmt) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| ce, ok := se.X.(*ast.CallExpr) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| fc, ok := ce.Fun.(*ast.SelectorExpr) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| id, ok := fc.X.(*ast.Ident) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| fn := fc.Sel.Name | |||||
| pkg := id.Name | |||||
| if w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] { // it's a call to an exit function | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: ce, | |||||
| Category: "bad practice", | |||||
| Failure: fmt.Sprintf("calls to %s.%s only in main() or init() functions", pkg, fn), | |||||
| }) | |||||
| } | |||||
| return w | |||||
| } | |||||
| func (w *lintDeepExit) mustIgnore(fd *ast.FuncDecl) bool { | |||||
| fn := fd.Name.Name | |||||
| return fn == "init" || fn == "main" || (w.isTestFile && fn == "TestMain") | |||||
| } | |||||
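| // Illustrative example (hypothetical): a call such as | |||||
| //   func loadConfig() { ...; os.Exit(1) } | |||||
| // is reported, while the same call inside main(), init(), or a test file's TestMain is ignored. | |||||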
| @@ -0,0 +1,54 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // DotImportsRule lints the use of dot imports. | |||||
| type DotImportsRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *DotImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintImports{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *DotImportsRule) Name() string { | |||||
| return "dot-imports" | |||||
| } | |||||
| type lintImports struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintImports) Visit(_ ast.Node) ast.Visitor { | |||||
| for _, is := range w.fileAst.Imports { | |||||
| if is.Name != nil && is.Name.Name == "." && !w.file.IsTest() { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Failure: "should not use dot imports", | |||||
| Node: is, | |||||
| Category: "imports", | |||||
| }) | |||||
| } | |||||
| } | |||||
| return nil | |||||
| } | |||||
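| // Illustrative example (hypothetical): an import clause like | |||||
| //   import . "strings" | |||||
| // is reported in non-test files; test files are skipped via file.IsTest() above. | |||||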
| @@ -0,0 +1,39 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // DuplicatedImportsRule lints import paths that appear more than once in a file. | |||||
| type DuplicatedImportsRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *DuplicatedImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| impPaths := map[string]struct{}{} | |||||
| for _, imp := range file.AST.Imports { | |||||
| path := imp.Path.Value | |||||
| _, ok := impPaths[path] | |||||
| if ok { | |||||
| failures = append(failures, lint.Failure{ | |||||
| Confidence: 1, | |||||
| Failure: fmt.Sprintf("Package %s already imported", path), | |||||
| Node: imp, | |||||
| Category: "imports", | |||||
| }) | |||||
| continue | |||||
| } | |||||
| impPaths[path] = struct{}{} | |||||
| } | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *DuplicatedImportsRule) Name() string { | |||||
| return "duplicated-imports" | |||||
| } | |||||
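| // Illustrative example (hypothetical): importing the same path twice, even under an alias, | |||||
| //   import ( | |||||
| //     "fmt" | |||||
| //     format "fmt" | |||||
| //   ) | |||||
| // is reported, since the second spec's path value is already present in impPaths. | |||||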
| @@ -0,0 +1,76 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // EmptyBlockRule lints empty code blocks. | |||||
| type EmptyBlockRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *EmptyBlockRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| w := lintEmptyBlock{make([]*ast.BlockStmt, 0), onFailure} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *EmptyBlockRule) Name() string { | |||||
| return "empty-block" | |||||
| } | |||||
| type lintEmptyBlock struct { | |||||
| ignore []*ast.BlockStmt | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintEmptyBlock) Visit(node ast.Node) ast.Visitor { | |||||
| fd, ok := node.(*ast.FuncDecl) | |||||
| if ok { | |||||
| w.ignore = append(w.ignore, fd.Body) | |||||
| return w | |||||
| } | |||||
| fl, ok := node.(*ast.FuncLit) | |||||
| if ok { | |||||
| w.ignore = append(w.ignore, fl.Body) | |||||
| return w | |||||
| } | |||||
| block, ok := node.(*ast.BlockStmt) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| if mustIgnore(block, w.ignore) { | |||||
| return w | |||||
| } | |||||
| if len(block.List) == 0 { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: block, | |||||
| Category: "logic", | |||||
| Failure: "this block is empty, you can remove it", | |||||
| }) | |||||
| } | |||||
| return w | |||||
| } | |||||
| func mustIgnore(block *ast.BlockStmt, blackList []*ast.BlockStmt) bool { | |||||
| for _, b := range blackList { | |||||
| if b == block { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
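| // Illustrative example (hypothetical): the empty branch in | |||||
| //   if err != nil { } | |||||
| // is reported, while an intentionally empty function body such as | |||||
| //   func noop() {} | |||||
| // is not, because function and closure bodies are placed on the ignore list above. | |||||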
| @@ -0,0 +1,113 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // EmptyLinesRule lints empty lines in blocks. | |||||
| type EmptyLinesRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *EmptyLinesRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| w := lintEmptyLines{file, file.CommentMap(), onFailure} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *EmptyLinesRule) Name() string { | |||||
| return "empty-lines" | |||||
| } | |||||
| type lintEmptyLines struct { | |||||
| file *lint.File | |||||
| cmap ast.CommentMap | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintEmptyLines) Visit(node ast.Node) ast.Visitor { | |||||
| block, ok := node.(*ast.BlockStmt) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| w.checkStart(block) | |||||
| w.checkEnd(block) | |||||
| return w | |||||
| } | |||||
| func (w lintEmptyLines) checkStart(block *ast.BlockStmt) { | |||||
| if len(block.List) == 0 { | |||||
| return | |||||
| } | |||||
| start := w.position(block.Lbrace) | |||||
| firstNode := block.List[0] | |||||
| if w.commentBetween(start, firstNode) { | |||||
| return | |||||
| } | |||||
| first := w.position(firstNode.Pos()) | |||||
| if first.Line-start.Line > 1 { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: block, | |||||
| Category: "style", | |||||
| Failure: "extra empty line at the start of a block", | |||||
| }) | |||||
| } | |||||
| } | |||||
| func (w lintEmptyLines) checkEnd(block *ast.BlockStmt) { | |||||
| if len(block.List) < 1 { | |||||
| return | |||||
| } | |||||
| end := w.position(block.Rbrace) | |||||
| lastNode := block.List[len(block.List)-1] | |||||
| if w.commentBetween(end, lastNode) { | |||||
| return | |||||
| } | |||||
| last := w.position(lastNode.End()) | |||||
| if end.Line-last.Line > 1 { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: lastNode, | |||||
| Category: "style", | |||||
| Failure: "extra empty line at the end of a block", | |||||
| }) | |||||
| } | |||||
| } | |||||
| func (w lintEmptyLines) commentBetween(position token.Position, node ast.Node) bool { | |||||
| comments := w.cmap.Filter(node).Comments() | |||||
| if len(comments) == 0 { | |||||
| return false | |||||
| } | |||||
| for _, comment := range comments { | |||||
| start, end := w.position(comment.Pos()), w.position(comment.End()) | |||||
| if start.Line-position.Line == 1 || position.Line-end.Line == 1 { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| func (w lintEmptyLines) position(pos token.Pos) token.Position { | |||||
| return w.file.ToPosition(pos) | |||||
| } | |||||
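| // Illustrative example (hypothetical): | |||||
| //   func sum(a, b int) int { | |||||
| //     <blank line>            // reported: "extra empty line at the start of a block" | |||||
| //     return a + b | |||||
| //   } | |||||
| // A blank line just before the closing brace is reported the same way, unless a comment sits next to it. | |||||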
| @@ -0,0 +1,79 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "strings" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ErrorNamingRule lints the naming of error variables (errFoo / ErrFoo). | |||||
| type ErrorNamingRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ErrorNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintErrors{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ErrorNamingRule) Name() string { | |||||
| return "error-naming" | |||||
| } | |||||
| type lintErrors struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintErrors) Visit(_ ast.Node) ast.Visitor { | |||||
| for _, decl := range w.fileAst.Decls { | |||||
| gd, ok := decl.(*ast.GenDecl) | |||||
| if !ok || gd.Tok != token.VAR { | |||||
| continue | |||||
| } | |||||
| for _, spec := range gd.Specs { | |||||
| spec := spec.(*ast.ValueSpec) | |||||
| if len(spec.Names) != 1 || len(spec.Values) != 1 { | |||||
| continue | |||||
| } | |||||
| ce, ok := spec.Values[0].(*ast.CallExpr) | |||||
| if !ok { | |||||
| continue | |||||
| } | |||||
| if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { | |||||
| continue | |||||
| } | |||||
| id := spec.Names[0] | |||||
| prefix := "err" | |||||
| if id.IsExported() { | |||||
| prefix = "Err" | |||||
| } | |||||
| if !strings.HasPrefix(id.Name, prefix) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: id, | |||||
| Confidence: 0.9, | |||||
| Category: "naming", | |||||
| Failure: fmt.Sprintf("error var %s should have name of the form %sFoo", id.Name, prefix), | |||||
| }) | |||||
| } | |||||
| } | |||||
| } | |||||
| return nil | |||||
| } | |||||
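| // Illustrative example (hypothetical): the declaration | |||||
| //   var MissingUser = errors.New("user not found") | |||||
| // is reported with the suggestion to use the ErrFoo form, e.g. ErrMissingUser | |||||
| // (or errMissingUser when the variable is unexported). | |||||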
| @@ -0,0 +1,67 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ErrorReturnRule lints functions that return an error in a position other than the last result. | |||||
| type ErrorReturnRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ErrorReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintErrorReturn{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ErrorReturnRule) Name() string { | |||||
| return "error-return" | |||||
| } | |||||
| type lintErrorReturn struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintErrorReturn) Visit(n ast.Node) ast.Visitor { | |||||
| fn, ok := n.(*ast.FuncDecl) | |||||
| if !ok || fn.Type.Results == nil { | |||||
| return w | |||||
| } | |||||
| ret := fn.Type.Results.List | |||||
| if len(ret) <= 1 { | |||||
| return w | |||||
| } | |||||
| if isIdent(ret[len(ret)-1].Type, "error") { | |||||
| return nil | |||||
| } | |||||
| // An error return parameter should be the last parameter. | |||||
| // Flag any error parameters found before the last. | |||||
| for _, r := range ret[:len(ret)-1] { | |||||
| if isIdent(r.Type, "error") { | |||||
| w.onFailure(lint.Failure{ | |||||
| Category: "arg-order", | |||||
| Confidence: 0.9, | |||||
| Node: fn, | |||||
| Failure: "error should be the last type when returning multiple items", | |||||
| }) | |||||
| break // only flag one | |||||
| } | |||||
| } | |||||
| return w | |||||
| } | |||||
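| // Illustrative example (hypothetical): the signature | |||||
| //   func parse(s string) (error, int) | |||||
| // is reported because the error is not the last result; (int, error) satisfies the rule. | |||||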
| @@ -0,0 +1,98 @@ | |||||
| package rule | |||||
| import ( | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "strconv" | |||||
| "unicode" | |||||
| "unicode/utf8" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ErrorStringsRule lints error strings that are capitalized or end with punctuation. | |||||
| type ErrorStringsRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ErrorStringsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintErrorStrings{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ErrorStringsRule) Name() string { | |||||
| return "error-strings" | |||||
| } | |||||
| type lintErrorStrings struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor { | |||||
| ce, ok := n.(*ast.CallExpr) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { | |||||
| return w | |||||
| } | |||||
| if len(ce.Args) < 1 { | |||||
| return w | |||||
| } | |||||
| str, ok := ce.Args[0].(*ast.BasicLit) | |||||
| if !ok || str.Kind != token.STRING { | |||||
| return w | |||||
| } | |||||
| s, _ := strconv.Unquote(str.Value) // can assume well-formed Go | |||||
| if s == "" { | |||||
| return w | |||||
| } | |||||
| clean, conf := lintErrorString(s) | |||||
| if clean { | |||||
| return w | |||||
| } | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: str, | |||||
| Confidence: conf, | |||||
| Category: "errors", | |||||
| Failure: "error strings should not be capitalized or end with punctuation or a newline", | |||||
| }) | |||||
| return w | |||||
| } | |||||
| func lintErrorString(s string) (isClean bool, conf float64) { | |||||
| const basicConfidence = 0.8 | |||||
| const capConfidence = basicConfidence - 0.2 | |||||
| first, firstN := utf8.DecodeRuneInString(s) | |||||
| last, _ := utf8.DecodeLastRuneInString(s) | |||||
| if last == '.' || last == ':' || last == '!' || last == '\n' { | |||||
| return false, basicConfidence | |||||
| } | |||||
| if unicode.IsUpper(first) { | |||||
| // People use proper nouns and exported Go identifiers in error strings, | |||||
| // so decrease the confidence of warnings for capitalization. | |||||
| if len(s) <= firstN { | |||||
| return false, capConfidence | |||||
| } | |||||
| // Flag strings starting with something that doesn't look like an initialism. | |||||
| if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) { | |||||
| return false, capConfidence | |||||
| } | |||||
| } | |||||
| return true, 0 | |||||
| } | |||||
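| // Illustrative example (hypothetical): | |||||
| //   errors.New("Unable to open file.") | |||||
| // is reported (capitalized and ends with punctuation), while | |||||
| //   errors.New("unable to open file") | |||||
| // passes lintErrorString above. | |||||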
| @@ -0,0 +1,93 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "regexp" | |||||
| "strings" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ErrorfRule lints uses of errors.New(fmt.Sprintf(...)) that should be fmt.Errorf(...). | |||||
| type ErrorfRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ErrorfRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| fileAst := file.AST | |||||
| walker := lintErrorf{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| file.Pkg.TypeCheck() | |||||
| ast.Walk(walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ErrorfRule) Name() string { | |||||
| return "errorf" | |||||
| } | |||||
| type lintErrorf struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintErrorf) Visit(n ast.Node) ast.Visitor { | |||||
| ce, ok := n.(*ast.CallExpr) | |||||
| if !ok || len(ce.Args) != 1 { | |||||
| return w | |||||
| } | |||||
| isErrorsNew := isPkgDot(ce.Fun, "errors", "New") | |||||
| var isTestingError bool | |||||
| se, ok := ce.Fun.(*ast.SelectorExpr) | |||||
| if ok && se.Sel.Name == "Error" { | |||||
| if typ := w.file.Pkg.TypeOf(se.X); typ != nil { | |||||
| isTestingError = typ.String() == "*testing.T" | |||||
| } | |||||
| } | |||||
| if !isErrorsNew && !isTestingError { | |||||
| return w | |||||
| } | |||||
| arg := ce.Args[0] | |||||
| ce, ok = arg.(*ast.CallExpr) | |||||
| if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") { | |||||
| return w | |||||
| } | |||||
| errorfPrefix := "fmt" | |||||
| if isTestingError { | |||||
| errorfPrefix = w.file.Render(se.X) | |||||
| } | |||||
| failure := lint.Failure{ | |||||
| Category: "errors", | |||||
| Node: n, | |||||
| Confidence: 1, | |||||
| Failure: fmt.Sprintf("should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", w.file.Render(se), errorfPrefix), | |||||
| } | |||||
| m := srcLineWithMatch(w.file, ce, `^(.*)`+w.file.Render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`) | |||||
| if m != nil { | |||||
| failure.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3] | |||||
| } | |||||
| w.onFailure(failure) | |||||
| return w | |||||
| } | |||||
| func srcLineWithMatch(file *lint.File, node ast.Node, pattern string) (m []string) { | |||||
| line := srcLine(file.Content(), file.ToPosition(node.Pos())) | |||||
| line = strings.TrimSuffix(line, "\n") | |||||
| rx := regexp.MustCompile(pattern) | |||||
| return rx.FindStringSubmatch(line) | |||||
| } | |||||
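| // Illustrative example (hypothetical): | |||||
| //   return errors.New(fmt.Sprintf("bad status %d", code)) | |||||
| // is reported with the replacement | |||||
| //   return fmt.Errorf("bad status %d", code) | |||||
| // and the same rewrite is suggested for t.Error(fmt.Sprintf(...)) on a *testing.T. | |||||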
| @@ -0,0 +1,272 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "go/token" | |||||
| "strings" | |||||
| "unicode" | |||||
| "unicode/utf8" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // ExportedRule lints documentation comments and naming of exported symbols. | |||||
| type ExportedRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *ExportedRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| if isTest(file) { | |||||
| return failures | |||||
| } | |||||
| fileAst := file.AST | |||||
| walker := lintExported{ | |||||
| file: file, | |||||
| fileAst: fileAst, | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| genDeclMissingComments: make(map[*ast.GenDecl]bool), | |||||
| } | |||||
| ast.Walk(&walker, fileAst) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *ExportedRule) Name() string { | |||||
| return "exported" | |||||
| } | |||||
| type lintExported struct { | |||||
| file *lint.File | |||||
| fileAst *ast.File | |||||
| lastGen *ast.GenDecl | |||||
| genDeclMissingComments map[*ast.GenDecl]bool | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { | |||||
| if !ast.IsExported(fn.Name.Name) { | |||||
| // func is unexported | |||||
| return | |||||
| } | |||||
| kind := "function" | |||||
| name := fn.Name.Name | |||||
| if fn.Recv != nil && len(fn.Recv.List) > 0 { | |||||
| // method | |||||
| kind = "method" | |||||
| recv := receiverType(fn) | |||||
| if !ast.IsExported(recv) { | |||||
| // receiver is unexported | |||||
| return | |||||
| } | |||||
| if commonMethods[name] { | |||||
| return | |||||
| } | |||||
| switch name { | |||||
| case "Len", "Less", "Swap": | |||||
| if w.file.Pkg.Sortable[recv] { | |||||
| return | |||||
| } | |||||
| } | |||||
| name = recv + "." + name | |||||
| } | |||||
| if fn.Doc == nil { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: fn, | |||||
| Confidence: 1, | |||||
| Category: "comments", | |||||
| Failure: fmt.Sprintf("exported %s %s should have comment or be unexported", kind, name), | |||||
| }) | |||||
| return | |||||
| } | |||||
| s := normalizeText(fn.Doc.Text()) | |||||
| prefix := fn.Name.Name + " " | |||||
| if !strings.HasPrefix(s, prefix) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: fn.Doc, | |||||
| Confidence: 0.8, | |||||
| Category: "comments", | |||||
| Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), | |||||
| }) | |||||
| } | |||||
| } | |||||
| func (w *lintExported) checkStutter(id *ast.Ident, thing string) { | |||||
| pkg, name := w.fileAst.Name.Name, id.Name | |||||
| if !ast.IsExported(name) { | |||||
| // unexported name | |||||
| return | |||||
| } | |||||
| // A name stutters if the package name is a strict prefix | |||||
| // and the next character of the name starts a new word. | |||||
| if len(name) <= len(pkg) { | |||||
| // name is too short to stutter. | |||||
| // This permits the name to be the same as the package name. | |||||
| return | |||||
| } | |||||
| if !strings.EqualFold(pkg, name[:len(pkg)]) { | |||||
| return | |||||
| } | |||||
| // We can assume the name is well-formed UTF-8. | |||||
| // If the next rune after the package name is uppercase or an underscore | |||||
| // then it's starting a new word and thus this name stutters. | |||||
| rem := name[len(pkg):] | |||||
| if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: id, | |||||
| Confidence: 0.8, | |||||
| Category: "naming", | |||||
| Failure: fmt.Sprintf("%s name will be used as %s.%s by other packages, and that stutters; consider calling this %s", thing, pkg, name, rem), | |||||
| }) | |||||
| } | |||||
| } | |||||
| func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { | |||||
| if !ast.IsExported(t.Name.Name) { | |||||
| return | |||||
| } | |||||
| if doc == nil { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: t, | |||||
| Confidence: 1, | |||||
| Category: "comments", | |||||
| Failure: fmt.Sprintf("exported type %v should have comment or be unexported", t.Name), | |||||
| }) | |||||
| return | |||||
| } | |||||
| s := normalizeText(doc.Text()) | |||||
| articles := [...]string{"A", "An", "The", "This"} | |||||
| for _, a := range articles { | |||||
| if t.Name.Name == a { | |||||
| continue | |||||
| } | |||||
| if strings.HasPrefix(s, a+" ") { | |||||
| s = s[len(a)+1:] | |||||
| break | |||||
| } | |||||
| } | |||||
| if !strings.HasPrefix(s, t.Name.Name+" ") { | |||||
| w.onFailure(lint.Failure{ | |||||
| Node: doc, | |||||
| Confidence: 1, | |||||
| Category: "comments", | |||||
| Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name), | |||||
| }) | |||||
| } | |||||
| } | |||||
| func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { | |||||
| kind := "var" | |||||
| if gd.Tok == token.CONST { | |||||
| kind = "const" | |||||
| } | |||||
| if len(vs.Names) > 1 { | |||||
| // Check that none are exported except for the first. | |||||
| for _, n := range vs.Names[1:] { | |||||
| if ast.IsExported(n.Name) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Category: "comments", | |||||
| Confidence: 1, | |||||
| Failure: fmt.Sprintf("exported %s %s should have its own declaration", kind, n.Name), | |||||
| Node: vs, | |||||
| }) | |||||
| return | |||||
| } | |||||
| } | |||||
| } | |||||
| // Only one name. | |||||
| name := vs.Names[0].Name | |||||
| if !ast.IsExported(name) { | |||||
| return | |||||
| } | |||||
| if vs.Doc == nil && gd.Doc == nil { | |||||
| if genDeclMissingComments[gd] { | |||||
| return | |||||
| } | |||||
| block := "" | |||||
| if kind == "const" && gd.Lparen.IsValid() { | |||||
| block = " (or a comment on this block)" | |||||
| } | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: vs, | |||||
| Category: "comments", | |||||
| Failure: fmt.Sprintf("exported %s %s should have comment%s or be unexported", kind, name, block), | |||||
| }) | |||||
| genDeclMissingComments[gd] = true | |||||
| return | |||||
| } | |||||
| // If this GenDecl has parens and a comment, we don't check its comment form. | |||||
| if gd.Lparen.IsValid() && gd.Doc != nil { | |||||
| return | |||||
| } | |||||
| // The relevant text to check will be on either vs.Doc or gd.Doc. | |||||
| // Use vs.Doc preferentially. | |||||
| doc := vs.Doc | |||||
| if doc == nil { | |||||
| doc = gd.Doc | |||||
| } | |||||
| prefix := name + " " | |||||
| s := normalizeText(doc.Text()) | |||||
| if !strings.HasPrefix(s, prefix) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: doc, | |||||
| Category: "comments", | |||||
| Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), | |||||
| }) | |||||
| } | |||||
| } | |||||
| // normalizeText is a helper function that normalizes comment strings by: | |||||
| // * removing one leading space | |||||
| // | |||||
| // This function is needed because ast.CommentGroup.Text() does not handle //-style and /*-style comments uniformly | |||||
| func normalizeText(t string) string { | |||||
| return strings.TrimPrefix(t, " ") | |||||
| } | |||||
| func (w *lintExported) Visit(n ast.Node) ast.Visitor { | |||||
| switch v := n.(type) { | |||||
| case *ast.GenDecl: | |||||
| if v.Tok == token.IMPORT { | |||||
| return nil | |||||
| } | |||||
| // token.CONST, token.TYPE or token.VAR | |||||
| w.lastGen = v | |||||
| return w | |||||
| case *ast.FuncDecl: | |||||
| w.lintFuncDoc(v) | |||||
| if v.Recv == nil { | |||||
| // Only check for stutter on functions, not methods. | |||||
| // Method names are not used package-qualified. | |||||
| w.checkStutter(v.Name, "func") | |||||
| } | |||||
| // Don't proceed inside funcs. | |||||
| return nil | |||||
| case *ast.TypeSpec: | |||||
| // inside a GenDecl, which usually has the doc | |||||
| doc := v.Doc | |||||
| if doc == nil { | |||||
| doc = w.lastGen.Doc | |||||
| } | |||||
| w.lintTypeDoc(v, doc) | |||||
| w.checkStutter(v.Name, "type") | |||||
| // Don't proceed inside types. | |||||
| return nil | |||||
| case *ast.ValueSpec: | |||||
| w.lintValueSpecDoc(v, w.lastGen, w.genDeclMissingComments) | |||||
| return nil | |||||
| } | |||||
| return w | |||||
| } | |||||
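| // Illustrative example (hypothetical): an exported declaration without a doc comment, e.g. | |||||
| //   func ParseToken(s string) (Token, error) { ... } | |||||
| // is reported, and a doc comment not of the form "ParseToken ..." is reported separately; | |||||
| // checkStutter above would also flag a name such as token.TokenParser and suggest Parser. | |||||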
| @@ -0,0 +1,69 @@ | |||||
| package rule | |||||
| import ( | |||||
| "regexp" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // FileHeaderRule lints the presence of a file header comment matching a configured pattern. | |||||
| type FileHeaderRule struct{} | |||||
| var ( | |||||
| multiRegexp = regexp.MustCompile("^/\\*") | |||||
| singleRegexp = regexp.MustCompile("^//") | |||||
| ) | |||||
| // Apply applies the rule to given file. | |||||
| func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { | |||||
| if len(arguments) != 1 { | |||||
| panic(`invalid configuration for "file-header" rule`) | |||||
| } | |||||
| header, ok := arguments[0].(string) | |||||
| if !ok { | |||||
| panic(`invalid argument for "file-header" rule: first argument should be a string`) | |||||
| } | |||||
| failure := []lint.Failure{ | |||||
| { | |||||
| Node: file.AST, | |||||
| Confidence: 1, | |||||
| Failure: "the file doesn't have an appropriate header", | |||||
| }, | |||||
| } | |||||
| if len(file.AST.Comments) == 0 { | |||||
| return failure | |||||
| } | |||||
| g := file.AST.Comments[0] | |||||
| if g == nil { | |||||
| return failure | |||||
| } | |||||
| comment := "" | |||||
| for _, c := range g.List { | |||||
| text := c.Text | |||||
| if multiRegexp.Match([]byte(text)) { | |||||
| text = text[2 : len(text)-2] | |||||
| } else if singleRegexp.Match([]byte(text)) { | |||||
| text = text[2:] | |||||
| } | |||||
| comment += text | |||||
| } | |||||
| regex, err := regexp.Compile(header) | |||||
| if err != nil { | |||||
| panic(err.Error()) | |||||
| } | |||||
| if !regex.Match([]byte(comment)) { | |||||
| return failure | |||||
| } | |||||
| return nil | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *FileHeaderRule) Name() string { | |||||
| return "file-header" | |||||
| } | |||||
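| // Illustrative example (hypothetical configuration, assuming revive's TOML config format): | |||||
| //   [rule.file-header] | |||||
| //     arguments = ["Copyright \\d{4} The Gitea Authors"] | |||||
| // The first comment group of each file must match this regexp, otherwise the failure above is returned. | |||||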
| @@ -0,0 +1,104 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/mgechev/revive/lint" | |||||
| "go/ast" | |||||
| ) | |||||
| // FlagParamRule lints boolean parameters that are used as control flags. | |||||
| type FlagParamRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *FlagParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| w := lintFlagParamRule{onFailure: onFailure} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *FlagParamRule) Name() string { | |||||
| return "flag-parameter" | |||||
| } | |||||
| type lintFlagParamRule struct { | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintFlagParamRule) Visit(node ast.Node) ast.Visitor { | |||||
| fd, ok := node.(*ast.FuncDecl) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| if fd.Body == nil { | |||||
| return nil // skip whole function declaration | |||||
| } | |||||
| for _, p := range fd.Type.Params.List { | |||||
| t := p.Type | |||||
| id, ok := t.(*ast.Ident) | |||||
| if !ok { | |||||
| continue | |||||
| } | |||||
| if id.Name != "bool" { | |||||
| continue | |||||
| } | |||||
| cv := conditionVisitor{p.Names, fd, w} | |||||
| ast.Walk(cv, fd.Body) | |||||
| } | |||||
| return w | |||||
| } | |||||
| type conditionVisitor struct { | |||||
| ids []*ast.Ident | |||||
| fd *ast.FuncDecl | |||||
| linter lintFlagParamRule | |||||
| } | |||||
| func (w conditionVisitor) Visit(node ast.Node) ast.Visitor { | |||||
| ifStmt, ok := node.(*ast.IfStmt) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| fselect := func(n ast.Node) bool { | |||||
| ident, ok := n.(*ast.Ident) | |||||
| if !ok { | |||||
| return false | |||||
| } | |||||
| for _, id := range w.ids { | |||||
| if ident.Name == id.Name { | |||||
| return true | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| uses := pick(ifStmt.Cond, fselect, nil) | |||||
| if len(uses) < 1 { | |||||
| return w | |||||
| } | |||||
| w.linter.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Node: w.fd.Type.Params, | |||||
| Category: "bad practice", | |||||
| Failure: fmt.Sprintf("parameter '%s' seems to be a control flag, avoid control coupling", uses[0]), | |||||
| }) | |||||
| return nil | |||||
| } | |||||
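| // Illustrative example (hypothetical): the boolean parameter in | |||||
| //   func save(u *User, validate bool) { if validate { ... } ... } | |||||
| // is referenced inside an if condition, so it is reported as a likely control flag; | |||||
| // splitting the function into save and saveValidated is the usual fix. | |||||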
| @@ -0,0 +1,68 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // FunctionResultsLimitRule lints the number of results returned by a function. | |||||
| type FunctionResultsLimitRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *FunctionResultsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { | |||||
| if len(arguments) != 1 { | |||||
| panic(`invalid configuration for "function-result-limit"`) | |||||
| } | |||||
| max, ok := arguments[0].(int64) // comma-ok assertion so a misconfigured argument is reported by the panic below | |||||
| if !ok { | |||||
| panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])) | |||||
| } | |||||
| if max < 0 { | |||||
| panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`) | |||||
| } | |||||
| var failures []lint.Failure | |||||
| walker := lintFunctionResultsNum{ | |||||
| max: int(max), | |||||
| onFailure: func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| }, | |||||
| } | |||||
| ast.Walk(walker, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *FunctionResultsLimitRule) Name() string { | |||||
| return "function-result-limit" | |||||
| } | |||||
| type lintFunctionResultsNum struct { | |||||
| max int | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor { | |||||
| node, ok := n.(*ast.FuncDecl) | |||||
| if ok { | |||||
| num := 0 | |||||
| if node.Type.Results != nil { | |||||
| num = node.Type.Results.NumFields() | |||||
| } | |||||
| if num > w.max { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 1, | |||||
| Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", w.max, num), | |||||
| Node: node.Type, | |||||
| }) | |||||
| return w | |||||
| } | |||||
| } | |||||
| return w | |||||
| } | |||||
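| // Illustrative example (hypothetical): with the rule configured as max = 3, a signature like | |||||
| //   func stats() (min, max, mean, stddev float64) | |||||
| // is reported because it returns four results; grouping them in a struct is a common fix. | |||||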
| @@ -0,0 +1,70 @@ | |||||
| package rule | |||||
| import ( | |||||
| "fmt" | |||||
| "go/ast" | |||||
| "strings" | |||||
| "github.com/mgechev/revive/lint" | |||||
| ) | |||||
| // GetReturnRule lints getter-like functions (Get...) that do not return a value. | |||||
| type GetReturnRule struct{} | |||||
| // Apply applies the rule to given file. | |||||
| func (r *GetReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { | |||||
| var failures []lint.Failure | |||||
| onFailure := func(failure lint.Failure) { | |||||
| failures = append(failures, failure) | |||||
| } | |||||
| w := lintReturnRule{onFailure} | |||||
| ast.Walk(w, file.AST) | |||||
| return failures | |||||
| } | |||||
| // Name returns the rule name. | |||||
| func (r *GetReturnRule) Name() string { | |||||
| return "get-return" | |||||
| } | |||||
| type lintReturnRule struct { | |||||
| onFailure func(lint.Failure) | |||||
| } | |||||
| func isGetter(name string) bool { | |||||
| if strings.HasPrefix(strings.ToUpper(name), "GET") { | |||||
| if len(name) > 3 { | |||||
| c := name[3] | |||||
| return !(c >= 'a' && c <= 'z') | |||||
| } | |||||
| } | |||||
| return false | |||||
| } | |||||
| func hasResults(rs *ast.FieldList) bool { | |||||
| return rs != nil && len(rs.List) > 0 | |||||
| } | |||||
| func (w lintReturnRule) Visit(node ast.Node) ast.Visitor { | |||||
| fd, ok := node.(*ast.FuncDecl) | |||||
| if !ok { | |||||
| return w | |||||
| } | |||||
| if !isGetter(fd.Name.Name) { | |||||
| return w | |||||
| } | |||||
| if !hasResults(fd.Type.Results) { | |||||
| w.onFailure(lint.Failure{ | |||||
| Confidence: 0.8, | |||||
| Node: fd, | |||||
| Category: "logic", | |||||
| Failure: fmt.Sprintf("function '%s' seems to be a getter but it does not return any result", fd.Name.Name), | |||||
| }) | |||||
| } | |||||
| return w | |||||
| } | |||||
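| // Illustrative example (hypothetical): | |||||
| //   func GetTimeout() { ... } | |||||
| // looks like a getter but returns nothing, so it is reported; either return the value | |||||
| // or rename the function so it no longer starts with "Get". | |||||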