* Fix validate() function to handle errors in embedded anon structs
* Implement webhook branch filter

See #2025, #3998.
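In rough terms, the branch filter is a glob pattern stored on each webhook; before a push or branch event is delivered, the branch name is matched against it. A minimal standalone sketch of that idea, using github.com/gobwas/glob (the dependency added below); the `webhook` type and `matchesBranch` name here are illustrative, not the exact Gitea types:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

// webhook is a simplified stand-in for the real model (illustrative only).
type webhook struct {
	BranchFilter string
}

// matchesBranch reports whether the hook should fire for the given branch.
// An empty filter or "*" means "all branches".
func (w webhook) matchesBranch(branch string) bool {
	if w.BranchFilter == "" || w.BranchFilter == "*" {
		return true
	}
	g, err := glob.Compile(w.BranchFilter)
	if err != nil {
		// Invalid patterns are rejected at form/API validation time,
		// so this should not normally happen.
		return false
	}
	return g.Match(branch)
}

func main() {
	w := webhook{BranchFilter: "{master,release*}"}
	fmt.Println(w.matchesBranch("master"))      // true
	fmt.Println(w.matchesBranch("release-1.2")) // true
	fmt.Println(w.matchesBranch("feature/x"))   // false
}
```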
@@ -47,6 +47,7 @@ require (
    github.com/go-sql-driver/mysql v1.4.1
    github.com/go-swagger/go-swagger v0.20.1
    github.com/go-xorm/xorm v0.7.7-0.20190822154023-17592d96b35b
    github.com/gobwas/glob v0.2.3
    github.com/gogits/chardet v0.0.0-20150115103509-2404f7772561
    github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14
    github.com/google/go-github/v24 v24.0.1
@@ -236,6 +236,8 @@ github.com/go-xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a/go.mod h1:56xuuq
github.com/go-xorm/xorm v0.7.6/go.mod h1:nqz2TAsuOHWH2yk4FYWtacCGgdbrcdZ5mF1XadqEHls=
github.com/go-xorm/xorm v0.7.7-0.20190822154023-17592d96b35b h1:Y0hWUheXDHpIs7BWtJcykO4d1VOsVDKg1PsP5YJwxxM=
github.com/go-xorm/xorm v0.7.7-0.20190822154023-17592d96b35b/go.mod h1:nqz2TAsuOHWH2yk4FYWtacCGgdbrcdZ5mF1XadqEHls=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/gogits/chardet v0.0.0-20150115103509-2404f7772561 h1:deE7ritpK04PgtpyVOS2TYcQEld9qLCD5b5EbVNOuLA=
github.com/gogits/chardet v0.0.0-20150115103509-2404f7772561/go.mod h1:YgYOrVn3Nj9Tq0EvjmFbphRytDj7JNRoWSStJZWDJTQ=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
@@ -22,3 +22,10 @@
  content_type: 1 # json
  events: '{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}'
  is_active: true

-
  id: 4
  repo_id: 2
  url: www.example.com/url4
  content_type: 1 # json
  events: '{"push_only":true,"branch_filter":"{master,feature*}"}'
  is_active: true
@@ -19,12 +19,14 @@ import (
    "strings"
    "time"

    "code.gitea.io/gitea/modules/git"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/setting"
    api "code.gitea.io/gitea/modules/structs"
    "code.gitea.io/gitea/modules/sync"
    "code.gitea.io/gitea/modules/timeutil"

    "github.com/gobwas/glob"
    gouuid "github.com/satori/go.uuid"
    "github.com/unknwon/com"
)
@@ -84,9 +86,10 @@ type HookEvents struct {
// HookEvent represents events that will delivery hook.
type HookEvent struct {
    PushOnly bool `json:"push_only"`
    SendEverything bool `json:"send_everything"`
    ChooseEvents bool `json:"choose_events"`
    BranchFilter string `json:"branch_filter"`

    HookEvents `json:"events"`
}
@@ -256,6 +259,21 @@ func (w *Webhook) EventsArray() []string {
    return events
}

func (w *Webhook) checkBranch(branch string) bool {
    if w.BranchFilter == "" || w.BranchFilter == "*" {
        return true
    }

    g, err := glob.Compile(w.BranchFilter)
    if err != nil {
        // should not really happen as BranchFilter is validated
        log.Error("CheckBranch failed: %s", err)
        return false
    }

    return g.Match(branch)
}

// CreateWebhook creates a new web hook.
func CreateWebhook(w *Webhook) error {
    return createWebhook(x, w)
@@ -651,6 +669,25 @@ func PrepareWebhook(w *Webhook, repo *Repository, event HookEventType, p api.Pay
    return prepareWebhook(x, w, repo, event, p)
}

// getPayloadBranch returns branch for hook event, if applicable.
func getPayloadBranch(p api.Payloader) string {
    switch pp := p.(type) {
    case *api.CreatePayload:
        if pp.RefType == "branch" {
            return pp.Ref
        }
    case *api.DeletePayload:
        if pp.RefType == "branch" {
            return pp.Ref
        }
    case *api.PushPayload:
        if strings.HasPrefix(pp.Ref, git.BranchPrefix) {
            return pp.Ref[len(git.BranchPrefix):]
        }
    }
    return ""
}

func prepareWebhook(e Engine, w *Webhook, repo *Repository, event HookEventType, p api.Payloader) error {
    for _, e := range w.eventCheckers() {
        if event == e.typ {

@@ -660,6 +697,15 @@ func prepareWebhook(e Engine, w *Webhook, repo *Repository, event HookEventType,
        }
    }

    // If payload has no associated branch (e.g. it's a new tag, issue, etc.),
    // branch filter has no effect.
    if branch := getPayloadBranch(p); branch != "" {
        if !w.checkBranch(branch) {
            log.Info("Branch %q doesn't match branch filter %q, skipping", branch, w.BranchFilter)
            return nil
        }
    }

    var payloader api.Payloader
    var err error
    // Use separate objects so modifications won't be made on payload on non-Gogs/Gitea type hooks.
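For reference, only branch creation/deletion payloads and pushes carry a branch; pushes identify it through the `refs/heads/` prefix. A small illustrative check (the value of `git.BranchPrefix` is assumed here to be `refs/heads/`, as in Gitea's git module):

```go
package main

import (
	"fmt"
	"strings"
)

// branchPrefix mirrors git.BranchPrefix used above (assumed value).
const branchPrefix = "refs/heads/"

// payloadBranch extracts a branch name from a push ref, or "" when the ref
// is not a branch (e.g. a tag ref like "refs/tags/v1.0").
func payloadBranch(ref string) string {
	if strings.HasPrefix(ref, branchPrefix) {
		return ref[len(branchPrefix):]
	}
	return ""
}

func main() {
	fmt.Println(payloadBranch("refs/heads/feature/7791")) // "feature/7791"
	fmt.Println(payloadBranch("refs/tags/v1.0.0"))        // "" -> filter not applied
}
```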
@@ -270,6 +270,40 @@ func TestPrepareWebhooks(t *testing.T) {
    }
}

func TestPrepareWebhooksBranchFilterMatch(t *testing.T) {
    assert.NoError(t, PrepareTestDatabase())
    repo := AssertExistsAndLoadBean(t, &Repository{ID: 2}).(*Repository)
    hookTasks := []*HookTask{
        {RepoID: repo.ID, HookID: 4, EventType: HookEventPush},
    }
    for _, hookTask := range hookTasks {
        AssertNotExistsBean(t, hookTask)
    }
    // this test also ensures that * doesn't handle / in any special way (like shell would)
    assert.NoError(t, PrepareWebhooks(repo, HookEventPush, &api.PushPayload{Ref: "refs/heads/feature/7791"}))
    for _, hookTask := range hookTasks {
        AssertExistsAndLoadBean(t, hookTask)
    }
}

func TestPrepareWebhooksBranchFilterNoMatch(t *testing.T) {
    assert.NoError(t, PrepareTestDatabase())
    repo := AssertExistsAndLoadBean(t, &Repository{ID: 2}).(*Repository)
    hookTasks := []*HookTask{
        {RepoID: repo.ID, HookID: 4, EventType: HookEventPush},
    }
    for _, hookTask := range hookTasks {
        AssertNotExistsBean(t, hookTask)
    }
    assert.NoError(t, PrepareWebhooks(repo, HookEventPush, &api.PushPayload{Ref: "refs/heads/fix_weird_bug"}))
    for _, hookTask := range hookTasks {
        AssertNotExistsBean(t, hookTask)
    }
}

// TODO TestHookTask_deliver
// TODO TestDeliverHooks
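The first test above relies on `*` crossing `/`: the branch filter is compiled without any separator runes, so `feature*` matches `feature/7791`. A small demonstration of that gobwas/glob behaviour (the second pattern, compiled with a separator, is shown only for contrast and is not what the branch filter does):

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Compiled without separators: `*` matches any sequence, including "/".
	noSep := glob.MustCompile("{master,feature*}")
	fmt.Println(noSep.Match("feature/7791")) // true

	// Compiled with '/' as a separator, `*` stops at "/".
	withSep := glob.MustCompile("{master,feature*}", '/')
	fmt.Println(withSep.Match("feature/7791")) // false
}
```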
@@ -310,6 +310,10 @@ func validate(errs binding.Errors, data map[string]interface{}, f Form, l macaro
    }

    data["HasError"] = true
    // If the field with name errs[0].FieldNames[0] is not found in form
    // somehow, some code later on will panic on Data["ErrorMsg"].(string).
    // So initialize it to some default.
    data["ErrorMsg"] = l.Tr("form.unknown_error")
    AssignForm(f, data)

    typ := reflect.TypeOf(f)

@@ -320,16 +324,9 @@ func validate(errs binding.Errors, data map[string]interface{}, f Form, l macaro
        val = val.Elem()
    }

-   for i := 0; i < typ.NumField(); i++ {
-       field := typ.Field(i)
+   if field, ok := typ.FieldByName(errs[0].FieldNames[0]); ok {
        fieldName := field.Tag.Get("form")
-       // Allow ignored fields in the struct
-       if fieldName == "-" {
-           continue
-       }
-
-       if errs[0].FieldNames[0] == field.Name {
+       if fieldName != "-" {
            data["Err_"+field.Name] = true

            trName := field.Tag.Get("locale")

@@ -360,6 +357,8 @@ func validate(errs binding.Errors, data map[string]interface{}, f Form, l macaro
            case binding.ERR_URL:
                data["ErrorMsg"] = trName + l.Tr("form.url_error")
            case binding.ERR_INCLUDE:
                data["ErrorMsg"] = trName + l.Tr("form.include_error", GetInclude(field))
            case validation.ErrGlobPattern:
                data["ErrorMsg"] = trName + l.Tr("form.glob_pattern_error", errs[0].Message)
            default:
                data["ErrorMsg"] = l.Tr("form.unknown_error") + " " + errs[0].Classification
            }
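The switch from indexing `typ.Field(i)` to `typ.FieldByName(...)` matters because `reflect.Type.FieldByName` also searches fields promoted from embedded (anonymous) structs, which a plain loop over `NumField()` never sees. A standalone illustration; the `Base`/`Form` types are invented for this sketch, not the real Gitea forms:

```go
package main

import (
	"fmt"
	"reflect"
)

// Base is an embedded form fragment (illustrative only).
type Base struct {
	PayloadURL string `form:"payload_url" binding:"Required"`
}

// Form embeds Base anonymously; PayloadURL becomes a promoted field.
type Form struct {
	Base
	BranchFilter string `form:"branch_filter" binding:"GlobPattern"`
}

func main() {
	typ := reflect.TypeOf(Form{})

	// NumField reports only the direct fields: the anonymous Base and BranchFilter.
	fmt.Println(typ.NumField()) // 2

	// FieldByName also finds the promoted field inside the embedded struct,
	// which is what lets validate() resolve an error's field name to its tags.
	field, ok := typ.FieldByName("PayloadURL")
	fmt.Println(ok, field.Tag.Get("binding")) // true Required
}
```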
@@ -185,6 +185,7 @@ type WebhookForm struct {
    PullRequest bool
    Repository bool
    Active bool
    BranchFilter string `binding:"GlobPattern"`
}

// PushOnly if the hook will be triggered when push
@@ -40,17 +40,19 @@ type CreateHookOption struct {
    // enum: gitea,gogs,slack,discord
    Type string `json:"type" binding:"Required"`
    // required: true
    Config map[string]string `json:"config" binding:"Required"`
    Events []string `json:"events"`
    BranchFilter string `json:"branch_filter" binding:"GlobPattern"`
    // default: false
    Active bool `json:"active"`
}

// EditHookOption options when modify one hook
type EditHookOption struct {
    Config map[string]string `json:"config"`
    Events []string `json:"events"`
    BranchFilter string `json:"branch_filter" binding:"GlobPattern"`
    Active *bool `json:"active"`
}

// Payloader payload is some part of one hook
@@ -10,11 +10,15 @@ import (
    "strings"

    "gitea.com/macaron/binding"

    "github.com/gobwas/glob"
)

const (
    // ErrGitRefName is git reference name error
    ErrGitRefName = "GitRefNameError"
    // ErrGlobPattern is returned when glob pattern is invalid
    ErrGlobPattern = "GlobPattern"
)

var (

@@ -28,6 +32,7 @@ var (
func AddBindingRules() {
    addGitRefNameBindingRule()
    addValidURLBindingRule()
    addGlobPatternRule()
}

func addGitRefNameBindingRule() {

@@ -82,6 +87,26 @@ func addValidURLBindingRule() {
    })
}

func addGlobPatternRule() {
    binding.AddRule(&binding.Rule{
        IsMatch: func(rule string) bool {
            return rule == "GlobPattern"
        },
        IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
            str := fmt.Sprintf("%v", val)
            if len(str) != 0 {
                if _, err := glob.Compile(str); err != nil {
                    errs.Add([]string{name}, ErrGlobPattern, err.Error())
                    return false, errs
                }
            }
            return true, errs
        },
    })
}

func portOnly(hostport string) string {
    colon := strings.IndexByte(hostport, ':')
    if colon == -1 {

@@ -26,8 +26,9 @@ type (
    }

    TestForm struct {
        BranchName string `form:"BranchName" binding:"GitRefName"`
        URL string `form:"ValidUrl" binding:"ValidUrl"`
        GlobPattern string `form:"GlobPattern" binding:"GlobPattern"`
    }
)
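For reference, the `GlobPattern` binding rule above treats a value as invalid exactly when `glob.Compile` rejects it, and an empty value is always allowed (empty filter means "all branches"). A small check outside the binding framework:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	for _, pattern := range []string{"", "{master,release*}", "[a-"} {
		if pattern == "" {
			fmt.Printf("%-20q -> valid (empty filter means all branches)\n", pattern)
			continue
		}
		if _, err := glob.Compile(pattern); err != nil {
			fmt.Printf("%-20q -> invalid: %v\n", pattern, err)
			continue
		}
		fmt.Printf("%-20q -> valid\n", pattern)
	}
}
```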
@@ -0,0 +1,62 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package validation

import (
    "testing"

    "gitea.com/macaron/binding"
    "github.com/gobwas/glob"
)

func getGlobPatternErrorString(pattern string) string {
    // It would be unwise to rely on that glob
    // compilation errors don't ever change.
    if _, err := glob.Compile(pattern); err != nil {
        return err.Error()
    }
    return ""
}

var globValidationTestCases = []validationTestCase{
    {
        description: "Empty glob pattern",
        data: TestForm{
            GlobPattern: "",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "Valid glob",
        data: TestForm{
            GlobPattern: "{master,release*}",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "Invalid glob",
        data: TestForm{
            GlobPattern: "[a-",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames:     []string{"GlobPattern"},
                Classification: ErrGlobPattern,
                Message:        getGlobPatternErrorString("[a-"),
            },
        },
    },
}

func Test_GlobPatternValidation(t *testing.T) {
    AddBindingRules()

    for _, testCase := range globValidationTestCases {
        t.Run(testCase.description, func(t *testing.T) {
            performValidationTest(t, testCase)
        })
    }
}
@@ -300,6 +300,7 @@ max_size_error = ` must contain at most %s characters.`
email_error = ` is not a valid email address.`
url_error = ` is not a valid URL.`
include_error = ` must contain substring '%s'.`
glob_pattern_error = ` glob pattern is invalid: %s.`
unknown_error = Unknown error:
captcha_incorrect = The CAPTCHA code is incorrect.
password_not_match = The passwords do not match.

@@ -1258,6 +1259,8 @@ settings.event_pull_request = Pull Request
settings.event_pull_request_desc = Pull request opened, closed, reopened, edited, approved, rejected, review comment, assigned, unassigned, label updated, label cleared or synchronized.
settings.event_push = Push
settings.event_push_desc = Git push to a repository.
settings.branch_filter = Branch filter
settings.branch_filter_desc = Branch whitelist for push, branch creation and branch deletion events, specified as glob pattern. If empty or <code>*</code>, events for all branches are reported. See <a href="https://godoc.org/github.com/gobwas/glob#Compile">github.com/gobwas/glob</a> documentation for syntax. Examples: <code>master</code>, <code>{master,release*}</code>.
settings.event_repository = Repository
settings.event_repository_desc = Repository created or deleted.
settings.active = Active
@@ -112,6 +112,7 @@ func addHook(ctx *context.APIContext, form *api.CreateHookOption, orgID, repoID
                Repository: com.IsSliceContainsStr(form.Events, string(models.HookEventRepository)),
                Release: com.IsSliceContainsStr(form.Events, string(models.HookEventRelease)),
            },
            BranchFilter: form.BranchFilter,
        },
        IsActive: form.Active,
        HookTaskType: models.ToHookTaskType(form.Type),

@@ -236,6 +237,7 @@ func editHook(ctx *context.APIContext, form *api.EditHookOption, w *models.Webho
    w.PullRequest = com.IsSliceContainsStr(form.Events, string(models.HookEventPullRequest))
    w.Repository = com.IsSliceContainsStr(form.Events, string(models.HookEventRepository))
    w.Release = com.IsSliceContainsStr(form.Events, string(models.HookEventRelease))
    w.BranchFilter = form.BranchFilter

    if err := w.UpdateEvent(); err != nil {
        ctx.Error(500, "UpdateEvent", err)

@@ -145,6 +145,7 @@ func ParseHookEvent(form auth.WebhookForm) *models.HookEvent {
            PullRequest: form.PullRequest,
            Repository: form.Repository,
        },
        BranchFilter: form.BranchFilter,
    }
}
@@ -116,6 +116,13 @@
        </div>
    </div>

    <!-- Branch filter -->
    <div class="field">
        <label for="branch_filter">{{.i18n.Tr "repo.settings.branch_filter"}}</label>
        <input name="branch_filter" type="text" tabindex="0" value="{{or .Webhook.BranchFilter "*"}}">
        <span class="help">{{.i18n.Tr "repo.settings.branch_filter_desc" | Str2html}}</span>
    </div>

    <div class="ui divider"></div>

    <div class="inline field">
@@ -7528,6 +7528,10 @@
        "default": false,
        "x-go-name": "Active"
      },
      "branch_filter": {
        "type": "string",
        "x-go-name": "BranchFilter"
      },
      "config": {
        "type": "object",
        "additionalProperties": {

@@ -8124,6 +8128,10 @@
        "type": "boolean",
        "x-go-name": "Active"
      },
      "branch_filter": {
        "type": "string",
        "x-go-name": "BranchFilter"
      },
      "config": {
        "type": "object",
        "additionalProperties": {
@@ -0,0 +1,8 @@
glob.iml
.idea
*.cpu
*.mem
*.test
*.dot
*.png
*.svg
@@ -0,0 +1,9 @@
sudo: false

language: go

go:
  - 1.5.3

script:
  - go test -v ./...
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2016 Sergey Kamardin

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -0,0 +1,26 @@
#! /bin/bash

bench() {
    filename="/tmp/$1-$2.bench"
    if test -e "${filename}";
    then
        echo "Already exists ${filename}"
    else
        backup=`git rev-parse --abbrev-ref HEAD`
        git checkout $1
        echo -n "Creating ${filename}... "
        go test ./... -run=NONE -bench=$2 > "${filename}" -benchmem
        echo "OK"
        git checkout ${backup}
        sleep 5
    fi
}

to=$1
current=`git rev-parse --abbrev-ref HEAD`

bench ${to} $2
bench ${current} $2

benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench"
@@ -0,0 +1,525 @@
package compiler

// TODO use constructor with all matchers, and to their structs private
// TODO glue multiple Text nodes (like after QuoteMeta)

import (
    "fmt"
    "reflect"

    "github.com/gobwas/glob/match"
    "github.com/gobwas/glob/syntax/ast"
    "github.com/gobwas/glob/util/runes"
)

func optimizeMatcher(matcher match.Matcher) match.Matcher {
    switch m := matcher.(type) {
    case match.Any:
        if len(m.Separators) == 0 {
            return match.NewSuper()
        }

    case match.AnyOf:
        if len(m.Matchers) == 1 {
            return m.Matchers[0]
        }
        return m

    case match.List:
        if m.Not == false && len(m.List) == 1 {
            return match.NewText(string(m.List))
        }
        return m

    case match.BTree:
        m.Left = optimizeMatcher(m.Left)
        m.Right = optimizeMatcher(m.Right)

        r, ok := m.Value.(match.Text)
        if !ok {
            return m
        }

        var (
            leftNil  = m.Left == nil
            rightNil = m.Right == nil
        )
        if leftNil && rightNil {
            return match.NewText(r.Str)
        }

        _, leftSuper := m.Left.(match.Super)
        lp, leftPrefix := m.Left.(match.Prefix)
        la, leftAny := m.Left.(match.Any)

        _, rightSuper := m.Right.(match.Super)
        rs, rightSuffix := m.Right.(match.Suffix)
        ra, rightAny := m.Right.(match.Any)

        switch {
        case leftSuper && rightSuper:
            return match.NewContains(r.Str, false)
        case leftSuper && rightNil:
            return match.NewSuffix(r.Str)
        case rightSuper && leftNil:
            return match.NewPrefix(r.Str)
        case leftNil && rightSuffix:
            return match.NewPrefixSuffix(r.Str, rs.Suffix)
        case rightNil && leftPrefix:
            return match.NewPrefixSuffix(lp.Prefix, r.Str)
        case rightNil && leftAny:
            return match.NewSuffixAny(r.Str, la.Separators)
        case leftNil && rightAny:
            return match.NewPrefixAny(r.Str, ra.Separators)
        }

        return m
    }

    return matcher
}

func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
    if len(matchers) == 0 {
        return nil, fmt.Errorf("compile error: need at least one matcher")
    }
    if len(matchers) == 1 {
        return matchers[0], nil
    }
    if m := glueMatchers(matchers); m != nil {
        return m, nil
    }

    idx := -1
    maxLen := -1
    var val match.Matcher
    for i, matcher := range matchers {
        if l := matcher.Len(); l != -1 && l >= maxLen {
            maxLen = l
            idx = i
            val = matcher
        }
    }

    if val == nil { // not found matcher with static length
        r, err := compileMatchers(matchers[1:])
        if err != nil {
            return nil, err
        }
        return match.NewBTree(matchers[0], nil, r), nil
    }

    left := matchers[:idx]
    var right []match.Matcher
    if len(matchers) > idx+1 {
        right = matchers[idx+1:]
    }

    var l, r match.Matcher
    var err error
    if len(left) > 0 {
        l, err = compileMatchers(left)
        if err != nil {
            return nil, err
        }
    }

    if len(right) > 0 {
        r, err = compileMatchers(right)
        if err != nil {
            return nil, err
        }
    }

    return match.NewBTree(val, l, r), nil
}
func glueMatchers(matchers []match.Matcher) match.Matcher {
    if m := glueMatchersAsEvery(matchers); m != nil {
        return m
    }
    if m := glueMatchersAsRow(matchers); m != nil {
        return m
    }
    return nil
}

func glueMatchersAsRow(matchers []match.Matcher) match.Matcher {
    if len(matchers) <= 1 {
        return nil
    }

    var (
        c []match.Matcher
        l int
    )
    for _, matcher := range matchers {
        if ml := matcher.Len(); ml == -1 {
            return nil
        } else {
            c = append(c, matcher)
            l += ml
        }
    }
    return match.NewRow(l, c...)
}

func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher {
    if len(matchers) <= 1 {
        return nil
    }

    var (
        hasAny    bool
        hasSuper  bool
        hasSingle bool
        min       int
        separator []rune
    )

    for i, matcher := range matchers {
        var sep []rune

        switch m := matcher.(type) {
        case match.Super:
            sep = []rune{}
            hasSuper = true

        case match.Any:
            sep = m.Separators
            hasAny = true

        case match.Single:
            sep = m.Separators
            hasSingle = true
            min++

        case match.List:
            if !m.Not {
                return nil
            }
            sep = m.List
            hasSingle = true
            min++

        default:
            return nil
        }

        // initialize
        if i == 0 {
            separator = sep
        }

        if runes.Equal(sep, separator) {
            continue
        }

        return nil
    }

    if hasSuper && !hasAny && !hasSingle {
        return match.NewSuper()
    }

    if hasAny && !hasSuper && !hasSingle {
        return match.NewAny(separator)
    }

    if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
        return match.NewMin(min)
    }

    every := match.NewEveryOf()

    if min > 0 {
        every.Add(match.NewMin(min))

        if !hasAny && !hasSuper {
            every.Add(match.NewMax(min))
        }
    }

    if len(separator) > 0 {
        every.Add(match.NewContains(string(separator), true))
    }

    return every
}

func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
    var done match.Matcher
    var left, right, count int

    for l := 0; l < len(matchers); l++ {
        for r := len(matchers); r > l; r-- {
            if glued := glueMatchers(matchers[l:r]); glued != nil {
                var swap bool

                if done == nil {
                    swap = true
                } else {
                    cl, gl := done.Len(), glued.Len()
                    swap = cl > -1 && gl > -1 && gl > cl
                    swap = swap || count < r-l
                }

                if swap {
                    done = glued
                    left = l
                    right = r
                    count = r - l
                }
            }
        }
    }

    if done == nil {
        return matchers
    }

    next := append(append([]match.Matcher{}, matchers[:left]...), done)
    if right < len(matchers) {
        next = append(next, matchers[right:]...)
    }

    if len(next) == len(matchers) {
        return next
    }

    return minimizeMatchers(next)
}

// minimizeAnyOf tries to apply some heuristics to minimize number of nodes in given tree
func minimizeTree(tree *ast.Node) *ast.Node {
    switch tree.Kind {
    case ast.KindAnyOf:
        return minimizeTreeAnyOf(tree)
    default:
        return nil
    }
}

// minimizeAnyOf tries to find common children of given node of AnyOf pattern
// it searches for common children from left and from right
// if any common children are found – then it returns new optimized ast tree
// else it returns nil
func minimizeTreeAnyOf(tree *ast.Node) *ast.Node {
    if !areOfSameKind(tree.Children, ast.KindPattern) {
        return nil
    }

    commonLeft, commonRight := commonChildren(tree.Children)
    commonLeftCount, commonRightCount := len(commonLeft), len(commonRight)
    if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts
        return nil
    }

    var result []*ast.Node
    if commonLeftCount > 0 {
        result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...))
    }

    var anyOf []*ast.Node
    for _, child := range tree.Children {
        reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount]
        var node *ast.Node
        if len(reuse) == 0 {
            // this pattern is completely reduced by commonLeft and commonRight patterns
            // so it become nothing
            node = ast.NewNode(ast.KindNothing, nil)
        } else {
            node = ast.NewNode(ast.KindPattern, nil, reuse...)
        }
        anyOf = appendIfUnique(anyOf, node)
    }
    switch {
    case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing:
        result = append(result, anyOf[0])
    case len(anyOf) > 1:
        result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...))
    }

    if commonRightCount > 0 {
        result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...))
    }

    return ast.NewNode(ast.KindPattern, nil, result...)
}
func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) {
    if len(nodes) <= 1 {
        return
    }

    // find node that has least number of children
    idx := leastChildren(nodes)
    if idx == -1 {
        return
    }
    tree := nodes[idx]
    treeLength := len(tree.Children)

    // allocate max able size for rightCommon slice
    // to get ability insert elements in reverse order (from end to start)
    // without sorting
    commonRight = make([]*ast.Node, treeLength)
    lastRight := treeLength // will use this to get results as commonRight[lastRight:]

    var (
        breakLeft   bool
        breakRight  bool
        commonTotal int
    )
    for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 {
        treeLeft := tree.Children[i]
        treeRight := tree.Children[j]

        for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ {
            // skip least children node
            if k == idx {
                continue
            }

            restLeft := nodes[k].Children[i]
            restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength]

            breakLeft = breakLeft || !treeLeft.Equal(restLeft)

            // disable searching for right common parts, if left part is already overlapping
            breakRight = breakRight || (!breakLeft && j <= i)
            breakRight = breakRight || !treeRight.Equal(restRight)
        }

        if !breakLeft {
            commonTotal++
            commonLeft = append(commonLeft, treeLeft)
        }
        if !breakRight {
            commonTotal++
            lastRight = j
            commonRight[j] = treeRight
        }
    }

    commonRight = commonRight[lastRight:]

    return
}

func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node {
    for _, n := range target {
        if reflect.DeepEqual(n, val) {
            return target
        }
    }
    return append(target, val)
}

func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool {
    for _, n := range nodes {
        if n.Kind != kind {
            return false
        }
    }
    return true
}

func leastChildren(nodes []*ast.Node) int {
    min := -1
    idx := -1
    for i, n := range nodes {
        if idx == -1 || (len(n.Children) < min) {
            min = len(n.Children)
            idx = i
        }
    }
    return idx
}

func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) {
    var matchers []match.Matcher
    for _, desc := range tree.Children {
        m, err := compile(desc, sep)
        if err != nil {
            return nil, err
        }
        matchers = append(matchers, optimizeMatcher(m))
    }
    return matchers, nil
}

func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) {
    switch tree.Kind {
    case ast.KindAnyOf:
        // todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go)
        if n := minimizeTree(tree); n != nil {
            return compile(n, sep)
        }
        matchers, err := compileTreeChildren(tree, sep)
        if err != nil {
            return nil, err
        }
        return match.NewAnyOf(matchers...), nil

    case ast.KindPattern:
        if len(tree.Children) == 0 {
            return match.NewNothing(), nil
        }
        matchers, err := compileTreeChildren(tree, sep)
        if err != nil {
            return nil, err
        }
        m, err = compileMatchers(minimizeMatchers(matchers))
        if err != nil {
            return nil, err
        }

    case ast.KindAny:
        m = match.NewAny(sep)

    case ast.KindSuper:
        m = match.NewSuper()

    case ast.KindSingle:
        m = match.NewSingle(sep)

    case ast.KindNothing:
        m = match.NewNothing()

    case ast.KindList:
        l := tree.Value.(ast.List)
        m = match.NewList([]rune(l.Chars), l.Not)

    case ast.KindRange:
        r := tree.Value.(ast.Range)
        m = match.NewRange(r.Lo, r.Hi, r.Not)

    case ast.KindText:
        t := tree.Value.(ast.Text)
        m = match.NewText(t.Text)

    default:
        return nil, fmt.Errorf("could not compile tree: unknown node type")
    }

    return optimizeMatcher(m), nil
}

func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) {
    m, err := compile(tree, sep)
    if err != nil {
        return nil, err
    }

    return m, nil
}
@@ -0,0 +1,80 @@
package glob

import (
    "github.com/gobwas/glob/compiler"
    "github.com/gobwas/glob/syntax"
)

// Glob represents compiled glob pattern.
type Glob interface {
    Match(string) bool
}

// Compile creates Glob for given pattern and strings (if any present after pattern) as separators.
// The pattern syntax is:
//
//    pattern:
//        { term }
//
//    term:
//        `*`         matches any sequence of non-separator characters
//        `**`        matches any sequence of characters
//        `?`         matches any single non-separator character
//        `[` [ `!` ] { character-range } `]`
//                    character class (must be non-empty)
//        `{` pattern-list `}`
//                    pattern alternatives
//        c           matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`)
//        `\` c       matches character c
//
//    character-range:
//        c           matches character c (c != `\\`, `-`, `]`)
//        `\` c       matches character c
//        lo `-` hi   matches character c for lo <= c <= hi
//
//    pattern-list:
//        pattern { `,` pattern }
//                    comma-separated (without spaces) patterns
//
func Compile(pattern string, separators ...rune) (Glob, error) {
    ast, err := syntax.Parse(pattern)
    if err != nil {
        return nil, err
    }

    matcher, err := compiler.Compile(ast, separators)
    if err != nil {
        return nil, err
    }

    return matcher, nil
}

// MustCompile is the same as Compile, except that if Compile returns error, this will panic
func MustCompile(pattern string, separators ...rune) Glob {
    g, err := Compile(pattern, separators...)
    if err != nil {
        panic(err)
    }

    return g
}

// QuoteMeta returns a string that quotes all glob pattern meta characters
// inside the argument text; For example, QuoteMeta(`{foo*}`) returns `\[foo\*\]`.
func QuoteMeta(s string) string {
    b := make([]byte, 2*len(s))
    // a byte loop is correct because all meta characters are ASCII
    j := 0
    for i := 0; i < len(s); i++ {
        if syntax.Special(s[i]) {
            b[j] = '\\'
            j++
        }
        b[j] = s[i]
        j++
    }
    return string(b[0:j])
}
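A short usage sketch of this API, with patterns taken from the library's documented examples; the separator argument is what distinguishes `*` from `**`:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Without separators, `*` spans the whole name.
	g := glob.MustCompile("*.github.com")
	fmt.Println(g.Match("api.github.com")) // true

	// With '.' as a separator, `*` stops at dots while `**` crosses them.
	g = glob.MustCompile("api.*.com", '.')
	fmt.Println(g.Match("api.github.com"))  // true
	fmt.Println(g.Match("api.gi.thub.com")) // false
	fmt.Println(glob.MustCompile("api.**.com", '.').Match("api.gi.thub.com")) // true

	// QuoteMeta escapes glob metacharacters.
	fmt.Println(glob.QuoteMeta("{foo*}")) // \{foo\*\}
}
```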
@@ -0,0 +1,45 @@
package match

import (
    "fmt"
    "github.com/gobwas/glob/util/strings"
)

type Any struct {
    Separators []rune
}

func NewAny(s []rune) Any {
    return Any{s}
}

func (self Any) Match(s string) bool {
    return strings.IndexAnyRunes(s, self.Separators) == -1
}

func (self Any) Index(s string) (int, []int) {
    found := strings.IndexAnyRunes(s, self.Separators)
    switch found {
    case -1:
    case 0:
        return 0, segments0
    default:
        s = s[:found]
    }

    segments := acquireSegments(len(s))
    for i := range s {
        segments = append(segments, i)
    }
    segments = append(segments, len(s))

    return 0, segments
}

func (self Any) Len() int {
    return lenNo
}

func (self Any) String() string {
    return fmt.Sprintf("<any:![%s]>", string(self.Separators))
}
@@ -0,0 +1,82 @@
package match

import "fmt"

type AnyOf struct {
    Matchers Matchers
}

func NewAnyOf(m ...Matcher) AnyOf {
    return AnyOf{Matchers(m)}
}

func (self *AnyOf) Add(m Matcher) error {
    self.Matchers = append(self.Matchers, m)
    return nil
}

func (self AnyOf) Match(s string) bool {
    for _, m := range self.Matchers {
        if m.Match(s) {
            return true
        }
    }

    return false
}

func (self AnyOf) Index(s string) (int, []int) {
    index := -1

    segments := acquireSegments(len(s))
    for _, m := range self.Matchers {
        idx, seg := m.Index(s)
        if idx == -1 {
            continue
        }

        if index == -1 || idx < index {
            index = idx
            segments = append(segments[:0], seg...)
            continue
        }

        if idx > index {
            continue
        }

        // here idx == index
        segments = appendMerge(segments, seg)
    }

    if index == -1 {
        releaseSegments(segments)
        return -1, nil
    }

    return index, segments
}

func (self AnyOf) Len() (l int) {
    l = -1
    for _, m := range self.Matchers {
        ml := m.Len()
        switch {
        case l == -1:
            l = ml
            continue

        case ml == -1:
            return -1

        case l != ml:
            return -1
        }
    }

    return
}

func (self AnyOf) String() string {
    return fmt.Sprintf("<any_of:[%s]>", self.Matchers)
}
@@ -0,0 +1,146 @@
package match

import (
    "fmt"
    "unicode/utf8"
)

type BTree struct {
    Value            Matcher
    Left             Matcher
    Right            Matcher
    ValueLengthRunes int
    LeftLengthRunes  int
    RightLengthRunes int
    LengthRunes      int
}

func NewBTree(Value, Left, Right Matcher) (tree BTree) {
    tree.Value = Value
    tree.Left = Left
    tree.Right = Right

    lenOk := true
    if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 {
        lenOk = false
    }

    if Left != nil {
        if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 {
            lenOk = false
        }
    }

    if Right != nil {
        if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 {
            lenOk = false
        }
    }

    if lenOk {
        tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes
    } else {
        tree.LengthRunes = -1
    }

    return tree
}

func (self BTree) Len() int {
    return self.LengthRunes
}

// todo?
func (self BTree) Index(s string) (int, []int) {
    return -1, nil
}

func (self BTree) Match(s string) bool {
    inputLen := len(s)

    // self.Length, self.RLen and self.LLen are values meaning the length of runes for each part
    // here we manipulating byte length for better optimizations
    // but these checks still works, cause minLen of 1-rune string is 1 byte.
    if self.LengthRunes != -1 && self.LengthRunes > inputLen {
        return false
    }

    // try to cut unnecessary parts
    // by knowledge of length of right and left part
    var offset, limit int
    if self.LeftLengthRunes >= 0 {
        offset = self.LeftLengthRunes
    }
    if self.RightLengthRunes >= 0 {
        limit = inputLen - self.RightLengthRunes
    } else {
        limit = inputLen
    }

    for offset < limit {
        // search for matching part in substring
        index, segments := self.Value.Index(s[offset:limit])
        if index == -1 {
            releaseSegments(segments)
            return false
        }

        l := s[:offset+index]
        var left bool
        if self.Left != nil {
            left = self.Left.Match(l)
        } else {
            left = l == ""
        }

        if left {
            for i := len(segments) - 1; i >= 0; i-- {
                length := segments[i]

                var right bool
                var r string
                // if there is no string for the right branch
                if inputLen <= offset+index+length {
                    r = ""
                } else {
                    r = s[offset+index+length:]
                }

                if self.Right != nil {
                    right = self.Right.Match(r)
                } else {
                    right = r == ""
                }

                if right {
                    releaseSegments(segments)
                    return true
                }
            }
        }

        _, step := utf8.DecodeRuneInString(s[offset+index:])
        offset += index + step

        releaseSegments(segments)
    }

    return false
}

func (self BTree) String() string {
    const n string = "<nil>"
    var l, r string
    if self.Left == nil {
        l = n
    } else {
        l = self.Left.String()
    }
    if self.Right == nil {
        r = n
    } else {
        r = self.Right.String()
    }

    return fmt.Sprintf("<btree:[%s<-%s->%s]>", l, self.Value, r)
}
@@ -0,0 +1,58 @@
package match

import (
    "fmt"
    "strings"
)

type Contains struct {
    Needle string
    Not    bool
}

func NewContains(needle string, not bool) Contains {
    return Contains{needle, not}
}

func (self Contains) Match(s string) bool {
    return strings.Contains(s, self.Needle) != self.Not
}

func (self Contains) Index(s string) (int, []int) {
    var offset int

    idx := strings.Index(s, self.Needle)

    if !self.Not {
        if idx == -1 {
            return -1, nil
        }

        offset = idx + len(self.Needle)
        if len(s) <= offset {
            return 0, []int{offset}
        }
        s = s[offset:]
    } else if idx != -1 {
        s = s[:idx]
    }

    segments := acquireSegments(len(s) + 1)
    for i := range s {
        segments = append(segments, offset+i)
    }

    return 0, append(segments, offset+len(s))
}

func (self Contains) Len() int {
    return lenNo
}

func (self Contains) String() string {
    var not string
    if self.Not {
        not = "!"
    }
    return fmt.Sprintf("<contains:%s[%s]>", not, self.Needle)
}
@@ -0,0 +1,99 @@
package match

import (
    "fmt"
)

type EveryOf struct {
    Matchers Matchers
}

func NewEveryOf(m ...Matcher) EveryOf {
    return EveryOf{Matchers(m)}
}

func (self *EveryOf) Add(m Matcher) error {
    self.Matchers = append(self.Matchers, m)
    return nil
}

func (self EveryOf) Len() (l int) {
    for _, m := range self.Matchers {
        if ml := m.Len(); l > 0 {
            l += ml
        } else {
            return -1
        }
    }

    return
}

func (self EveryOf) Index(s string) (int, []int) {
    var index int
    var offset int

    // make `in` with cap as len(s),
    // cause it is the maximum size of output segments values
    next := acquireSegments(len(s))
    current := acquireSegments(len(s))

    sub := s
    for i, m := range self.Matchers {
        idx, seg := m.Index(sub)
        if idx == -1 {
            releaseSegments(next)
            releaseSegments(current)
            return -1, nil
        }

        if i == 0 {
            // we use copy here instead of `current = seg`
            // cause seg is a slice from reusable buffer `in`
            // and it could be overwritten in next iteration
            current = append(current, seg...)
        } else {
            // clear the next
            next = next[:0]

            delta := index - (idx + offset)
            for _, ex := range current {
                for _, n := range seg {
                    if ex+delta == n {
                        next = append(next, n)
                    }
                }
            }

            if len(next) == 0 {
                releaseSegments(next)
                releaseSegments(current)
                return -1, nil
            }

            current = append(current[:0], next...)
        }

        index = idx + offset
        sub = s[index:]
        offset += idx
    }

    releaseSegments(next)

    return index, current
}

func (self EveryOf) Match(s string) bool {
    for _, m := range self.Matchers {
        if !m.Match(s) {
            return false
        }
    }

    return true
}

func (self EveryOf) String() string {
    return fmt.Sprintf("<every_of:[%s]>", self.Matchers)
}
@@ -0,0 +1,49 @@
package match

import (
    "fmt"
    "github.com/gobwas/glob/util/runes"
    "unicode/utf8"
)

type List struct {
    List []rune
    Not  bool
}

func NewList(list []rune, not bool) List {
    return List{list, not}
}

func (self List) Match(s string) bool {
    r, w := utf8.DecodeRuneInString(s)
    if len(s) > w {
        return false
    }

    inList := runes.IndexRune(self.List, r) != -1
    return inList == !self.Not
}

func (self List) Len() int {
    return lenOne
}

func (self List) Index(s string) (int, []int) {
    for i, r := range s {
        if self.Not == (runes.IndexRune(self.List, r) == -1) {
            return i, segmentsByRuneLength[utf8.RuneLen(r)]
        }
    }

    return -1, nil
}

func (self List) String() string {
    var not string
    if self.Not {
        not = "!"
    }

    return fmt.Sprintf("<list:%s[%s]>", not, string(self.List))
}
@@ -0,0 +1,81 @@
package match

// todo common table of rune's length

import (
    "fmt"
    "strings"
)

const lenOne = 1
const lenZero = 0
const lenNo = -1

type Matcher interface {
    Match(string) bool
    Index(string) (int, []int)
    Len() int
    String() string
}

type Matchers []Matcher

func (m Matchers) String() string {
    var s []string
    for _, matcher := range m {
        s = append(s, fmt.Sprint(matcher))
    }

    return fmt.Sprintf("%s", strings.Join(s, ","))
}

// appendMerge merges and sorts given already SORTED and UNIQUE segments.
func appendMerge(target, sub []int) []int {
    lt, ls := len(target), len(sub)
    out := make([]int, 0, lt+ls)

    for x, y := 0, 0; x < lt || y < ls; {
        if x >= lt {
            out = append(out, sub[y:]...)
            break
        }

        if y >= ls {
            out = append(out, target[x:]...)
            break
        }

        xValue := target[x]
        yValue := sub[y]

        switch {
        case xValue == yValue:
            out = append(out, xValue)
            x++
            y++

        case xValue < yValue:
            out = append(out, xValue)
            x++

        case yValue < xValue:
            out = append(out, yValue)
            y++
        }
    }

    target = append(target[:0], out...)

    return target
}

func reverseSegments(input []int) {
    l := len(input)
    m := l / 2

    for i := 0; i < m; i++ {
        input[i], input[l-i-1] = input[l-i-1], input[i]
    }
}
@@ -0,0 +1,49 @@
package match

import (
    "fmt"
    "unicode/utf8"
)

type Max struct {
    Limit int
}

func NewMax(l int) Max {
    return Max{l}
}

func (self Max) Match(s string) bool {
    var l int
    for range s {
        l += 1
        if l > self.Limit {
            return false
        }
    }

    return true
}

func (self Max) Index(s string) (int, []int) {
    segments := acquireSegments(self.Limit + 1)
    segments = append(segments, 0)
    var count int
    for i, r := range s {
        count++
        if count > self.Limit {
            break
        }
        segments = append(segments, i+utf8.RuneLen(r))
    }

    return 0, segments
}

func (self Max) Len() int {
    return lenNo
}

func (self Max) String() string {
    return fmt.Sprintf("<max:%d>", self.Limit)
}
@@ -0,0 +1,57 @@
package match

import (
    "fmt"
    "unicode/utf8"
)

type Min struct {
    Limit int
}

func NewMin(l int) Min {
    return Min{l}
}

func (self Min) Match(s string) bool {
    var l int
    for range s {
        l += 1
        if l >= self.Limit {
            return true
        }
    }

    return false
}

func (self Min) Index(s string) (int, []int) {
    var count int

    c := len(s) - self.Limit + 1
    if c <= 0 {
        return -1, nil
    }

    segments := acquireSegments(c)
    for i, r := range s {
        count++
        if count >= self.Limit {
            segments = append(segments, i+utf8.RuneLen(r))
        }
    }

    if len(segments) == 0 {
        return -1, nil
    }

    return 0, segments
}

func (self Min) Len() int {
    return lenNo
}

func (self Min) String() string {
    return fmt.Sprintf("<min:%d>", self.Limit)
}
| @@ -0,0 +1,27 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| ) | |||||
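| // Nothing matches only the empty string. | |||||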
| type Nothing struct{} | |||||
| func NewNothing() Nothing { | |||||
| return Nothing{} | |||||
| } | |||||
| func (self Nothing) Match(s string) bool { | |||||
| return len(s) == 0 | |||||
| } | |||||
| func (self Nothing) Index(s string) (int, []int) { | |||||
| return 0, segments0 | |||||
| } | |||||
| func (self Nothing) Len() int { | |||||
| return lenZero | |||||
| } | |||||
| func (self Nothing) String() string { | |||||
| return fmt.Sprintf("<nothing>") | |||||
| } | |||||
| @@ -0,0 +1,50 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "strings" | |||||
| "unicode/utf8" | |||||
| ) | |||||
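| // Prefix matches strings that begin with Prefix. | |||||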
| type Prefix struct { | |||||
| Prefix string | |||||
| } | |||||
| func NewPrefix(p string) Prefix { | |||||
| return Prefix{p} | |||||
| } | |||||
| func (self Prefix) Index(s string) (int, []int) { | |||||
| idx := strings.Index(s, self.Prefix) | |||||
| if idx == -1 { | |||||
| return -1, nil | |||||
| } | |||||
| length := len(self.Prefix) | |||||
| var sub string | |||||
| if len(s) > idx+length { | |||||
| sub = s[idx+length:] | |||||
| } else { | |||||
| sub = "" | |||||
| } | |||||
| segments := acquireSegments(len(sub) + 1) | |||||
| segments = append(segments, length) | |||||
| for i, r := range sub { | |||||
| segments = append(segments, length+i+utf8.RuneLen(r)) | |||||
| } | |||||
| return idx, segments | |||||
| } | |||||
| func (self Prefix) Len() int { | |||||
| return lenNo | |||||
| } | |||||
| func (self Prefix) Match(s string) bool { | |||||
| return strings.HasPrefix(s, self.Prefix) | |||||
| } | |||||
| func (self Prefix) String() string { | |||||
| return fmt.Sprintf("<prefix:%s>", self.Prefix) | |||||
| } | |||||
| @@ -0,0 +1,55 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "strings" | |||||
| "unicode/utf8" | |||||
| sutil "github.com/gobwas/glob/util/strings" | |||||
| ) | |||||
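| // PrefixAny matches strings that begin with Prefix and contain no separator rune after it. | |||||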
| type PrefixAny struct { | |||||
| Prefix string | |||||
| Separators []rune | |||||
| } | |||||
| func NewPrefixAny(s string, sep []rune) PrefixAny { | |||||
| return PrefixAny{s, sep} | |||||
| } | |||||
| func (self PrefixAny) Index(s string) (int, []int) { | |||||
| idx := strings.Index(s, self.Prefix) | |||||
| if idx == -1 { | |||||
| return -1, nil | |||||
| } | |||||
| n := len(self.Prefix) | |||||
| sub := s[idx+n:] | |||||
| i := sutil.IndexAnyRunes(sub, self.Separators) | |||||
| if i > -1 { | |||||
| sub = sub[:i] | |||||
| } | |||||
| seg := acquireSegments(len(sub) + 1) | |||||
| seg = append(seg, n) | |||||
| for i, r := range sub { | |||||
| seg = append(seg, n+i+utf8.RuneLen(r)) | |||||
| } | |||||
| return idx, seg | |||||
| } | |||||
| func (self PrefixAny) Len() int { | |||||
| return lenNo | |||||
| } | |||||
| func (self PrefixAny) Match(s string) bool { | |||||
| if !strings.HasPrefix(s, self.Prefix) { | |||||
| return false | |||||
| } | |||||
| return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1 | |||||
| } | |||||
| func (self PrefixAny) String() string { | |||||
| return fmt.Sprintf("<prefix_any:%s![%s]>", self.Prefix, string(self.Separators)) | |||||
| } | |||||
| @@ -0,0 +1,62 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "strings" | |||||
| ) | |||||
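| // PrefixSuffix matches strings that begin with Prefix and end with Suffix. | |||||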
| type PrefixSuffix struct { | |||||
| Prefix, Suffix string | |||||
| } | |||||
| func NewPrefixSuffix(p, s string) PrefixSuffix { | |||||
| return PrefixSuffix{p, s} | |||||
| } | |||||
| func (self PrefixSuffix) Index(s string) (int, []int) { | |||||
| prefixIdx := strings.Index(s, self.Prefix) | |||||
| if prefixIdx == -1 { | |||||
| return -1, nil | |||||
| } | |||||
| suffixLen := len(self.Suffix) | |||||
| if suffixLen <= 0 { | |||||
| return prefixIdx, []int{len(s) - prefixIdx} | |||||
| } | |||||
| if (len(s) - prefixIdx) <= 0 { | |||||
| return -1, nil | |||||
| } | |||||
| segments := acquireSegments(len(s) - prefixIdx) | |||||
| for sub := s[prefixIdx:]; ; { | |||||
| suffixIdx := strings.LastIndex(sub, self.Suffix) | |||||
| if suffixIdx == -1 { | |||||
| break | |||||
| } | |||||
| segments = append(segments, suffixIdx+suffixLen) | |||||
| sub = sub[:suffixIdx] | |||||
| } | |||||
| if len(segments) == 0 { | |||||
| releaseSegments(segments) | |||||
| return -1, nil | |||||
| } | |||||
| reverseSegments(segments) | |||||
| return prefixIdx, segments | |||||
| } | |||||
| func (self PrefixSuffix) Len() int { | |||||
| return lenNo | |||||
| } | |||||
| func (self PrefixSuffix) Match(s string) bool { | |||||
| return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix) | |||||
| } | |||||
| func (self PrefixSuffix) String() string { | |||||
| return fmt.Sprintf("<prefix_suffix:[%s,%s]>", self.Prefix, self.Suffix) | |||||
| } | |||||
| @@ -0,0 +1,48 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "unicode/utf8" | |||||
| ) | |||||
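| // Range matches a single rune inside [Lo, Hi], or outside that range when Not is set. | |||||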
| type Range struct { | |||||
| Lo, Hi rune | |||||
| Not bool | |||||
| } | |||||
| func NewRange(lo, hi rune, not bool) Range { | |||||
| return Range{lo, hi, not} | |||||
| } | |||||
| func (self Range) Len() int { | |||||
| return lenOne | |||||
| } | |||||
| func (self Range) Match(s string) bool { | |||||
| r, w := utf8.DecodeRuneInString(s) | |||||
| if len(s) > w { | |||||
| return false | |||||
| } | |||||
| inRange := r >= self.Lo && r <= self.Hi | |||||
| return inRange == !self.Not | |||||
| } | |||||
| func (self Range) Index(s string) (int, []int) { | |||||
| for i, r := range s { | |||||
| if self.Not != (r >= self.Lo && r <= self.Hi) { | |||||
| return i, segmentsByRuneLength[utf8.RuneLen(r)] | |||||
| } | |||||
| } | |||||
| return -1, nil | |||||
| } | |||||
| func (self Range) String() string { | |||||
| var not string | |||||
| if self.Not { | |||||
| not = "!" | |||||
| } | |||||
| return fmt.Sprintf("<range:%s[%s,%s]>", not, string(self.Lo), string(self.Hi)) | |||||
| } | |||||
| @@ -0,0 +1,77 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| ) | |||||
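| // Row matches a fixed-length sequence of fixed-length matchers applied back to back. | |||||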
| type Row struct { | |||||
| Matchers Matchers | |||||
| RunesLength int | |||||
| Segments []int | |||||
| } | |||||
| func NewRow(len int, m ...Matcher) Row { | |||||
| return Row{ | |||||
| Matchers: Matchers(m), | |||||
| RunesLength: len, | |||||
| Segments: []int{len}, | |||||
| } | |||||
| } | |||||
| func (self Row) matchAll(s string) bool { | |||||
| var idx int | |||||
| for _, m := range self.Matchers { | |||||
| length := m.Len() | |||||
| var next, i int | |||||
| for next = range s[idx:] { | |||||
| i++ | |||||
| if i == length { | |||||
| break | |||||
| } | |||||
| } | |||||
| if i < length || !m.Match(s[idx:idx+next+1]) { | |||||
| return false | |||||
| } | |||||
| idx += next + 1 | |||||
| } | |||||
| return true | |||||
| } | |||||
| func (self Row) lenOk(s string) bool { | |||||
| var i int | |||||
| for range s { | |||||
| i++ | |||||
| if i > self.RunesLength { | |||||
| return false | |||||
| } | |||||
| } | |||||
| return self.RunesLength == i | |||||
| } | |||||
| func (self Row) Match(s string) bool { | |||||
| return self.lenOk(s) && self.matchAll(s) | |||||
| } | |||||
| func (self Row) Len() (l int) { | |||||
| return self.RunesLength | |||||
| } | |||||
| func (self Row) Index(s string) (int, []int) { | |||||
| for i := range s { | |||||
| if len(s[i:]) < self.RunesLength { | |||||
| break | |||||
| } | |||||
| if self.matchAll(s[i:]) { | |||||
| return i, self.Segments | |||||
| } | |||||
| } | |||||
| return -1, nil | |||||
| } | |||||
| func (self Row) String() string { | |||||
| return fmt.Sprintf("<row_%d:[%s]>", self.RunesLength, self.Matchers) | |||||
| } | |||||
| @@ -0,0 +1,91 @@ | |||||
| package match | |||||
| import ( | |||||
| "sync" | |||||
| ) | |||||
| type SomePool interface { | |||||
| Get() []int | |||||
| Put([]int) | |||||
| } | |||||
| var segmentsPools [1024]sync.Pool | |||||
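| // toPowerOfTwo rounds v up to the nearest power of two. | |||||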
| func toPowerOfTwo(v int) int { | |||||
| v-- | |||||
| v |= v >> 1 | |||||
| v |= v >> 2 | |||||
| v |= v >> 4 | |||||
| v |= v >> 8 | |||||
| v |= v >> 16 | |||||
| v++ | |||||
| return v | |||||
| } | |||||
| const ( | |||||
| cacheFrom = 16 | |||||
| cacheToAndHigher = 1024 | |||||
| cacheFromIndex = 15 | |||||
| cacheToAndHigherIndex = 1023 | |||||
| ) | |||||
| var ( | |||||
| segments0 = []int{0} | |||||
| segments1 = []int{1} | |||||
| segments2 = []int{2} | |||||
| segments3 = []int{3} | |||||
| segments4 = []int{4} | |||||
| ) | |||||
| var segmentsByRuneLength [5][]int = [5][]int{ | |||||
| 0: segments0, | |||||
| 1: segments1, | |||||
| 2: segments2, | |||||
| 3: segments3, | |||||
| 4: segments4, | |||||
| } | |||||
| func init() { | |||||
| for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 { | |||||
| func(i int) { | |||||
| segmentsPools[i-1] = sync.Pool{New: func() interface{} { | |||||
| return make([]int, 0, i) | |||||
| }} | |||||
| }(i) | |||||
| } | |||||
| } | |||||
| func getTableIndex(c int) int { | |||||
| p := toPowerOfTwo(c) | |||||
| switch { | |||||
| case p >= cacheToAndHigher: | |||||
| return cacheToAndHigherIndex | |||||
| case p <= cacheFrom: | |||||
| return cacheFromIndex | |||||
| default: | |||||
| return p - 1 | |||||
| } | |||||
| } | |||||
| func acquireSegments(c int) []int { | |||||
| // Allocating a []int with capacity below cacheFrom is faster | |||||
| // than acquiring one from the pool. | |||||
| if c < cacheFrom { | |||||
| return make([]int, 0, c) | |||||
| } | |||||
| return segmentsPools[getTableIndex(c)].Get().([]int)[:0] | |||||
| } | |||||
| func releaseSegments(s []int) { | |||||
| c := cap(s) | |||||
| // Slices with capacity below cacheFrom are cheaper to reallocate | |||||
| // than to pool, so do not put them back. | |||||
| if c < cacheFrom { | |||||
| return | |||||
| } | |||||
| segmentsPools[getTableIndex(c)].Put(s) | |||||
| } | |||||
| @@ -0,0 +1,43 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/gobwas/glob/util/runes" | |||||
| "unicode/utf8" | |||||
| ) | |||||
| // Single represents ? and matches exactly one non-separator rune. | |||||
| type Single struct { | |||||
| Separators []rune | |||||
| } | |||||
| func NewSingle(s []rune) Single { | |||||
| return Single{s} | |||||
| } | |||||
| func (self Single) Match(s string) bool { | |||||
| r, w := utf8.DecodeRuneInString(s) | |||||
| if len(s) > w { | |||||
| return false | |||||
| } | |||||
| return runes.IndexRune(self.Separators, r) == -1 | |||||
| } | |||||
| func (self Single) Len() int { | |||||
| return lenOne | |||||
| } | |||||
| func (self Single) Index(s string) (int, []int) { | |||||
| for i, r := range s { | |||||
| if runes.IndexRune(self.Separators, r) == -1 { | |||||
| return i, segmentsByRuneLength[utf8.RuneLen(r)] | |||||
| } | |||||
| } | |||||
| return -1, nil | |||||
| } | |||||
| func (self Single) String() string { | |||||
| return fmt.Sprintf("<single:![%s]>", string(self.Separators)) | |||||
| } | |||||
| @@ -0,0 +1,35 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "strings" | |||||
| ) | |||||
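| // Suffix matches strings that end with Suffix. | |||||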
| type Suffix struct { | |||||
| Suffix string | |||||
| } | |||||
| func NewSuffix(s string) Suffix { | |||||
| return Suffix{s} | |||||
| } | |||||
| func (self Suffix) Len() int { | |||||
| return lenNo | |||||
| } | |||||
| func (self Suffix) Match(s string) bool { | |||||
| return strings.HasSuffix(s, self.Suffix) | |||||
| } | |||||
| func (self Suffix) Index(s string) (int, []int) { | |||||
| idx := strings.Index(s, self.Suffix) | |||||
| if idx == -1 { | |||||
| return -1, nil | |||||
| } | |||||
| return 0, []int{idx + len(self.Suffix)} | |||||
| } | |||||
| func (self Suffix) String() string { | |||||
| return fmt.Sprintf("<suffix:%s>", self.Suffix) | |||||
| } | |||||
| @@ -0,0 +1,43 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "strings" | |||||
| sutil "github.com/gobwas/glob/util/strings" | |||||
| ) | |||||
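| // SuffixAny matches strings that end with Suffix and contain no separator rune before it. | |||||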
| type SuffixAny struct { | |||||
| Suffix string | |||||
| Separators []rune | |||||
| } | |||||
| func NewSuffixAny(s string, sep []rune) SuffixAny { | |||||
| return SuffixAny{s, sep} | |||||
| } | |||||
| func (self SuffixAny) Index(s string) (int, []int) { | |||||
| idx := strings.Index(s, self.Suffix) | |||||
| if idx == -1 { | |||||
| return -1, nil | |||||
| } | |||||
| i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1 | |||||
| return i, []int{idx + len(self.Suffix) - i} | |||||
| } | |||||
| func (self SuffixAny) Len() int { | |||||
| return lenNo | |||||
| } | |||||
| func (self SuffixAny) Match(s string) bool { | |||||
| if !strings.HasSuffix(s, self.Suffix) { | |||||
| return false | |||||
| } | |||||
| return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1 | |||||
| } | |||||
| func (self SuffixAny) String() string { | |||||
| return fmt.Sprintf("<suffix_any:![%s]%s>", string(self.Separators), self.Suffix) | |||||
| } | |||||
| @@ -0,0 +1,33 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| ) | |||||
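| // Super represents ** and matches any string. | |||||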
| type Super struct{} | |||||
| func NewSuper() Super { | |||||
| return Super{} | |||||
| } | |||||
| func (self Super) Match(s string) bool { | |||||
| return true | |||||
| } | |||||
| func (self Super) Len() int { | |||||
| return lenNo | |||||
| } | |||||
| func (self Super) Index(s string) (int, []int) { | |||||
| segments := acquireSegments(len(s) + 1) | |||||
| for i := range s { | |||||
| segments = append(segments, i) | |||||
| } | |||||
| segments = append(segments, len(s)) | |||||
| return 0, segments | |||||
| } | |||||
| func (self Super) String() string { | |||||
| return fmt.Sprintf("<super>") | |||||
| } | |||||
| @@ -0,0 +1,45 @@ | |||||
| package match | |||||
| import ( | |||||
| "fmt" | |||||
| "strings" | |||||
| "unicode/utf8" | |||||
| ) | |||||
| // Text represents a raw (literal) string to match. | |||||
| type Text struct { | |||||
| Str string | |||||
| RunesLength int | |||||
| BytesLength int | |||||
| Segments []int | |||||
| } | |||||
| func NewText(s string) Text { | |||||
| return Text{ | |||||
| Str: s, | |||||
| RunesLength: utf8.RuneCountInString(s), | |||||
| BytesLength: len(s), | |||||
| Segments: []int{len(s)}, | |||||
| } | |||||
| } | |||||
| func (self Text) Match(s string) bool { | |||||
| return self.Str == s | |||||
| } | |||||
| func (self Text) Len() int { | |||||
| return self.RunesLength | |||||
| } | |||||
| func (self Text) Index(s string) (int, []int) { | |||||
| index := strings.Index(s, self.Str) | |||||
| if index == -1 { | |||||
| return -1, nil | |||||
| } | |||||
| return index, self.Segments | |||||
| } | |||||
| func (self Text) String() string { | |||||
| return fmt.Sprintf("<text:`%v`>", self.Str) | |||||
| } | |||||
| @@ -0,0 +1,148 @@ | |||||
| # glob.[go](https://golang.org) | |||||
| [![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url] | |||||
| > Go Globbing Library. | |||||
| ## Install | |||||
| ```shell | |||||
| go get github.com/gobwas/glob | |||||
| ``` | |||||
| ## Example | |||||
| ```go | |||||
| package main | |||||
| import "github.com/gobwas/glob" | |||||
| func main() { | |||||
| var g glob.Glob | |||||
| // create simple glob | |||||
| g = glob.MustCompile("*.github.com") | |||||
| g.Match("api.github.com") // true | |||||
| // quote meta characters and then create simple glob | |||||
| g = glob.MustCompile(glob.QuoteMeta("*.github.com")) | |||||
| g.Match("*.github.com") // true | |||||
| // create new glob with set of delimiters as ["."] | |||||
| g = glob.MustCompile("api.*.com", '.') | |||||
| g.Match("api.github.com") // true | |||||
| g.Match("api.gi.hub.com") // false | |||||
| // create new glob with set of delimiters as ["."] | |||||
| // but now with super wildcard | |||||
| g = glob.MustCompile("api.**.com", '.') | |||||
| g.Match("api.github.com") // true | |||||
| g.Match("api.gi.hub.com") // true | |||||
| // create glob with single symbol wildcard | |||||
| g = glob.MustCompile("?at") | |||||
| g.Match("cat") // true | |||||
| g.Match("fat") // true | |||||
| g.Match("at") // false | |||||
| // create glob with single symbol wildcard and delimiters ['f'] | |||||
| g = glob.MustCompile("?at", 'f') | |||||
| g.Match("cat") // true | |||||
| g.Match("fat") // false | |||||
| g.Match("at") // false | |||||
| // create glob with character-list matchers | |||||
| g = glob.MustCompile("[abc]at") | |||||
| g.Match("cat") // true | |||||
| g.Match("bat") // true | |||||
| g.Match("fat") // false | |||||
| g.Match("at") // false | |||||
| // create glob with character-list matchers | |||||
| g = glob.MustCompile("[!abc]at") | |||||
| g.Match("cat") // false | |||||
| g.Match("bat") // false | |||||
| g.Match("fat") // true | |||||
| g.Match("at") // false | |||||
| // create glob with character-range matchers | |||||
| g = glob.MustCompile("[a-c]at") | |||||
| g.Match("cat") // true | |||||
| g.Match("bat") // true | |||||
| g.Match("fat") // false | |||||
| g.Match("at") // false | |||||
| // create glob with character-range matchers | |||||
| g = glob.MustCompile("[!a-c]at") | |||||
| g.Match("cat") // false | |||||
| g.Match("bat") // false | |||||
| g.Match("fat") // true | |||||
| g.Match("at") // false | |||||
| // create glob with pattern-alternatives list | |||||
| g = glob.MustCompile("{cat,bat,[fr]at}") | |||||
| g.Match("cat") // true | |||||
| g.Match("bat") // true | |||||
| g.Match("fat") // true | |||||
| g.Match("rat") // true | |||||
| g.Match("at") // false | |||||
| g.Match("zat") // false | |||||
| } | |||||
| ``` | |||||
| ## Performance | |||||
| This library is designed for compile-once patterns: compiling a pattern may take some time, but | |||||
| matching strings against the compiled `glob.Glob` is much faster than re-parsing the pattern on every call. | |||||
| If you do not reuse the compiled `glob.Glob` object and instead run `g := glob.MustCompile(pattern); g.Match(...)` every time, your code will be much slower. | |||||
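| A minimal sketch of the intended compile-once usage (the pattern and expected results come from the benchmark table below): | |||||
| ```go | |||||
| package main | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/gobwas/glob" | |||||
| ) | |||||
| // Compile the pattern once, at package init, and reuse the matcher. | |||||
| var g = glob.MustCompile("https://*.google.*") | |||||
| func main() { | |||||
| fmt.Println(g.Match("https://account.google.com")) // true | |||||
| fmt.Println(g.Match("https://google.com")) // false | |||||
| } | |||||
| ``` | |||||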
| Run `go test -bench=.` from source root to see the benchmarks: | |||||
| Pattern | Fixture | Match | Speed (ns/op) | |||||
| --------|---------|-------|-------------- | |||||
| `[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432 | |||||
| `[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199 | |||||
| `https://*.google.*` | `https://account.google.com` | `true` | 96 | |||||
| `https://*.google.*` | `https://google.com` | `false` | 66 | |||||
| `{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163 | |||||
| `{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197 | |||||
| `{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22 | |||||
| `{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24 | |||||
| `abc*` | `abcdef` | `true` | 8.15 | |||||
| `abc*` | `af` | `false` | 5.68 | |||||
| `*def` | `abcdef` | `true` | 8.84 | |||||
| `*def` | `af` | `false` | 5.74 | |||||
| `ab*ef` | `abcdef` | `true` | 15.2 | |||||
| `ab*ef` | `af` | `false` | 10.4 | |||||
| The same things with `regexp` package: | |||||
| Pattern | Fixture | Match | Speed (ns/op) | |||||
| --------|---------|-------|-------------- | |||||
| `^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553 | |||||
| `^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383 | |||||
| `^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205 | |||||
| `^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767 | |||||
| `^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435 | |||||
| `^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674 | |||||
| `^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039 | |||||
| `^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272 | |||||
| `^abc.*$` | `abcdef` | `true` | 237 | |||||
| `^abc.*$` | `af` | `false` | 100 | |||||
| `^.*def$` | `abcdef` | `true` | 464 | |||||
| `^.*def$` | `af` | `false` | 265 | |||||
| `^ab.*ef$` | `abcdef` | `true` | 375 | |||||
| `^ab.*ef$` | `af` | `false` | 145 | |||||
| [godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg | |||||
| [godoc-url]: https://godoc.org/github.com/gobwas/glob | |||||
| [travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master | |||||
| [travis-url]: https://travis-ci.org/gobwas/glob | |||||
| ## Syntax | |||||
| The syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm), | |||||
| except that `**` (the "super-asterisk") is not sensitive to separators; see the sketch below. | |||||
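| A small sketch of that difference, using `/` as an assumed delimiter (the file paths are illustrative only): | |||||
| ```go | |||||
| package main | |||||
| import ( | |||||
| "fmt" | |||||
| "github.com/gobwas/glob" | |||||
| ) | |||||
| func main() { | |||||
| // With '/' given as a separator, * stops at it while ** crosses it. | |||||
| single := glob.MustCompile("src/*.go", '/') | |||||
| super := glob.MustCompile("src/**.go", '/') | |||||
| fmt.Println(single.Match("src/main.go")) // true | |||||
| fmt.Println(single.Match("src/pkg/util.go")) // false: * does not cross '/' | |||||
| fmt.Println(super.Match("src/pkg/util.go")) // true: ** ignores separators | |||||
| } | |||||
| ``` | |||||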
| @@ -0,0 +1,122 @@ | |||||
| package ast | |||||
| import ( | |||||
| "bytes" | |||||
| "fmt" | |||||
| ) | |||||
| type Node struct { | |||||
| Parent *Node | |||||
| Children []*Node | |||||
| Value interface{} | |||||
| Kind Kind | |||||
| } | |||||
| func NewNode(k Kind, v interface{}, ch ...*Node) *Node { | |||||
| n := &Node{ | |||||
| Kind: k, | |||||
| Value: v, | |||||
| } | |||||
| for _, c := range ch { | |||||
| Insert(n, c) | |||||
| } | |||||
| return n | |||||
| } | |||||
| func (a *Node) Equal(b *Node) bool { | |||||
| if a.Kind != b.Kind { | |||||
| return false | |||||
| } | |||||
| if a.Value != b.Value { | |||||
| return false | |||||
| } | |||||
| if len(a.Children) != len(b.Children) { | |||||
| return false | |||||
| } | |||||
| for i, c := range a.Children { | |||||
| if !c.Equal(b.Children[i]) { | |||||
| return false | |||||
| } | |||||
| } | |||||
| return true | |||||
| } | |||||
| func (a *Node) String() string { | |||||
| var buf bytes.Buffer | |||||
| buf.WriteString(a.Kind.String()) | |||||
| if a.Value != nil { | |||||
| buf.WriteString(" =") | |||||
| buf.WriteString(fmt.Sprintf("%v", a.Value)) | |||||
| } | |||||
| if len(a.Children) > 0 { | |||||
| buf.WriteString(" [") | |||||
| for i, c := range a.Children { | |||||
| if i > 0 { | |||||
| buf.WriteString(", ") | |||||
| } | |||||
| buf.WriteString(c.String()) | |||||
| } | |||||
| buf.WriteString("]") | |||||
| } | |||||
| return buf.String() | |||||
| } | |||||
| func Insert(parent *Node, children ...*Node) { | |||||
| parent.Children = append(parent.Children, children...) | |||||
| for _, ch := range children { | |||||
| ch.Parent = parent | |||||
| } | |||||
| } | |||||
| type List struct { | |||||
| Not bool | |||||
| Chars string | |||||
| } | |||||
| type Range struct { | |||||
| Not bool | |||||
| Lo, Hi rune | |||||
| } | |||||
| type Text struct { | |||||
| Text string | |||||
| } | |||||
| type Kind int | |||||
| const ( | |||||
| KindNothing Kind = iota | |||||
| KindPattern | |||||
| KindList | |||||
| KindRange | |||||
| KindText | |||||
| KindAny | |||||
| KindSuper | |||||
| KindSingle | |||||
| KindAnyOf | |||||
| ) | |||||
| func (k Kind) String() string { | |||||
| switch k { | |||||
| case KindNothing: | |||||
| return "Nothing" | |||||
| case KindPattern: | |||||
| return "Pattern" | |||||
| case KindList: | |||||
| return "List" | |||||
| case KindRange: | |||||
| return "Range" | |||||
| case KindText: | |||||
| return "Text" | |||||
| case KindAny: | |||||
| return "Any" | |||||
| case KindSuper: | |||||
| return "Super" | |||||
| case KindSingle: | |||||
| return "Single" | |||||
| case KindAnyOf: | |||||
| return "AnyOf" | |||||
| default: | |||||
| return "" | |||||
| } | |||||
| } | |||||
| @@ -0,0 +1,157 @@ | |||||
| package ast | |||||
| import ( | |||||
| "errors" | |||||
| "fmt" | |||||
| "github.com/gobwas/glob/syntax/lexer" | |||||
| "unicode/utf8" | |||||
| ) | |||||
| type Lexer interface { | |||||
| Next() lexer.Token | |||||
| } | |||||
| type parseFn func(*Node, Lexer) (parseFn, *Node, error) | |||||
| func Parse(lexer Lexer) (*Node, error) { | |||||
| var parser parseFn | |||||
| root := NewNode(KindPattern, nil) | |||||
| var ( | |||||
| tree *Node | |||||
| err error | |||||
| ) | |||||
| for parser, tree = parserMain, root; parser != nil; { | |||||
| parser, tree, err = parser(tree, lexer) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| } | |||||
| return root, nil | |||||
| } | |||||
| func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) { | |||||
| for { | |||||
| token := lex.Next() | |||||
| switch token.Type { | |||||
| case lexer.EOF: | |||||
| return nil, tree, nil | |||||
| case lexer.Error: | |||||
| return nil, tree, errors.New(token.Raw) | |||||
| case lexer.Text: | |||||
| Insert(tree, NewNode(KindText, Text{token.Raw})) | |||||
| return parserMain, tree, nil | |||||
| case lexer.Any: | |||||
| Insert(tree, NewNode(KindAny, nil)) | |||||
| return parserMain, tree, nil | |||||
| case lexer.Super: | |||||
| Insert(tree, NewNode(KindSuper, nil)) | |||||
| return parserMain, tree, nil | |||||
| case lexer.Single: | |||||
| Insert(tree, NewNode(KindSingle, nil)) | |||||
| return parserMain, tree, nil | |||||
| case lexer.RangeOpen: | |||||
| return parserRange, tree, nil | |||||
| case lexer.TermsOpen: | |||||
| a := NewNode(KindAnyOf, nil) | |||||
| Insert(tree, a) | |||||
| p := NewNode(KindPattern, nil) | |||||
| Insert(a, p) | |||||
| return parserMain, p, nil | |||||
| case lexer.Separator: | |||||
| p := NewNode(KindPattern, nil) | |||||
| Insert(tree.Parent, p) | |||||
| return parserMain, p, nil | |||||
| case lexer.TermsClose: | |||||
| return parserMain, tree.Parent.Parent, nil | |||||
| default: | |||||
| return nil, tree, fmt.Errorf("unexpected token: %s", token) | |||||
| } | |||||
| } | |||||
| return nil, tree, fmt.Errorf("unknown error") | |||||
| } | |||||
| func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) { | |||||
| var ( | |||||
| not bool | |||||
| lo rune | |||||
| hi rune | |||||
| chars string | |||||
| ) | |||||
| for { | |||||
| token := lex.Next() | |||||
| switch token.Type { | |||||
| case lexer.EOF: | |||||
| return nil, tree, errors.New("unexpected end") | |||||
| case lexer.Error: | |||||
| return nil, tree, errors.New(token.Raw) | |||||
| case lexer.Not: | |||||
| not = true | |||||
| case lexer.RangeLo: | |||||
| r, w := utf8.DecodeRuneInString(token.Raw) | |||||
| if len(token.Raw) > w { | |||||
| return nil, tree, fmt.Errorf("unexpected length of lo character") | |||||
| } | |||||
| lo = r | |||||
| case lexer.RangeBetween: | |||||
| // nothing to do for the '-' between lo and hi | |||||
| case lexer.RangeHi: | |||||
| r, w := utf8.DecodeRuneInString(token.Raw) | |||||
| if len(token.Raw) > w { | |||||
| return nil, tree, fmt.Errorf("unexpected length of hi character") | |||||
| } | |||||
| hi = r | |||||
| if hi < lo { | |||||
| return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo)) | |||||
| } | |||||
| case lexer.Text: | |||||
| chars = token.Raw | |||||
| case lexer.RangeClose: | |||||
| isRange := lo != 0 && hi != 0 | |||||
| isChars := chars != "" | |||||
| if isChars == isRange { | |||||
| return nil, tree, fmt.Errorf("could not parse range") | |||||
| } | |||||
| if isRange { | |||||
| Insert(tree, NewNode(KindRange, Range{ | |||||
| Lo: lo, | |||||
| Hi: hi, | |||||
| Not: not, | |||||
| })) | |||||
| } else { | |||||
| Insert(tree, NewNode(KindList, List{ | |||||
| Chars: chars, | |||||
| Not: not, | |||||
| })) | |||||
| } | |||||
| return parserMain, tree, nil | |||||
| } | |||||
| } | |||||
| } | |||||
| @@ -0,0 +1,273 @@ | |||||
| package lexer | |||||
| import ( | |||||
| "bytes" | |||||
| "fmt" | |||||
| "github.com/gobwas/glob/util/runes" | |||||
| "unicode/utf8" | |||||
| ) | |||||
| const ( | |||||
| char_any = '*' | |||||
| char_comma = ',' | |||||
| char_single = '?' | |||||
| char_escape = '\\' | |||||
| char_range_open = '[' | |||||
| char_range_close = ']' | |||||
| char_terms_open = '{' | |||||
| char_terms_close = '}' | |||||
| char_range_not = '!' | |||||
| char_range_between = '-' | |||||
| ) | |||||
| var specials = []byte{ | |||||
| char_any, | |||||
| char_single, | |||||
| char_escape, | |||||
| char_range_open, | |||||
| char_range_close, | |||||
| char_terms_open, | |||||
| char_terms_close, | |||||
| } | |||||
| func Special(c byte) bool { | |||||
| return bytes.IndexByte(specials, c) != -1 | |||||
| } | |||||
| type tokens []Token | |||||
| func (i *tokens) shift() (ret Token) { | |||||
| ret = (*i)[0] | |||||
| copy(*i, (*i)[1:]) | |||||
| *i = (*i)[:len(*i)-1] | |||||
| return | |||||
| } | |||||
| func (i *tokens) push(v Token) { | |||||
| *i = append(*i, v) | |||||
| } | |||||
| func (i *tokens) empty() bool { | |||||
| return len(*i) == 0 | |||||
| } | |||||
| var eof rune = 0 | |||||
| type lexer struct { | |||||
| data string | |||||
| pos int | |||||
| err error | |||||
| tokens tokens | |||||
| termsLevel int | |||||
| lastRune rune | |||||
| lastRuneSize int | |||||
| hasRune bool | |||||
| } | |||||
| func NewLexer(source string) *lexer { | |||||
| l := &lexer{ | |||||
| data: source, | |||||
| tokens: tokens(make([]Token, 0, 4)), | |||||
| } | |||||
| return l | |||||
| } | |||||
| func (l *lexer) Next() Token { | |||||
| if l.err != nil { | |||||
| return Token{Error, l.err.Error()} | |||||
| } | |||||
| if !l.tokens.empty() { | |||||
| return l.tokens.shift() | |||||
| } | |||||
| l.fetchItem() | |||||
| return l.Next() | |||||
| } | |||||
| func (l *lexer) peek() (r rune, w int) { | |||||
| if l.pos == len(l.data) { | |||||
| return eof, 0 | |||||
| } | |||||
| r, w = utf8.DecodeRuneInString(l.data[l.pos:]) | |||||
| if r == utf8.RuneError { | |||||
| l.errorf("could not read rune") | |||||
| r = eof | |||||
| w = 0 | |||||
| } | |||||
| return | |||||
| } | |||||
| func (l *lexer) read() rune { | |||||
| if l.hasRune { | |||||
| l.hasRune = false | |||||
| l.seek(l.lastRuneSize) | |||||
| return l.lastRune | |||||
| } | |||||
| r, s := l.peek() | |||||
| l.seek(s) | |||||
| l.lastRune = r | |||||
| l.lastRuneSize = s | |||||
| return r | |||||
| } | |||||
| func (l *lexer) seek(w int) { | |||||
| l.pos += w | |||||
| } | |||||
| func (l *lexer) unread() { | |||||
| if l.hasRune { | |||||
| l.errorf("could not unread rune") | |||||
| return | |||||
| } | |||||
| l.seek(-l.lastRuneSize) | |||||
| l.hasRune = true | |||||
| } | |||||
| func (l *lexer) errorf(f string, v ...interface{}) { | |||||
| l.err = fmt.Errorf(f, v...) | |||||
| } | |||||
| func (l *lexer) inTerms() bool { | |||||
| return l.termsLevel > 0 | |||||
| } | |||||
| func (l *lexer) termsEnter() { | |||||
| l.termsLevel++ | |||||
| } | |||||
| func (l *lexer) termsLeave() { | |||||
| l.termsLevel-- | |||||
| } | |||||
| var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open} | |||||
| var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma) | |||||
| func (l *lexer) fetchItem() { | |||||
| r := l.read() | |||||
| switch { | |||||
| case r == eof: | |||||
| l.tokens.push(Token{EOF, ""}) | |||||
| case r == char_terms_open: | |||||
| l.termsEnter() | |||||
| l.tokens.push(Token{TermsOpen, string(r)}) | |||||
| case r == char_comma && l.inTerms(): | |||||
| l.tokens.push(Token{Separator, string(r)}) | |||||
| case r == char_terms_close && l.inTerms(): | |||||
| l.tokens.push(Token{TermsClose, string(r)}) | |||||
| l.termsLeave() | |||||
| case r == char_range_open: | |||||
| l.tokens.push(Token{RangeOpen, string(r)}) | |||||
| l.fetchRange() | |||||
| case r == char_single: | |||||
| l.tokens.push(Token{Single, string(r)}) | |||||
| case r == char_any: | |||||
| if l.read() == char_any { | |||||
| l.tokens.push(Token{Super, string(r) + string(r)}) | |||||
| } else { | |||||
| l.unread() | |||||
| l.tokens.push(Token{Any, string(r)}) | |||||
| } | |||||
| default: | |||||
| l.unread() | |||||
| var breakers []rune | |||||
| if l.inTerms() { | |||||
| breakers = inTermsBreakers | |||||
| } else { | |||||
| breakers = inTextBreakers | |||||
| } | |||||
| l.fetchText(breakers) | |||||
| } | |||||
| } | |||||
| func (l *lexer) fetchRange() { | |||||
| var wantHi bool | |||||
| var wantClose bool | |||||
| var seenNot bool | |||||
| for { | |||||
| r := l.read() | |||||
| if r == eof { | |||||
| l.errorf("unexpected end of input") | |||||
| return | |||||
| } | |||||
| if wantClose { | |||||
| if r != char_range_close { | |||||
| l.errorf("expected close range character") | |||||
| } else { | |||||
| l.tokens.push(Token{RangeClose, string(r)}) | |||||
| } | |||||
| return | |||||
| } | |||||
| if wantHi { | |||||
| l.tokens.push(Token{RangeHi, string(r)}) | |||||
| wantClose = true | |||||
| continue | |||||
| } | |||||
| if !seenNot && r == char_range_not { | |||||
| l.tokens.push(Token{Not, string(r)}) | |||||
| seenNot = true | |||||
| continue | |||||
| } | |||||
| if n, w := l.peek(); n == char_range_between { | |||||
| l.seek(w) | |||||
| l.tokens.push(Token{RangeLo, string(r)}) | |||||
| l.tokens.push(Token{RangeBetween, string(n)}) | |||||
| wantHi = true | |||||
| continue | |||||
| } | |||||
| l.unread() // put the rune back and fetch it as text | |||||
| l.fetchText([]rune{char_range_close}) | |||||
| wantClose = true | |||||
| } | |||||
| } | |||||
| func (l *lexer) fetchText(breakers []rune) { | |||||
| var data []rune | |||||
| var escaped bool | |||||
| reading: | |||||
| for { | |||||
| r := l.read() | |||||
| if r == eof { | |||||
| break | |||||
| } | |||||
| if !escaped { | |||||
| if r == char_escape { | |||||
| escaped = true | |||||
| continue | |||||
| } | |||||
| if runes.IndexRune(breakers, r) != -1 { | |||||
| l.unread() | |||||
| break reading | |||||
| } | |||||
| } | |||||
| escaped = false | |||||
| data = append(data, r) | |||||
| } | |||||
| if len(data) > 0 { | |||||
| l.tokens.push(Token{Text, string(data)}) | |||||
| } | |||||
| } | |||||
| @@ -0,0 +1,88 @@ | |||||
| package lexer | |||||
| import "fmt" | |||||
| type TokenType int | |||||
| const ( | |||||
| EOF TokenType = iota | |||||
| Error | |||||
| Text | |||||
| Char | |||||
| Any | |||||
| Super | |||||
| Single | |||||
| Not | |||||
| Separator | |||||
| RangeOpen | |||||
| RangeClose | |||||
| RangeLo | |||||
| RangeHi | |||||
| RangeBetween | |||||
| TermsOpen | |||||
| TermsClose | |||||
| ) | |||||
| func (tt TokenType) String() string { | |||||
| switch tt { | |||||
| case EOF: | |||||
| return "eof" | |||||
| case Error: | |||||
| return "error" | |||||
| case Text: | |||||
| return "text" | |||||
| case Char: | |||||
| return "char" | |||||
| case Any: | |||||
| return "any" | |||||
| case Super: | |||||
| return "super" | |||||
| case Single: | |||||
| return "single" | |||||
| case Not: | |||||
| return "not" | |||||
| case Separator: | |||||
| return "separator" | |||||
| case RangeOpen: | |||||
| return "range_open" | |||||
| case RangeClose: | |||||
| return "range_close" | |||||
| case RangeLo: | |||||
| return "range_lo" | |||||
| case RangeHi: | |||||
| return "range_hi" | |||||
| case RangeBetween: | |||||
| return "range_between" | |||||
| case TermsOpen: | |||||
| return "terms_open" | |||||
| case TermsClose: | |||||
| return "terms_close" | |||||
| default: | |||||
| return "undef" | |||||
| } | |||||
| } | |||||
| type Token struct { | |||||
| Type TokenType | |||||
| Raw string | |||||
| } | |||||
| func (t Token) String() string { | |||||
| return fmt.Sprintf("%v<%q>", t.Type, t.Raw) | |||||
| } | |||||
| @@ -0,0 +1,14 @@ | |||||
| package syntax | |||||
| import ( | |||||
| "github.com/gobwas/glob/syntax/ast" | |||||
| "github.com/gobwas/glob/syntax/lexer" | |||||
| ) | |||||
| func Parse(s string) (*ast.Node, error) { | |||||
| return ast.Parse(lexer.NewLexer(s)) | |||||
| } | |||||
| func Special(b byte) bool { | |||||
| return lexer.Special(b) | |||||
| } | |||||
| @@ -0,0 +1,154 @@ | |||||
| package runes | |||||
| func Index(s, needle []rune) int { | |||||
| ls, ln := len(s), len(needle) | |||||
| switch { | |||||
| case ln == 0: | |||||
| return 0 | |||||
| case ln == 1: | |||||
| return IndexRune(s, needle[0]) | |||||
| case ln == ls: | |||||
| if Equal(s, needle) { | |||||
| return 0 | |||||
| } | |||||
| return -1 | |||||
| case ln > ls: | |||||
| return -1 | |||||
| } | |||||
| head: | |||||
| for i := 0; i < ls && ls-i >= ln; i++ { | |||||
| for y := 0; y < ln; y++ { | |||||
| if s[i+y] != needle[y] { | |||||
| continue head | |||||
| } | |||||
| } | |||||
| return i | |||||
| } | |||||
| return -1 | |||||
| } | |||||
| func LastIndex(s, needle []rune) int { | |||||
| ls, ln := len(s), len(needle) | |||||
| switch { | |||||
| case ln == 0: | |||||
| if ls == 0 { | |||||
| return 0 | |||||
| } | |||||
| return ls | |||||
| case ln == 1: | |||||
| return IndexLastRune(s, needle[0]) | |||||
| case ln == ls: | |||||
| if Equal(s, needle) { | |||||
| return 0 | |||||
| } | |||||
| return -1 | |||||
| case ln > ls: | |||||
| return -1 | |||||
| } | |||||
| head: | |||||
| for i := ls - 1; i >= 0 && i >= ln-1; i-- { | |||||
| for y := ln - 1; y >= 0; y-- { | |||||
| if s[i-(ln-y-1)] != needle[y] { | |||||
| continue head | |||||
| } | |||||
| } | |||||
| return i - ln + 1 | |||||
| } | |||||
| return -1 | |||||
| } | |||||
| // IndexAny returns the index of the first instance of any Unicode code point | |||||
| // from chars in s, or -1 if no Unicode code point from chars is present in s. | |||||
| func IndexAny(s, chars []rune) int { | |||||
| if len(chars) > 0 { | |||||
| for i, c := range s { | |||||
| for _, m := range chars { | |||||
| if c == m { | |||||
| return i | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| return -1 | |||||
| } | |||||
| func Contains(s, needle []rune) bool { | |||||
| return Index(s, needle) >= 0 | |||||
| } | |||||
| func Max(s []rune) (max rune) { | |||||
| for _, r := range s { | |||||
| if r > max { | |||||
| max = r | |||||
| } | |||||
| } | |||||
| return | |||||
| } | |||||
| func Min(s []rune) rune { | |||||
| min := rune(-1) | |||||
| for _, r := range s { | |||||
| if min == -1 { | |||||
| min = r | |||||
| continue | |||||
| } | |||||
| if r < min { | |||||
| min = r | |||||
| } | |||||
| } | |||||
| return min | |||||
| } | |||||
| func IndexRune(s []rune, r rune) int { | |||||
| for i, c := range s { | |||||
| if c == r { | |||||
| return i | |||||
| } | |||||
| } | |||||
| return -1 | |||||
| } | |||||
| func IndexLastRune(s []rune, r rune) int { | |||||
| for i := len(s) - 1; i >= 0; i-- { | |||||
| if s[i] == r { | |||||
| return i | |||||
| } | |||||
| } | |||||
| return -1 | |||||
| } | |||||
| func Equal(a, b []rune) bool { | |||||
| if len(a) == len(b) { | |||||
| for i := 0; i < len(a); i++ { | |||||
| if a[i] != b[i] { | |||||
| return false | |||||
| } | |||||
| } | |||||
| return true | |||||
| } | |||||
| return false | |||||
| } | |||||
| // HasPrefix tests whether the string s begins with prefix. | |||||
| func HasPrefix(s, prefix []rune) bool { | |||||
| return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix) | |||||
| } | |||||
| // HasSuffix tests whether the string s ends with suffix. | |||||
| func HasSuffix(s, suffix []rune) bool { | |||||
| return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix) | |||||
| } | |||||
| @@ -0,0 +1,39 @@ | |||||
| package strings | |||||
| import ( | |||||
| "strings" | |||||
| "unicode/utf8" | |||||
| ) | |||||
| func IndexAnyRunes(s string, rs []rune) int { | |||||
| for _, r := range rs { | |||||
| if i := strings.IndexRune(s, r); i != -1 { | |||||
| return i | |||||
| } | |||||
| } | |||||
| return -1 | |||||
| } | |||||
| func LastIndexAnyRunes(s string, rs []rune) int { | |||||
| for _, r := range rs { | |||||
| i := -1 | |||||
| if 0 <= r && r < utf8.RuneSelf { | |||||
| i = strings.LastIndexByte(s, byte(r)) | |||||
| } else { | |||||
| // Scan forward through s, remembering the byte offset of the | |||||
| // most recent occurrence of r. | |||||
| for off, sub := 0, s; len(sub) > 0; { | |||||
| j := strings.IndexRune(sub, r) | |||||
| if j == -1 { | |||||
| break | |||||
| } | |||||
| i = off + j | |||||
| off += j + utf8.RuneLen(r) | |||||
| sub = s[off:] | |||||
| } | |||||
| } | |||||
| if i != -1 { | |||||
| return i | |||||
| } | |||||
| } | |||||
| return -1 | |||||
| } | |||||
| @@ -199,6 +199,15 @@ github.com/go-swagger/go-swagger/generator | |||||
| github.com/go-swagger/go-swagger/scan | github.com/go-swagger/go-swagger/scan | ||||
| # github.com/go-xorm/xorm v0.7.7-0.20190822154023-17592d96b35b | # github.com/go-xorm/xorm v0.7.7-0.20190822154023-17592d96b35b | ||||
| github.com/go-xorm/xorm | github.com/go-xorm/xorm | ||||
| # github.com/gobwas/glob v0.2.3 | |||||
| github.com/gobwas/glob | |||||
| github.com/gobwas/glob/compiler | |||||
| github.com/gobwas/glob/syntax | |||||
| github.com/gobwas/glob/match | |||||
| github.com/gobwas/glob/syntax/ast | |||||
| github.com/gobwas/glob/util/runes | |||||
| github.com/gobwas/glob/syntax/lexer | |||||
| github.com/gobwas/glob/util/strings | |||||
| # github.com/gogits/chardet v0.0.0-20150115103509-2404f7772561 | # github.com/gogits/chardet v0.0.0-20150115103509-2404f7772561 | ||||
| github.com/gogits/chardet | github.com/gogits/chardet | ||||
| # github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 | # github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 | ||||