
bump to kubernetes v0.32.0-rc.2

Signed-off-by: Amir Alavi <amiralavi7@gmail.com>
Amir Alavi
2024-12-06 10:36:06 -05:00
parent 2c11481856
commit f4c3fdf418
1822 changed files with 68460 additions and 50001 deletions

61
vendor/github.com/go-openapi/jsonpointer/.golangci.yml generated vendored Normal file

@@ -0,0 +1,61 @@
linters-settings:
govet:
check-shadowing: true
golint:
min-confidence: 0
gocyclo:
min-complexity: 45
maligned:
suggest-new: true
dupl:
threshold: 200
goconst:
min-len: 2
min-occurrences: 3
linters:
enable-all: true
disable:
- maligned
- unparam
- lll
- gochecknoinits
- gochecknoglobals
- funlen
- godox
- gocognit
- whitespace
- wsl
- wrapcheck
- testpackage
- nlreturn
- gomnd
- exhaustivestruct
- goerr113
- errorlint
- nestif
- godot
- gofumpt
- paralleltest
- tparallel
- thelper
- ifshort
- exhaustruct
- varnamelen
- gci
- depguard
- errchkjson
- inamedparam
- nonamedreturns
- musttag
- ireturn
- forcetypeassert
- cyclop
# deprecated linters
- deadcode
- interfacer
- scopelint
- varcheck
- structcheck
- golint
- nosnakecase


@@ -1,6 +1,10 @@
# gojsonpointer [![Build Status](https://travis-ci.org/go-openapi/jsonpointer.svg?branch=master)](https://travis-ci.org/go-openapi/jsonpointer) [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
# gojsonpointer [![Build Status](https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonpointer/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer)
[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE)
[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonpointer.svg)](https://pkg.go.dev/github.com/go-openapi/jsonpointer)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonpointer)](https://goreportcard.com/report/github.com/go-openapi/jsonpointer)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/jsonpointer?status.svg)](http://godoc.org/github.com/go-openapi/jsonpointer)
An implementation of JSON Pointer - Go language
## Status
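A minimal usage sketch of the vendored go-openapi/jsonpointer package, for orientation while reviewing this bump. It assumes the package's public New / Pointer.Get / Escape API; verify signatures against the vendored sources.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/jsonpointer"
)

func main() {
	doc := map[string]interface{}{
		"spec": map[string]interface{}{"replicas": 3},
	}

	// RFC 6901 pointer: "/" inside a key is escaped as "~1", "~" as "~0".
	ptr, err := jsonpointer.New("/spec/replicas")
	if err != nil {
		panic(err)
	}

	value, kind, err := ptr.Get(doc)
	if err != nil {
		panic(err)
	}
	fmt.Println(value, kind) // resolved value and its reflect.Kind

	// Token escaping helper, as defined in the hunk below.
	fmt.Println(jsonpointer.Escape("a/b")) // "a~1b"
}
```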


@@ -110,19 +110,39 @@ func SetForToken(document any, decodedToken string, value any) (any, error) {
return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider)
}
func isNil(input any) bool {
if input == nil {
return true
}
kind := reflect.TypeOf(input).Kind()
switch kind { //nolint:exhaustive
case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan:
return reflect.ValueOf(input).IsNil()
default:
return false
}
}
func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvider) (any, reflect.Kind, error) {
rValue := reflect.Indirect(reflect.ValueOf(node))
kind := rValue.Kind()
if isNil(node) {
return nil, kind, fmt.Errorf("nil value has not field %q", decodedToken)
}
if rValue.Type().Implements(jsonPointableType) {
r, err := node.(JSONPointable).JSONLookup(decodedToken)
switch typed := node.(type) {
case JSONPointable:
r, err := typed.JSONLookup(decodedToken)
if err != nil {
return nil, kind, err
}
return r, kind, nil
case *any: // case of a pointer to interface, that is not resolved by reflect.Indirect
return getSingleImpl(*typed, decodedToken, nameProvider)
}
switch kind {
switch kind { //nolint:exhaustive
case reflect.Struct:
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
if !ok {
@@ -170,7 +190,7 @@ func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameP
return node.(JSONSetable).JSONSet(decodedToken, data)
}
switch rValue.Kind() {
switch rValue.Kind() { //nolint:exhaustive
case reflect.Struct:
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
if !ok {
@@ -231,8 +251,7 @@ func (p *Pointer) get(node any, nameProvider *swag.NameProvider) (any, reflect.K
if err != nil {
return nil, knd, err
}
node, kind = r, knd
node = r
}
rValue := reflect.ValueOf(node)
@@ -245,7 +264,7 @@ func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error {
knd := reflect.ValueOf(node).Kind()
if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array {
return fmt.Errorf("only structs, pointers, maps and slices are supported for setting values")
return errors.New("only structs, pointers, maps and slices are supported for setting values")
}
if nameProvider == nil {
@@ -284,7 +303,7 @@ func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error {
continue
}
switch kind {
switch kind { //nolint:exhaustive
case reflect.Struct:
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
if !ok {
@@ -405,11 +424,11 @@ func offsetSingleObject(dec *json.Decoder, decodedToken string) (int64, error) {
case json.Delim:
switch tk {
case '{':
if err := drainSingle(dec); err != nil {
if err = drainSingle(dec); err != nil {
return 0, err
}
case '[':
if err := drainSingle(dec); err != nil {
if err = drainSingle(dec); err != nil {
return 0, err
}
}
@@ -435,20 +454,21 @@ func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) {
if err != nil {
return 0, err
}
switch tk := tk.(type) {
case json.Delim:
switch tk {
if delim, isDelim := tk.(json.Delim); isDelim {
switch delim {
case '{':
if err := drainSingle(dec); err != nil {
if err = drainSingle(dec); err != nil {
return 0, err
}
case '[':
if err := drainSingle(dec); err != nil {
if err = drainSingle(dec); err != nil {
return 0, err
}
}
}
}
if !dec.More() {
return 0, fmt.Errorf("token reference %q not found", decodedToken)
}
@@ -456,27 +476,27 @@ func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) {
}
// drainSingle drains a single level of object or array.
// The decoder has to guarantee the begining delim (i.e. '{' or '[') has been consumed.
// The decoder has to guarantee the beginning delim (i.e. '{' or '[') has been consumed.
func drainSingle(dec *json.Decoder) error {
for dec.More() {
tk, err := dec.Token()
if err != nil {
return err
}
switch tk := tk.(type) {
case json.Delim:
switch tk {
if delim, isDelim := tk.(json.Delim); isDelim {
switch delim {
case '{':
if err := drainSingle(dec); err != nil {
if err = drainSingle(dec); err != nil {
return err
}
case '[':
if err := drainSingle(dec); err != nil {
if err = drainSingle(dec); err != nil {
return err
}
}
}
}
// Consumes the ending delim
if _, err := dec.Token(); err != nil {
return err
@@ -498,14 +518,14 @@ const (
// Unescape unescapes a json pointer reference token string to the original representation
func Unescape(token string) string {
step1 := strings.Replace(token, encRefTok1, decRefTok1, -1)
step2 := strings.Replace(step1, encRefTok0, decRefTok0, -1)
step1 := strings.ReplaceAll(token, encRefTok1, decRefTok1)
step2 := strings.ReplaceAll(step1, encRefTok0, decRefTok0)
return step2
}
// Escape escapes a pointer reference token string
func Escape(token string) string {
step1 := strings.Replace(token, decRefTok0, encRefTok0, -1)
step2 := strings.Replace(step1, decRefTok1, encRefTok1, -1)
step1 := strings.ReplaceAll(token, decRefTok0, encRefTok0)
step2 := strings.ReplaceAll(step1, decRefTok1, encRefTok1)
return step2
}


@@ -2,3 +2,4 @@ secrets.yml
vendor
Godeps
.idea
*.out


@@ -4,14 +4,14 @@ linters-settings:
golint:
min-confidence: 0
gocyclo:
min-complexity: 25
min-complexity: 45
maligned:
suggest-new: true
dupl:
threshold: 100
threshold: 200
goconst:
min-len: 3
min-occurrences: 2
min-occurrences: 3
linters:
enable-all: true
@@ -20,35 +20,41 @@ linters:
- lll
- gochecknoinits
- gochecknoglobals
- nlreturn
- testpackage
- funlen
- godox
- gocognit
- whitespace
- wsl
- wrapcheck
- testpackage
- nlreturn
- gomnd
- exhaustive
- exhaustivestruct
- goerr113
- wsl
- whitespace
- gofumpt
- godot
- errorlint
- nestif
- godox
- funlen
- gci
- gocognit
- godot
- gofumpt
- paralleltest
- tparallel
- thelper
- ifshort
- gomoddirectives
- cyclop
- forcetypeassert
- ireturn
- tagliatelle
- varnamelen
- goimports
- tenv
- golint
- exhaustruct
- nilnil
- varnamelen
- gci
- depguard
- errchkjson
- inamedparam
- nonamedreturns
- musttag
- ireturn
- forcetypeassert
- cyclop
# deprecated linters
- deadcode
- interfacer
- scopelint
- varcheck
- structcheck
- golint
- nosnakecase

52
vendor/github.com/go-openapi/swag/BENCHMARK.md generated vendored Normal file

@@ -0,0 +1,52 @@
# Benchmarks
## Name mangling utilities
```bash
go test -bench XXX -run XXX -benchtime 30s
```
### Benchmarks at b3e7a5386f996177e4808f11acb2aa93a0f660df
```
goos: linux
goarch: amd64
pkg: github.com/go-openapi/swag
cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz
BenchmarkToXXXName/ToGoName-4 862623 44101 ns/op 10450 B/op 732 allocs/op
BenchmarkToXXXName/ToVarName-4 853656 40728 ns/op 10468 B/op 734 allocs/op
BenchmarkToXXXName/ToFileName-4 1268312 27813 ns/op 9785 B/op 617 allocs/op
BenchmarkToXXXName/ToCommandName-4 1276322 27903 ns/op 9785 B/op 617 allocs/op
BenchmarkToXXXName/ToHumanNameLower-4 895334 40354 ns/op 10472 B/op 731 allocs/op
BenchmarkToXXXName/ToHumanNameTitle-4 882441 40678 ns/op 10566 B/op 749 allocs/op
```
### Benchmarks after PR #79
~ x10 performance improvement and ~ /100 memory allocations.
```
goos: linux
goarch: amd64
pkg: github.com/go-openapi/swag
cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz
BenchmarkToXXXName/ToGoName-4 9595830 3991 ns/op 42 B/op 5 allocs/op
BenchmarkToXXXName/ToVarName-4 9194276 3984 ns/op 62 B/op 7 allocs/op
BenchmarkToXXXName/ToFileName-4 17002711 2123 ns/op 147 B/op 7 allocs/op
BenchmarkToXXXName/ToCommandName-4 16772926 2111 ns/op 147 B/op 7 allocs/op
BenchmarkToXXXName/ToHumanNameLower-4 9788331 3749 ns/op 92 B/op 6 allocs/op
BenchmarkToXXXName/ToHumanNameTitle-4 9188260 3941 ns/op 104 B/op 6 allocs/op
```
```
goos: linux
goarch: amd64
pkg: github.com/go-openapi/swag
cpu: AMD Ryzen 7 5800X 8-Core Processor
BenchmarkToXXXName/ToGoName-16 18527378 1972 ns/op 42 B/op 5 allocs/op
BenchmarkToXXXName/ToVarName-16 15552692 2093 ns/op 62 B/op 7 allocs/op
BenchmarkToXXXName/ToFileName-16 32161176 1117 ns/op 147 B/op 7 allocs/op
BenchmarkToXXXName/ToCommandName-16 32256634 1137 ns/op 147 B/op 7 allocs/op
BenchmarkToXXXName/ToHumanNameLower-16 18599661 1946 ns/op 92 B/op 6 allocs/op
BenchmarkToXXXName/ToHumanNameTitle-16 17581353 2054 ns/op 105 B/op 6 allocs/op
```


@@ -1,7 +1,8 @@
# Swag [![Build Status](https://travis-ci.org/go-openapi/swag.svg?branch=master)](https://travis-ci.org/go-openapi/swag) [![codecov](https://codecov.io/gh/go-openapi/swag/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/swag) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
# Swag [![Build Status](https://github.com/go-openapi/swag/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/swag/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/swag/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/swag)
[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/swag/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/go-openapi/swag?status.svg)](http://godoc.org/github.com/go-openapi/swag)
[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/swag.svg)](https://pkg.go.dev/github.com/go-openapi/swag)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/swag)](https://goreportcard.com/report/github.com/go-openapi/swag)
Contains a bunch of helper functions for go-openapi and go-swagger projects.
@@ -18,4 +19,5 @@ You may also use it standalone for your projects.
This repo has only few dependencies outside of the standard library:
* YAML utilities depend on gopkg.in/yaml.v2
* YAML utilities depend on `gopkg.in/yaml.v3`
* `github.com/mailru/easyjson v0.7.7`
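A minimal sketch of the swag name-mangling helpers benchmarked in BENCHMARK.md above. The function names appear in this diff; the outputs noted in comments are indicative only and not verified against this vendored revision.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	// Underscored or camel-cased names are split into words;
	// ToGoName keeps initialisms such as HTTP upper-cased.
	fmt.Println(swag.ToGoName("descheduler_http_server"))        // e.g. DeschedulerHTTPServer
	fmt.Println(swag.ToVarName("descheduler_http_server"))       // e.g. deschedulerHTTPServer
	fmt.Println(swag.ToFileName("DeschedulerHTTPServer"))        // e.g. descheduler_http_server
	fmt.Println(swag.ToCommandName("DeschedulerHTTPServer"))     // e.g. descheduler-http-server
	fmt.Println(swag.ToHumanNameLower("DeschedulerHTTPServer"))  // e.g. descheduler HTTP server
	fmt.Println(swag.ToJSONName("descheduler_http_server"))      // e.g. deschedulerHttpServer
}
```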

202
vendor/github.com/go-openapi/swag/initialism_index.go generated vendored Normal file

@@ -0,0 +1,202 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package swag
import (
"sort"
"strings"
"sync"
)
var (
// commonInitialisms are common acronyms that are kept as whole uppercased words.
commonInitialisms *indexOfInitialisms
// initialisms is a slice of sorted initialisms
initialisms []string
// a copy of initialisms pre-baked as []rune
initialismsRunes [][]rune
initialismsUpperCased [][]rune
isInitialism func(string) bool
maxAllocMatches int
)
func init() {
// Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769
configuredInitialisms := map[string]bool{
"ACL": true,
"API": true,
"ASCII": true,
"CPU": true,
"CSS": true,
"DNS": true,
"EOF": true,
"GUID": true,
"HTML": true,
"HTTPS": true,
"HTTP": true,
"ID": true,
"IP": true,
"IPv4": true,
"IPv6": true,
"JSON": true,
"LHS": true,
"OAI": true,
"QPS": true,
"RAM": true,
"RHS": true,
"RPC": true,
"SLA": true,
"SMTP": true,
"SQL": true,
"SSH": true,
"TCP": true,
"TLS": true,
"TTL": true,
"UDP": true,
"UI": true,
"UID": true,
"UUID": true,
"URI": true,
"URL": true,
"UTF8": true,
"VM": true,
"XML": true,
"XMPP": true,
"XSRF": true,
"XSS": true,
}
// a thread-safe index of initialisms
commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms)
initialisms = commonInitialisms.sorted()
initialismsRunes = asRunes(initialisms)
initialismsUpperCased = asUpperCased(initialisms)
maxAllocMatches = maxAllocHeuristic(initialismsRunes)
// a test function
isInitialism = commonInitialisms.isInitialism
}
func asRunes(in []string) [][]rune {
out := make([][]rune, len(in))
for i, initialism := range in {
out[i] = []rune(initialism)
}
return out
}
func asUpperCased(in []string) [][]rune {
out := make([][]rune, len(in))
for i, initialism := range in {
out[i] = []rune(upper(trim(initialism)))
}
return out
}
func maxAllocHeuristic(in [][]rune) int {
heuristic := make(map[rune]int)
for _, initialism := range in {
heuristic[initialism[0]]++
}
var maxAlloc int
for _, val := range heuristic {
if val > maxAlloc {
maxAlloc = val
}
}
return maxAlloc
}
// AddInitialisms add additional initialisms
func AddInitialisms(words ...string) {
for _, word := range words {
// commonInitialisms[upper(word)] = true
commonInitialisms.add(upper(word))
}
// sort again
initialisms = commonInitialisms.sorted()
initialismsRunes = asRunes(initialisms)
initialismsUpperCased = asUpperCased(initialisms)
}
// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms.
// Since go1.9, this may be implemented with sync.Map.
type indexOfInitialisms struct {
sortMutex *sync.Mutex
index *sync.Map
}
func newIndexOfInitialisms() *indexOfInitialisms {
return &indexOfInitialisms{
sortMutex: new(sync.Mutex),
index: new(sync.Map),
}
}
func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms {
m.sortMutex.Lock()
defer m.sortMutex.Unlock()
for k, v := range initial {
m.index.Store(k, v)
}
return m
}
func (m *indexOfInitialisms) isInitialism(key string) bool {
_, ok := m.index.Load(key)
return ok
}
func (m *indexOfInitialisms) add(key string) *indexOfInitialisms {
m.index.Store(key, true)
return m
}
func (m *indexOfInitialisms) sorted() (result []string) {
m.sortMutex.Lock()
defer m.sortMutex.Unlock()
m.index.Range(func(key, _ interface{}) bool {
k := key.(string)
result = append(result, k)
return true
})
sort.Sort(sort.Reverse(byInitialism(result)))
return
}
type byInitialism []string
func (s byInitialism) Len() int {
return len(s)
}
func (s byInitialism) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
func (s byInitialism) Less(i, j int) bool {
if len(s[i]) != len(s[j]) {
return len(s[i]) < len(s[j])
}
return strings.Compare(s[i], s[j]) > 0
}


@@ -21,6 +21,7 @@ import (
"net/http"
"net/url"
"os"
"path"
"path/filepath"
"runtime"
"strings"
@@ -40,43 +41,97 @@ var LoadHTTPBasicAuthPassword = ""
var LoadHTTPCustomHeaders = map[string]string{}
// LoadFromFileOrHTTP loads the bytes from a file or a remote http server based on the path passed in
func LoadFromFileOrHTTP(path string) ([]byte, error) {
return LoadStrategy(path, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(path)
func LoadFromFileOrHTTP(pth string) ([]byte, error) {
return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(pth)
}
// LoadFromFileOrHTTPWithTimeout loads the bytes from a file or a remote http server based on the path passed in
// timeout arg allows for per request overriding of the request timeout
func LoadFromFileOrHTTPWithTimeout(path string, timeout time.Duration) ([]byte, error) {
return LoadStrategy(path, os.ReadFile, loadHTTPBytes(timeout))(path)
func LoadFromFileOrHTTPWithTimeout(pth string, timeout time.Duration) ([]byte, error) {
return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(timeout))(pth)
}
// LoadStrategy returns a loader function for a given path or uri
func LoadStrategy(path string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) {
if strings.HasPrefix(path, "http") {
// LoadStrategy returns a loader function for a given path or URI.
//
// The load strategy returns the remote load for any path starting with `http`.
// So this works for any URI with a scheme `http` or `https`.
//
// The fallback strategy is to call the local loader.
//
// The local loader takes a local file system path (absolute or relative) as argument,
// or alternatively a `file://...` URI, **without host** (see also below for windows).
//
// There are a few liberalities, initially intended to be tolerant regarding the URI syntax,
// especially on windows.
//
// Before the local loader is called, the given path is transformed:
// - percent-encoded characters are unescaped
// - simple paths (e.g. `./folder/file`) are passed as-is
// - on windows, occurrences of `/` are replaced by `\`, so providing a relative path such a `folder/file` works too.
//
// For paths provided as URIs with the "file" scheme, please note that:
// - `file://` is simply stripped.
// This means that the host part of the URI is not parsed at all.
// For example, `file:///folder/file" becomes "/folder/file`,
// but `file://localhost/folder/file` becomes `localhost/folder/file` on unix systems.
// Similarly, `file://./folder/file` yields `./folder/file`.
// - on windows, `file://...` can take a host so as to specify an UNC share location.
//
// Reminder about windows-specifics:
// - `file://host/folder/file` becomes an UNC path like `\\host\folder\file` (no port specification is supported)
// - `file:///c:/folder/file` becomes `C:\folder\file`
// - `file://c:/folder/file` is tolerated (without leading `/`) and becomes `c:\folder\file`
func LoadStrategy(pth string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) {
if strings.HasPrefix(pth, "http") {
return remote
}
return func(pth string) ([]byte, error) {
upth, err := pathUnescape(pth)
return func(p string) ([]byte, error) {
upth, err := url.PathUnescape(p)
if err != nil {
return nil, err
}
if strings.HasPrefix(pth, `file://`) {
if runtime.GOOS == "windows" {
// support for canonical file URIs on windows.
// Zero tolerance here for dodgy URIs.
u, _ := url.Parse(upth)
if u.Host != "" {
// assume UNC name (volume share)
// file://host/share/folder\... ==> \\host\share\path\folder
// NOTE: UNC port not yet supported
upth = strings.Join([]string{`\`, u.Host, u.Path}, `\`)
} else {
// file:///c:/folder/... ==> just remove the leading slash
upth = strings.TrimPrefix(upth, `file:///`)
}
} else {
upth = strings.TrimPrefix(upth, `file://`)
if !strings.HasPrefix(p, `file://`) {
// regular file path provided: just normalize slashes
return local(filepath.FromSlash(upth))
}
if runtime.GOOS != "windows" {
// crude processing: this leaves full URIs with a host with a (mostly) unexpected result
upth = strings.TrimPrefix(upth, `file://`)
return local(filepath.FromSlash(upth))
}
// windows-only pre-processing of file://... URIs
// support for canonical file URIs on windows.
u, err := url.Parse(filepath.ToSlash(upth))
if err != nil {
return nil, err
}
if u.Host != "" {
// assume UNC name (volume share)
// NOTE: UNC port not yet supported
// when the "host" segment is a drive letter:
// file://C:/folder/... => C:\folder
upth = path.Clean(strings.Join([]string{u.Host, u.Path}, `/`))
if !strings.HasSuffix(u.Host, ":") && u.Host[0] != '.' {
// tolerance: if we have a leading dot, this can't be a host
// file://host/share/folder\... ==> \\host\share\path\folder
upth = "//" + upth
}
} else {
// no host, let's figure out if this is a drive letter
upth = strings.TrimPrefix(upth, `file://`)
first, _, _ := strings.Cut(strings.TrimPrefix(u.Path, "/"), "/")
if strings.HasSuffix(first, ":") {
// drive letter in the first segment:
// file:///c:/folder/... ==> strip the leading slash
upth = strings.TrimPrefix(upth, `/`)
}
}
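A brief sketch of how the loader is composed, using the LoadFromFileOrHTTP and LoadStrategy signatures visible in the hunk above; the ./openapi.yaml path is hypothetical.

```go
package main

import (
	"fmt"
	"os"

	"github.com/go-openapi/swag"
)

func main() {
	// LoadFromFileOrHTTP picks the remote loader for http(s) URIs and the
	// local, os.ReadFile-based loader for anything else (including file:// URIs).
	if b, err := swag.LoadFromFileOrHTTP("./openapi.yaml"); err == nil {
		fmt.Printf("loaded %d bytes\n", len(b))
	}

	// LoadStrategy composes a loader from caller-supplied local and remote functions.
	remote := func(uri string) ([]byte, error) {
		return nil, fmt.Errorf("remote loading disabled for %s", uri)
	}
	load := swag.LoadStrategy("./openapi.yaml", os.ReadFile, remote)
	if b, err := load("./openapi.yaml"); err == nil {
		fmt.Printf("loaded %d bytes via custom strategy\n", len(b))
	}
}
```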


@@ -14,74 +14,80 @@
package swag
import "unicode"
import (
"unicode"
"unicode/utf8"
)
type (
nameLexem interface {
GetUnsafeGoName() string
GetOriginal() string
IsInitialism() bool
}
lexemKind uint8
initialismNameLexem struct {
nameLexem struct {
original string
matchedInitialism string
}
casualNameLexem struct {
original string
kind lexemKind
}
)
func newInitialismNameLexem(original, matchedInitialism string) *initialismNameLexem {
return &initialismNameLexem{
const (
lexemKindCasualName lexemKind = iota
lexemKindInitialismName
)
func newInitialismNameLexem(original, matchedInitialism string) nameLexem {
return nameLexem{
kind: lexemKindInitialismName,
original: original,
matchedInitialism: matchedInitialism,
}
}
func newCasualNameLexem(original string) *casualNameLexem {
return &casualNameLexem{
func newCasualNameLexem(original string) nameLexem {
return nameLexem{
kind: lexemKindCasualName,
original: original,
}
}
func (l *initialismNameLexem) GetUnsafeGoName() string {
return l.matchedInitialism
}
func (l nameLexem) GetUnsafeGoName() string {
if l.kind == lexemKindInitialismName {
return l.matchedInitialism
}
var (
first rune
rest string
)
func (l *casualNameLexem) GetUnsafeGoName() string {
var first rune
var rest string
for i, orig := range l.original {
if i == 0 {
first = orig
continue
}
if i > 0 {
rest = l.original[i:]
break
}
}
if len(l.original) > 1 {
return string(unicode.ToUpper(first)) + lower(rest)
b := poolOfBuffers.BorrowBuffer(utf8.UTFMax + len(rest))
defer func() {
poolOfBuffers.RedeemBuffer(b)
}()
b.WriteRune(unicode.ToUpper(first))
b.WriteString(lower(rest))
return b.String()
}
return l.original
}
func (l *initialismNameLexem) GetOriginal() string {
func (l nameLexem) GetOriginal() string {
return l.original
}
func (l *casualNameLexem) GetOriginal() string {
return l.original
}
func (l *initialismNameLexem) IsInitialism() bool {
return true
}
func (l *casualNameLexem) IsInitialism() bool {
return false
func (l nameLexem) IsInitialism() bool {
return l.kind == lexemKindInitialismName
}


@@ -1,24 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:build go1.8
// +build go1.8
package swag
import "net/url"
func pathUnescape(path string) (string, error) {
return url.PathUnescape(path)
}


@@ -1,68 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:build go1.9
// +build go1.9
package swag
import (
"sort"
"sync"
)
// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms.
// Since go1.9, this may be implemented with sync.Map.
type indexOfInitialisms struct {
sortMutex *sync.Mutex
index *sync.Map
}
func newIndexOfInitialisms() *indexOfInitialisms {
return &indexOfInitialisms{
sortMutex: new(sync.Mutex),
index: new(sync.Map),
}
}
func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms {
m.sortMutex.Lock()
defer m.sortMutex.Unlock()
for k, v := range initial {
m.index.Store(k, v)
}
return m
}
func (m *indexOfInitialisms) isInitialism(key string) bool {
_, ok := m.index.Load(key)
return ok
}
func (m *indexOfInitialisms) add(key string) *indexOfInitialisms {
m.index.Store(key, true)
return m
}
func (m *indexOfInitialisms) sorted() (result []string) {
m.sortMutex.Lock()
defer m.sortMutex.Unlock()
m.index.Range(func(key, value interface{}) bool {
k := key.(string)
result = append(result, k)
return true
})
sort.Sort(sort.Reverse(byInitialism(result)))
return
}


@@ -1,24 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:build !go1.8
// +build !go1.8
package swag
import "net/url"
func pathUnescape(path string) (string, error) {
return url.QueryUnescape(path)
}


@@ -1,70 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:build !go1.9
// +build !go1.9
package swag
import (
"sort"
"sync"
)
// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms.
// Before go1.9, this may be implemented with a mutex on the map.
type indexOfInitialisms struct {
getMutex *sync.Mutex
index map[string]bool
}
func newIndexOfInitialisms() *indexOfInitialisms {
return &indexOfInitialisms{
getMutex: new(sync.Mutex),
index: make(map[string]bool, 50),
}
}
func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms {
m.getMutex.Lock()
defer m.getMutex.Unlock()
for k, v := range initial {
m.index[k] = v
}
return m
}
func (m *indexOfInitialisms) isInitialism(key string) bool {
m.getMutex.Lock()
defer m.getMutex.Unlock()
_, ok := m.index[key]
return ok
}
func (m *indexOfInitialisms) add(key string) *indexOfInitialisms {
m.getMutex.Lock()
defer m.getMutex.Unlock()
m.index[key] = true
return m
}
func (m *indexOfInitialisms) sorted() (result []string) {
m.getMutex.Lock()
defer m.getMutex.Unlock()
for k := range m.index {
result = append(result, k)
}
sort.Sort(sort.Reverse(byInitialism(result)))
return
}


@@ -15,124 +15,269 @@
package swag
import (
"bytes"
"sync"
"unicode"
"unicode/utf8"
)
var nameReplaceTable = map[rune]string{
'@': "At ",
'&': "And ",
'|': "Pipe ",
'$': "Dollar ",
'!': "Bang ",
'-': "",
'_': "",
}
type (
splitter struct {
postSplitInitialismCheck bool
initialisms []string
initialismsRunes [][]rune
initialismsUpperCased [][]rune // initialisms cached in their trimmed, upper-cased version
postSplitInitialismCheck bool
}
splitterOption func(*splitter) *splitter
splitterOption func(*splitter)
initialismMatch struct {
body []rune
start, end int
complete bool
}
initialismMatches []initialismMatch
)
// split calls the splitter; splitter provides more control and post options
func split(str string) []string {
lexems := newSplitter().split(str)
result := make([]string, 0, len(lexems))
type (
// memory pools of temporary objects.
//
// These are used to recycle temporarily allocated objects
// and relieve the GC from undue pressure.
for _, lexem := range lexems {
matchesPool struct {
*sync.Pool
}
buffersPool struct {
*sync.Pool
}
lexemsPool struct {
*sync.Pool
}
splittersPool struct {
*sync.Pool
}
)
var (
// poolOfMatches holds temporary slices for recycling during the initialism match process
poolOfMatches = matchesPool{
Pool: &sync.Pool{
New: func() any {
s := make(initialismMatches, 0, maxAllocMatches)
return &s
},
},
}
poolOfBuffers = buffersPool{
Pool: &sync.Pool{
New: func() any {
return new(bytes.Buffer)
},
},
}
poolOfLexems = lexemsPool{
Pool: &sync.Pool{
New: func() any {
s := make([]nameLexem, 0, maxAllocMatches)
return &s
},
},
}
poolOfSplitters = splittersPool{
Pool: &sync.Pool{
New: func() any {
s := newSplitter()
return &s
},
},
}
)
// nameReplaceTable finds a word representation for special characters.
func nameReplaceTable(r rune) (string, bool) {
switch r {
case '@':
return "At ", true
case '&':
return "And ", true
case '|':
return "Pipe ", true
case '$':
return "Dollar ", true
case '!':
return "Bang ", true
case '-':
return "", true
case '_':
return "", true
default:
return "", false
}
}
// split calls the splitter.
//
// Use newSplitter for more control and options
func split(str string) []string {
s := poolOfSplitters.BorrowSplitter()
lexems := s.split(str)
result := make([]string, 0, len(*lexems))
for _, lexem := range *lexems {
result = append(result, lexem.GetOriginal())
}
poolOfLexems.RedeemLexems(lexems)
poolOfSplitters.RedeemSplitter(s)
return result
}
func (s *splitter) split(str string) []nameLexem {
return s.toNameLexems(str)
}
func newSplitter(options ...splitterOption) *splitter {
splitter := &splitter{
func newSplitter(options ...splitterOption) splitter {
s := splitter{
postSplitInitialismCheck: false,
initialisms: initialisms,
initialismsRunes: initialismsRunes,
initialismsUpperCased: initialismsUpperCased,
}
for _, option := range options {
splitter = option(splitter)
option(&s)
}
return splitter
}
// withPostSplitInitialismCheck allows to catch initialisms after main split process
func withPostSplitInitialismCheck(s *splitter) *splitter {
s.postSplitInitialismCheck = true
return s
}
type (
initialismMatch struct {
start, end int
body []rune
complete bool
}
initialismMatches []*initialismMatch
)
// withPostSplitInitialismCheck allows to catch initialisms after main split process
func withPostSplitInitialismCheck(s *splitter) {
s.postSplitInitialismCheck = true
}
func (s *splitter) toNameLexems(name string) []nameLexem {
func (p matchesPool) BorrowMatches() *initialismMatches {
s := p.Get().(*initialismMatches)
*s = (*s)[:0] // reset slice, keep allocated capacity
return s
}
func (p buffersPool) BorrowBuffer(size int) *bytes.Buffer {
s := p.Get().(*bytes.Buffer)
s.Reset()
if s.Cap() < size {
s.Grow(size)
}
return s
}
func (p lexemsPool) BorrowLexems() *[]nameLexem {
s := p.Get().(*[]nameLexem)
*s = (*s)[:0] // reset slice, keep allocated capacity
return s
}
func (p splittersPool) BorrowSplitter(options ...splitterOption) *splitter {
s := p.Get().(*splitter)
s.postSplitInitialismCheck = false // reset options
for _, apply := range options {
apply(s)
}
return s
}
func (p matchesPool) RedeemMatches(s *initialismMatches) {
p.Put(s)
}
func (p buffersPool) RedeemBuffer(s *bytes.Buffer) {
p.Put(s)
}
func (p lexemsPool) RedeemLexems(s *[]nameLexem) {
p.Put(s)
}
func (p splittersPool) RedeemSplitter(s *splitter) {
p.Put(s)
}
func (m initialismMatch) isZero() bool {
return m.start == 0 && m.end == 0
}
func (s splitter) split(name string) *[]nameLexem {
nameRunes := []rune(name)
matches := s.gatherInitialismMatches(nameRunes)
if matches == nil {
return poolOfLexems.BorrowLexems()
}
return s.mapMatchesToNameLexems(nameRunes, matches)
}
func (s *splitter) gatherInitialismMatches(nameRunes []rune) initialismMatches {
matches := make(initialismMatches, 0)
func (s splitter) gatherInitialismMatches(nameRunes []rune) *initialismMatches {
var matches *initialismMatches
for currentRunePosition, currentRune := range nameRunes {
newMatches := make(initialismMatches, 0, len(matches))
// recycle these allocations as we loop over runes
// with such recycling, only 2 slices should be allocated per call
// instead of o(n).
newMatches := poolOfMatches.BorrowMatches()
// check current initialism matches
for _, match := range matches {
if keepCompleteMatch := match.complete; keepCompleteMatch {
newMatches = append(newMatches, match)
continue
}
// drop failed match
currentMatchRune := match.body[currentRunePosition-match.start]
if !s.initialismRuneEqual(currentMatchRune, currentRune) {
continue
}
// try to complete ongoing match
if currentRunePosition-match.start == len(match.body)-1 {
// we are close; the next step is to check the symbol ahead
// if it is a small letter, then it is not the end of match
// but beginning of the next word
if currentRunePosition < len(nameRunes)-1 {
nextRune := nameRunes[currentRunePosition+1]
if newWord := unicode.IsLower(nextRune); newWord {
// oh ok, it was the start of a new word
continue
}
if matches != nil { // skip first iteration
for _, match := range *matches {
if keepCompleteMatch := match.complete; keepCompleteMatch {
*newMatches = append(*newMatches, match)
continue
}
match.complete = true
match.end = currentRunePosition
}
// drop failed match
currentMatchRune := match.body[currentRunePosition-match.start]
if currentMatchRune != currentRune {
continue
}
newMatches = append(newMatches, match)
// try to complete ongoing match
if currentRunePosition-match.start == len(match.body)-1 {
// we are close; the next step is to check the symbol ahead
// if it is a small letter, then it is not the end of match
// but beginning of the next word
if currentRunePosition < len(nameRunes)-1 {
nextRune := nameRunes[currentRunePosition+1]
if newWord := unicode.IsLower(nextRune); newWord {
// oh ok, it was the start of a new word
continue
}
}
match.complete = true
match.end = currentRunePosition
}
*newMatches = append(*newMatches, match)
}
}
// check for new initialism matches
for _, initialism := range s.initialisms {
initialismRunes := []rune(initialism)
if s.initialismRuneEqual(initialismRunes[0], currentRune) {
newMatches = append(newMatches, &initialismMatch{
for i := range s.initialisms {
initialismRunes := s.initialismsRunes[i]
if initialismRunes[0] == currentRune {
*newMatches = append(*newMatches, initialismMatch{
start: currentRunePosition,
body: initialismRunes,
complete: false,
@@ -140,24 +285,28 @@ func (s *splitter) gatherInitialismMatches(nameRunes []rune) initialismMatches {
}
}
if matches != nil {
poolOfMatches.RedeemMatches(matches)
}
matches = newMatches
}
// up to the caller to redeem this last slice
return matches
}
func (s *splitter) mapMatchesToNameLexems(nameRunes []rune, matches initialismMatches) []nameLexem {
nameLexems := make([]nameLexem, 0)
func (s splitter) mapMatchesToNameLexems(nameRunes []rune, matches *initialismMatches) *[]nameLexem {
nameLexems := poolOfLexems.BorrowLexems()
var lastAcceptedMatch *initialismMatch
for _, match := range matches {
var lastAcceptedMatch initialismMatch
for _, match := range *matches {
if !match.complete {
continue
}
if firstMatch := lastAcceptedMatch == nil; firstMatch {
nameLexems = append(nameLexems, s.breakCasualString(nameRunes[:match.start])...)
nameLexems = append(nameLexems, s.breakInitialism(string(match.body)))
if firstMatch := lastAcceptedMatch.isZero(); firstMatch {
s.appendBrokenDownCasualString(nameLexems, nameRunes[:match.start])
*nameLexems = append(*nameLexems, s.breakInitialism(string(match.body)))
lastAcceptedMatch = match
@@ -169,63 +318,66 @@ func (s *splitter) mapMatchesToNameLexems(nameRunes []rune, matches initialismMa
}
middle := nameRunes[lastAcceptedMatch.end+1 : match.start]
nameLexems = append(nameLexems, s.breakCasualString(middle)...)
nameLexems = append(nameLexems, s.breakInitialism(string(match.body)))
s.appendBrokenDownCasualString(nameLexems, middle)
*nameLexems = append(*nameLexems, s.breakInitialism(string(match.body)))
lastAcceptedMatch = match
}
// we have not found any accepted matches
if lastAcceptedMatch == nil {
return s.breakCasualString(nameRunes)
if lastAcceptedMatch.isZero() {
*nameLexems = (*nameLexems)[:0]
s.appendBrokenDownCasualString(nameLexems, nameRunes)
} else if lastAcceptedMatch.end+1 != len(nameRunes) {
rest := nameRunes[lastAcceptedMatch.end+1:]
s.appendBrokenDownCasualString(nameLexems, rest)
}
if lastAcceptedMatch.end+1 != len(nameRunes) {
rest := nameRunes[lastAcceptedMatch.end+1:]
nameLexems = append(nameLexems, s.breakCasualString(rest)...)
}
poolOfMatches.RedeemMatches(matches)
return nameLexems
}
func (s *splitter) initialismRuneEqual(a, b rune) bool {
return a == b
}
func (s *splitter) breakInitialism(original string) nameLexem {
func (s splitter) breakInitialism(original string) nameLexem {
return newInitialismNameLexem(original, original)
}
func (s *splitter) breakCasualString(str []rune) []nameLexem {
segments := make([]nameLexem, 0)
currentSegment := ""
func (s splitter) appendBrokenDownCasualString(segments *[]nameLexem, str []rune) {
currentSegment := poolOfBuffers.BorrowBuffer(len(str)) // unlike strings.Builder, bytes.Buffer initial storage can reused
defer func() {
poolOfBuffers.RedeemBuffer(currentSegment)
}()
addCasualNameLexem := func(original string) {
segments = append(segments, newCasualNameLexem(original))
*segments = append(*segments, newCasualNameLexem(original))
}
addInitialismNameLexem := func(original, match string) {
segments = append(segments, newInitialismNameLexem(original, match))
*segments = append(*segments, newInitialismNameLexem(original, match))
}
addNameLexem := func(original string) {
if s.postSplitInitialismCheck {
for _, initialism := range s.initialisms {
if upper(initialism) == upper(original) {
addInitialismNameLexem(original, initialism)
var addNameLexem func(string)
if s.postSplitInitialismCheck {
addNameLexem = func(original string) {
for i := range s.initialisms {
if isEqualFoldIgnoreSpace(s.initialismsUpperCased[i], original) {
addInitialismNameLexem(original, s.initialisms[i])
return
}
}
}
addCasualNameLexem(original)
addCasualNameLexem(original)
}
} else {
addNameLexem = addCasualNameLexem
}
for _, rn := range string(str) {
if replace, found := nameReplaceTable[rn]; found {
if currentSegment != "" {
addNameLexem(currentSegment)
currentSegment = ""
for _, rn := range str {
if replace, found := nameReplaceTable(rn); found {
if currentSegment.Len() > 0 {
addNameLexem(currentSegment.String())
currentSegment.Reset()
}
if replace != "" {
@@ -236,27 +388,121 @@ func (s *splitter) breakCasualString(str []rune) []nameLexem {
}
if !unicode.In(rn, unicode.L, unicode.M, unicode.N, unicode.Pc) {
if currentSegment != "" {
addNameLexem(currentSegment)
currentSegment = ""
if currentSegment.Len() > 0 {
addNameLexem(currentSegment.String())
currentSegment.Reset()
}
continue
}
if unicode.IsUpper(rn) {
if currentSegment != "" {
addNameLexem(currentSegment)
if currentSegment.Len() > 0 {
addNameLexem(currentSegment.String())
}
currentSegment = ""
currentSegment.Reset()
}
currentSegment += string(rn)
currentSegment.WriteRune(rn)
}
if currentSegment != "" {
addNameLexem(currentSegment)
if currentSegment.Len() > 0 {
addNameLexem(currentSegment.String())
}
return segments
}
// isEqualFoldIgnoreSpace is the same as strings.EqualFold, but
// it ignores leading and trailing blank spaces in the compared
// string.
//
// base is assumed to be composed of upper-cased runes, and be already
// trimmed.
//
// This code is heavily inspired from strings.EqualFold.
func isEqualFoldIgnoreSpace(base []rune, str string) bool {
var i, baseIndex int
// equivalent to b := []byte(str), but without data copy
b := hackStringBytes(str)
for i < len(b) {
if c := b[i]; c < utf8.RuneSelf {
// fast path for ASCII
if c != ' ' && c != '\t' {
break
}
i++
continue
}
// unicode case
r, size := utf8.DecodeRune(b[i:])
if !unicode.IsSpace(r) {
break
}
i += size
}
if i >= len(b) {
return len(base) == 0
}
for _, baseRune := range base {
if i >= len(b) {
break
}
if c := b[i]; c < utf8.RuneSelf {
// single byte rune case (ASCII)
if baseRune >= utf8.RuneSelf {
return false
}
baseChar := byte(baseRune)
if c != baseChar &&
!('a' <= c && c <= 'z' && c-'a'+'A' == baseChar) {
return false
}
baseIndex++
i++
continue
}
// unicode case
r, size := utf8.DecodeRune(b[i:])
if unicode.ToUpper(r) != baseRune {
return false
}
baseIndex++
i += size
}
if baseIndex != len(base) {
return false
}
// all passed: now we should only have blanks
for i < len(b) {
if c := b[i]; c < utf8.RuneSelf {
// fast path for ASCII
if c != ' ' && c != '\t' {
return false
}
i++
continue
}
// unicode case
r, size := utf8.DecodeRune(b[i:])
if !unicode.IsSpace(r) {
return false
}
i += size
}
return true
}

8
vendor/github.com/go-openapi/swag/string_bytes.go generated vendored Normal file

@@ -0,0 +1,8 @@
package swag
import "unsafe"
// hackStringBytes returns the (unsafe) underlying bytes slice of a string.
func hackStringBytes(str string) []byte {
return unsafe.Slice(unsafe.StringData(str), len(str))
}


@@ -18,76 +18,25 @@ import (
"reflect"
"strings"
"unicode"
"unicode/utf8"
)
// commonInitialisms are common acronyms that are kept as whole uppercased words.
var commonInitialisms *indexOfInitialisms
// initialisms is a slice of sorted initialisms
var initialisms []string
var isInitialism func(string) bool
// GoNamePrefixFunc sets an optional rule to prefix go names
// which do not start with a letter.
//
// The prefix function is assumed to return a string that starts with an upper case letter.
//
// e.g. to help convert "123" into "{prefix}123"
//
// The default is to prefix with "X"
var GoNamePrefixFunc func(string) string
func init() {
// Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769
var configuredInitialisms = map[string]bool{
"ACL": true,
"API": true,
"ASCII": true,
"CPU": true,
"CSS": true,
"DNS": true,
"EOF": true,
"GUID": true,
"HTML": true,
"HTTPS": true,
"HTTP": true,
"ID": true,
"IP": true,
"IPv4": true,
"IPv6": true,
"JSON": true,
"LHS": true,
"OAI": true,
"QPS": true,
"RAM": true,
"RHS": true,
"RPC": true,
"SLA": true,
"SMTP": true,
"SQL": true,
"SSH": true,
"TCP": true,
"TLS": true,
"TTL": true,
"UDP": true,
"UI": true,
"UID": true,
"UUID": true,
"URI": true,
"URL": true,
"UTF8": true,
"VM": true,
"XML": true,
"XMPP": true,
"XSRF": true,
"XSS": true,
func prefixFunc(name, in string) string {
if GoNamePrefixFunc == nil {
return "X" + in
}
// a thread-safe index of initialisms
commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms)
initialisms = commonInitialisms.sorted()
// a test function
isInitialism = commonInitialisms.isInitialism
return GoNamePrefixFunc(name) + in
}
const (
@@ -156,25 +105,9 @@ func SplitByFormat(data, format string) []string {
return result
}
type byInitialism []string
func (s byInitialism) Len() int {
return len(s)
}
func (s byInitialism) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
func (s byInitialism) Less(i, j int) bool {
if len(s[i]) != len(s[j]) {
return len(s[i]) < len(s[j])
}
return strings.Compare(s[i], s[j]) > 0
}
// Removes leading whitespaces
func trim(str string) string {
return strings.Trim(str, " ")
return strings.TrimSpace(str)
}
// Shortcut to strings.ToUpper()
@@ -188,15 +121,20 @@ func lower(str string) string {
}
// Camelize an uppercased word
func Camelize(word string) (camelized string) {
func Camelize(word string) string {
camelized := poolOfBuffers.BorrowBuffer(len(word))
defer func() {
poolOfBuffers.RedeemBuffer(camelized)
}()
for pos, ru := range []rune(word) {
if pos > 0 {
camelized += string(unicode.ToLower(ru))
camelized.WriteRune(unicode.ToLower(ru))
} else {
camelized += string(unicode.ToUpper(ru))
camelized.WriteRune(unicode.ToUpper(ru))
}
}
return
return camelized.String()
}
// ToFileName lowercases and underscores a go type name
@@ -224,33 +162,40 @@ func ToCommandName(name string) string {
// ToHumanNameLower represents a code name as a human series of words
func ToHumanNameLower(name string) string {
in := newSplitter(withPostSplitInitialismCheck).split(name)
out := make([]string, 0, len(in))
s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
in := s.split(name)
poolOfSplitters.RedeemSplitter(s)
out := make([]string, 0, len(*in))
for _, w := range in {
for _, w := range *in {
if !w.IsInitialism() {
out = append(out, lower(w.GetOriginal()))
} else {
out = append(out, w.GetOriginal())
out = append(out, trim(w.GetOriginal()))
}
}
poolOfLexems.RedeemLexems(in)
return strings.Join(out, " ")
}
// ToHumanNameTitle represents a code name as a human series of words with the first letters titleized
func ToHumanNameTitle(name string) string {
in := newSplitter(withPostSplitInitialismCheck).split(name)
s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
in := s.split(name)
poolOfSplitters.RedeemSplitter(s)
out := make([]string, 0, len(in))
for _, w := range in {
original := w.GetOriginal()
out := make([]string, 0, len(*in))
for _, w := range *in {
original := trim(w.GetOriginal())
if !w.IsInitialism() {
out = append(out, Camelize(original))
} else {
out = append(out, original)
}
}
poolOfLexems.RedeemLexems(in)
return strings.Join(out, " ")
}
@@ -264,7 +209,7 @@ func ToJSONName(name string) string {
out = append(out, lower(w))
continue
}
out = append(out, Camelize(w))
out = append(out, Camelize(trim(w)))
}
return strings.Join(out, "")
}
@@ -283,35 +228,70 @@ func ToVarName(name string) string {
// ToGoName translates a swagger name which can be underscored or camel cased to a name that golint likes
func ToGoName(name string) string {
lexems := newSplitter(withPostSplitInitialismCheck).split(name)
s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
lexems := s.split(name)
poolOfSplitters.RedeemSplitter(s)
defer func() {
poolOfLexems.RedeemLexems(lexems)
}()
lexemes := *lexems
result := ""
for _, lexem := range lexems {
if len(lexemes) == 0 {
return ""
}
result := poolOfBuffers.BorrowBuffer(len(name))
defer func() {
poolOfBuffers.RedeemBuffer(result)
}()
// check if not starting with a letter, upper case
firstPart := lexemes[0].GetUnsafeGoName()
if lexemes[0].IsInitialism() {
firstPart = upper(firstPart)
}
if c := firstPart[0]; c < utf8.RuneSelf {
// ASCII
switch {
case 'A' <= c && c <= 'Z':
result.WriteString(firstPart)
case 'a' <= c && c <= 'z':
result.WriteByte(c - 'a' + 'A')
result.WriteString(firstPart[1:])
default:
result.WriteString(prefixFunc(name, firstPart))
// NOTE: no longer check if prefixFunc returns a string that starts with uppercase:
// assume this is always the case
}
} else {
// unicode
firstRune, _ := utf8.DecodeRuneInString(firstPart)
switch {
case !unicode.IsLetter(firstRune):
result.WriteString(prefixFunc(name, firstPart))
case !unicode.IsUpper(firstRune):
result.WriteString(prefixFunc(name, firstPart))
/*
result.WriteRune(unicode.ToUpper(firstRune))
result.WriteString(firstPart[offset:])
*/
default:
result.WriteString(firstPart)
}
}
for _, lexem := range lexemes[1:] {
goName := lexem.GetUnsafeGoName()
// to support old behavior
if lexem.IsInitialism() {
goName = upper(goName)
}
result += goName
result.WriteString(goName)
}
if len(result) > 0 {
// Only prefix with X when the first character isn't an ascii letter
first := []rune(result)[0]
if !unicode.IsLetter(first) || (first > unicode.MaxASCII && !unicode.IsUpper(first)) {
if GoNamePrefixFunc == nil {
return "X" + result
}
result = GoNamePrefixFunc(name) + result
}
first = []rune(result)[0]
if unicode.IsLetter(first) && !unicode.IsUpper(first) {
result = string(append([]rune{unicode.ToUpper(first)}, []rune(result)[1:]...))
}
}
return result
return result.String()
}
// ContainsStrings searches a slice of strings for a case-sensitive match
@@ -343,7 +323,7 @@ type zeroable interface {
func IsZero(data interface{}) bool {
v := reflect.ValueOf(data)
// check for nil data
switch v.Kind() {
switch v.Kind() { //nolint:exhaustive
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
if v.IsNil() {
return true
@@ -356,7 +336,7 @@ func IsZero(data interface{}) bool {
}
// continue with slightly more complex reflection
switch v.Kind() {
switch v.Kind() { //nolint:exhaustive
case reflect.String:
return v.Len() == 0
case reflect.Bool:
@@ -376,16 +356,6 @@ func IsZero(data interface{}) bool {
}
}
// AddInitialisms add additional initialisms
func AddInitialisms(words ...string) {
for _, word := range words {
// commonInitialisms[upper(word)] = true
commonInitialisms.add(upper(word))
}
// sort again
initialisms = commonInitialisms.sorted()
}
// CommandLineOptionsGroup represents a group of user-defined command line options
type CommandLineOptionsGroup struct {
ShortDescription string


@@ -16,8 +16,11 @@ package swag
import (
"encoding/json"
"errors"
"fmt"
"path/filepath"
"reflect"
"sort"
"strconv"
"github.com/mailru/easyjson/jlexer"
@@ -48,7 +51,7 @@ func BytesToYAMLDoc(data []byte) (interface{}, error) {
return nil, err
}
if document.Kind != yaml.DocumentNode || len(document.Content) != 1 || document.Content[0].Kind != yaml.MappingNode {
return nil, fmt.Errorf("only YAML documents that are objects are supported")
return nil, errors.New("only YAML documents that are objects are supported")
}
return &document, nil
}
@@ -147,7 +150,7 @@ func yamlScalar(node *yaml.Node) (interface{}, error) {
case yamlTimestamp:
return node.Value, nil
case yamlNull:
return nil, nil
return nil, nil //nolint:nilnil
default:
return nil, fmt.Errorf("YAML tag %q is not supported", node.LongTag())
}
@@ -245,7 +248,27 @@ func (s JSONMapSlice) MarshalYAML() (interface{}, error) {
return yaml.Marshal(&n)
}
func isNil(input interface{}) bool {
if input == nil {
return true
}
kind := reflect.TypeOf(input).Kind()
switch kind { //nolint:exhaustive
case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan:
return reflect.ValueOf(input).IsNil()
default:
return false
}
}
func json2yaml(item interface{}) (*yaml.Node, error) {
if isNil(item) {
return &yaml.Node{
Kind: yaml.ScalarNode,
Value: "null",
}, nil
}
switch val := item.(type) {
case JSONMapSlice:
var n yaml.Node
@@ -265,7 +288,14 @@ func json2yaml(item interface{}) (*yaml.Node, error) {
case map[string]interface{}:
var n yaml.Node
n.Kind = yaml.MappingNode
for k, v := range val {
keys := make([]string, 0, len(val))
for k := range val {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
v := val[k]
childNode, err := json2yaml(v)
if err != nil {
return nil, err
@@ -318,8 +348,9 @@ func json2yaml(item interface{}) (*yaml.Node, error) {
Tag: yamlBoolScalar,
Value: strconv.FormatBool(val),
}, nil
default:
return nil, fmt.Errorf("unhandled type: %T", val)
}
return nil, nil
}
// JSONMapItem represents the value of a key in a JSON object held by JSONMapSlice


@@ -1,191 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and
distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright
owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities
that control, are controlled by, or are under common control with that entity.
For the purposes of this definition, "control" means (i) the power, direct or
indirect, to cause the direction or management of such entity, whether by
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising
permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including
but not limited to software source code, documentation source, and configuration
files.
"Object" form shall mean any form resulting from mechanical transformation or
translation of a Source form, including but not limited to compiled object code,
generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made
available under the License, as indicated by a copyright notice that is included
in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that
is based on (or derived from) the Work and for which the editorial revisions,
annotations, elaborations, or other modifications represent, as a whole, an
original work of authorship. For the purposes of this License, Derivative Works
shall not include works that remain separable from, or merely link (or bind by
name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version
of the Work and any modifications or additions to that Work or Derivative Works
thereof, that is intentionally submitted to Licensor for inclusion in the Work
by the copyright owner or by an individual or Legal Entity authorized to submit
on behalf of the copyright owner. For the purposes of this definition,
"submitted" means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems, and
issue tracking systems that are managed by, or on behalf of, the Licensor for
the purpose of discussing and improving the Work, but excluding communication
that is conspicuously marked or otherwise designated in writing by the copyright
owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
of whom a Contribution has been received by Licensor and subsequently
incorporated within the Work.
2. Grant of Copyright License.
Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the Work and such
Derivative Works in Source or Object form.
3. Grant of Patent License.
Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable (except as stated in this section) patent license to make, have
made, use, offer to sell, sell, import, and otherwise transfer the Work, where
such license applies only to those patent claims licensable by such Contributor
that are necessarily infringed by their Contribution(s) alone or by combination
of their Contribution(s) with the Work to which such Contribution(s) was
submitted. If You institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work or a
Contribution incorporated within the Work constitutes direct or contributory
patent infringement, then any patent licenses granted to You under this License
for that Work shall terminate as of the date such litigation is filed.
4. Redistribution.
You may reproduce and distribute copies of the Work or Derivative Works thereof
in any medium, with or without modifications, and in Source or Object form,
provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of
this License; and
You must cause any modified files to carry prominent notices stating that You
changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute,
all copyright, patent, trademark, and attribution notices from the Source form
of the Work, excluding those notices that do not pertain to any part of the
Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any
Derivative Works that You distribute must include a readable copy of the
attribution notices contained within such NOTICE file, excluding those notices
that do not pertain to any part of the Derivative Works, in at least one of the
following places: within a NOTICE text file distributed as part of the
Derivative Works; within the Source form or documentation, if provided along
with the Derivative Works; or, within a display generated by the Derivative
Works, if and wherever such third-party notices normally appear. The contents of
the NOTICE file are for informational purposes only and do not modify the
License. You may add Your own attribution notices within Derivative Works that
You distribute, alongside or as an addendum to the NOTICE text from the Work,
provided that such additional attribution notices cannot be construed as
modifying the License.
You may add Your own copyright statement to Your modifications and may provide
additional or different license terms and conditions for use, reproduction, or
distribution of Your modifications, or for any such Derivative Works as a whole,
provided Your use, reproduction, and distribution of the Work otherwise complies
with the conditions stated in this License.
5. Submission of Contributions.
Unless You explicitly state otherwise, any Contribution intentionally submitted
for inclusion in the Work by You to the Licensor shall be under the terms and
conditions of this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify the terms of
any separate license agreement you may have executed with Licensor regarding
such Contributions.
6. Trademarks.
This License does not grant permission to use the trade names, trademarks,
service marks, or product names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the NOTICE file.
7. Disclaimer of Warranty.
Unless required by applicable law or agreed to in writing, Licensor provides the
Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
including, without limitation, any warranties or conditions of TITLE,
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
solely responsible for determining the appropriateness of using or
redistributing the Work and assume any risks associated with Your exercise of
permissions under this License.
8. Limitation of Liability.
In no event and under no legal theory, whether in tort (including negligence),
contract, or otherwise, unless required by applicable law (such as deliberate
and grossly negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special, incidental,
or consequential damages of any character arising as a result of this License or
out of the use or inability to use the Work (including but not limited to
damages for loss of goodwill, work stoppage, computer failure or malfunction, or
any and all other commercial damages or losses), even if such Contributor has
been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability.
While redistributing the Work or Derivative Works thereof, You may choose to
offer, and charge a fee for, acceptance of support, warranty, indemnity, or
other liability obligations and/or rights consistent with this License. However,
in accepting such obligations, You may act only on Your own behalf and on Your
sole responsibility, not on behalf of any other Contributor, and only if You
agree to indemnify, defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason of your
accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work
To apply the Apache License to your work, attach the following boilerplate
notice, with the fields enclosed by brackets "[]" replaced with your own
identifying information. (Don't include the brackets!) The text should be
enclosed in the appropriate comment syntax for the file format. We also
recommend that a file or class name and description of purpose be included on
the same "printed page" as the copyright notice for easier identification within
third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -1,133 +0,0 @@
/*
Copyright 2013 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package lru implements an LRU cache.
package lru
import "container/list"
// Cache is an LRU cache. It is not safe for concurrent access.
type Cache struct {
// MaxEntries is the maximum number of cache entries before
// an item is evicted. Zero means no limit.
MaxEntries int
// OnEvicted optionally specifies a callback function to be
// executed when an entry is purged from the cache.
OnEvicted func(key Key, value interface{})
ll *list.List
cache map[interface{}]*list.Element
}
// A Key may be any value that is comparable. See http://golang.org/ref/spec#Comparison_operators
type Key interface{}
type entry struct {
key Key
value interface{}
}
// New creates a new Cache.
// If maxEntries is zero, the cache has no limit and it's assumed
// that eviction is done by the caller.
func New(maxEntries int) *Cache {
return &Cache{
MaxEntries: maxEntries,
ll: list.New(),
cache: make(map[interface{}]*list.Element),
}
}
// Add adds a value to the cache.
func (c *Cache) Add(key Key, value interface{}) {
if c.cache == nil {
c.cache = make(map[interface{}]*list.Element)
c.ll = list.New()
}
if ee, ok := c.cache[key]; ok {
c.ll.MoveToFront(ee)
ee.Value.(*entry).value = value
return
}
ele := c.ll.PushFront(&entry{key, value})
c.cache[key] = ele
if c.MaxEntries != 0 && c.ll.Len() > c.MaxEntries {
c.RemoveOldest()
}
}
// Get looks up a key's value from the cache.
func (c *Cache) Get(key Key) (value interface{}, ok bool) {
if c.cache == nil {
return
}
if ele, hit := c.cache[key]; hit {
c.ll.MoveToFront(ele)
return ele.Value.(*entry).value, true
}
return
}
// Remove removes the provided key from the cache.
func (c *Cache) Remove(key Key) {
if c.cache == nil {
return
}
if ele, hit := c.cache[key]; hit {
c.removeElement(ele)
}
}
// RemoveOldest removes the oldest item from the cache.
func (c *Cache) RemoveOldest() {
if c.cache == nil {
return
}
ele := c.ll.Back()
if ele != nil {
c.removeElement(ele)
}
}
func (c *Cache) removeElement(e *list.Element) {
c.ll.Remove(e)
kv := e.Value.(*entry)
delete(c.cache, kv.key)
if c.OnEvicted != nil {
c.OnEvicted(kv.key, kv.value)
}
}
// Len returns the number of items in the cache.
func (c *Cache) Len() int {
if c.cache == nil {
return 0
}
return c.ll.Len()
}
// Clear purges all stored items from the cache.
func (c *Cache) Clear() {
if c.OnEvicted != nil {
for _, e := range c.cache {
kv := e.Value.(*entry)
c.OnEvicted(kv.key, kv.value)
}
}
c.ll = nil
c.cache = nil
}
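Since this LRU implementation is deleted by this change, a minimal usage sketch of the API defined above (New, Add, Get, Len, OnEvicted) may help when reviewing remaining call sites. The import path is assumed from the usual groupcache layout and is not confirmed by this hunk.
package main
import (
	"fmt"
	lru "github.com/golang/groupcache/lru" // assumed import path for the file above
)
func main() {
	c := lru.New(2) // keep at most two entries before evicting
	c.OnEvicted = func(key lru.Key, value interface{}) {
		fmt.Println("evicted:", key)
	}
	c.Add("a", 1)
	c.Add("b", 2)
	c.Add("c", 3) // evicts "a", the least recently used entry
	if v, ok := c.Get("b"); ok {
		fmt.Println("b =", v) // b = 2
	}
	fmt.Println("len =", c.Len()) // len = 2
}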

1
vendor/github.com/google/btree/.travis.yml generated vendored Normal file

@@ -0,0 +1 @@
language: go

202
vendor/github.com/google/btree/LICENSE generated vendored Normal file

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

12
vendor/github.com/google/btree/README.md generated vendored Normal file

@@ -0,0 +1,12 @@
# BTree implementation for Go
![Travis CI Build Status](https://api.travis-ci.org/google/btree.svg?branch=master)
This package provides an in-memory B-Tree implementation for Go, useful as
an ordered, mutable data structure.
The API is based off of the wonderful
http://godoc.org/github.com/petar/GoLLRB/llrb, and is meant to allow btree to
act as a drop-in replacement for gollrb trees.
See http://godoc.org/github.com/google/btree for documentation.
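As an orientation aid, here is a minimal usage sketch based on the API defined in btree.go below (New, ReplaceOrInsert, Has, Len, AscendRange) and its built-in Int item type; treat it as an illustration rather than upstream documentation.
package main
import (
	"fmt"
	"github.com/google/btree"
)
func main() {
	tr := btree.New(2) // degree 2 gives a 2-3-4 tree
	for i := 0; i < 10; i++ {
		tr.ReplaceOrInsert(btree.Int(i))
	}
	fmt.Println("len:", tr.Len())               // len: 10
	fmt.Println("has 3:", tr.Has(btree.Int(3))) // has 3: true
	// In-order iteration over the half-open range [4, 7).
	tr.AscendRange(btree.Int(4), btree.Int(7), func(i btree.Item) bool {
		fmt.Println(i) // 4, then 5, then 6
		return true
	})
}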

890
vendor/github.com/google/btree/btree.go generated vendored Normal file

@@ -0,0 +1,890 @@
// Copyright 2014 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package btree implements in-memory B-Trees of arbitrary degree.
//
// btree implements an in-memory B-Tree for use as an ordered data structure.
// It is not meant for persistent storage solutions.
//
// It has a flatter structure than an equivalent red-black or other binary tree,
// which in some cases yields better memory usage and/or performance.
// See some discussion on the matter here:
// http://google-opensource.blogspot.com/2013/01/c-containers-that-save-memory-and-time.html
// Note, though, that this project is in no way related to the C++ B-Tree
// implementation written about there.
//
// Within this tree, each node contains a slice of items and a (possibly nil)
// slice of children. For basic numeric values or raw structs, this can cause
// efficiency differences when compared to equivalent C++ template code that
// stores values in arrays within the node:
// * Due to the overhead of storing values as interfaces (each
// value needs to be stored as the value itself, then 2 words for the
// interface pointing to that value and its type), resulting in higher
// memory use.
// * Since interfaces can point to values anywhere in memory, values are
// most likely not stored in contiguous blocks, resulting in a higher
// number of cache misses.
// These issues don't tend to matter, though, when working with strings or other
// heap-allocated structures, since C++-equivalent structures also must store
// pointers and also distribute their values across the heap.
//
// This implementation is designed to be a drop-in replacement to gollrb.LLRB
// trees, (http://github.com/petar/gollrb), an excellent and probably the most
// widely used ordered tree implementation in the Go ecosystem currently.
// Its functions, therefore, exactly mirror those of
// llrb.LLRB where possible. Unlike gollrb, though, we currently don't
// support storing multiple equivalent values.
package btree
import (
"fmt"
"io"
"sort"
"strings"
"sync"
)
// Item represents a single object in the tree.
type Item interface {
// Less tests whether the current item is less than the given argument.
//
// This must provide a strict weak ordering.
// If !a.Less(b) && !b.Less(a), we treat this to mean a == b (i.e. we can only
// hold one of either a or b in the tree).
Less(than Item) bool
}
const (
DefaultFreeListSize = 32
)
var (
nilItems = make(items, 16)
nilChildren = make(children, 16)
)
// FreeList represents a free list of btree nodes. By default each
// BTree has its own FreeList, but multiple BTrees can share the same
// FreeList.
// Two Btrees using the same freelist are safe for concurrent write access.
type FreeList struct {
mu sync.Mutex
freelist []*node
}
// NewFreeList creates a new free list.
// size is the maximum size of the returned free list.
func NewFreeList(size int) *FreeList {
return &FreeList{freelist: make([]*node, 0, size)}
}
func (f *FreeList) newNode() (n *node) {
f.mu.Lock()
index := len(f.freelist) - 1
if index < 0 {
f.mu.Unlock()
return new(node)
}
n = f.freelist[index]
f.freelist[index] = nil
f.freelist = f.freelist[:index]
f.mu.Unlock()
return
}
// freeNode adds the given node to the list, returning true if it was added
// and false if it was discarded.
func (f *FreeList) freeNode(n *node) (out bool) {
f.mu.Lock()
if len(f.freelist) < cap(f.freelist) {
f.freelist = append(f.freelist, n)
out = true
}
f.mu.Unlock()
return
}
// ItemIterator allows callers of Ascend* to iterate in-order over portions of
// the tree. When this function returns false, iteration will stop and the
// associated Ascend* function will immediately return.
type ItemIterator func(i Item) bool
// New creates a new B-Tree with the given degree.
//
// New(2), for example, will create a 2-3-4 tree (each node contains 1-3 items
// and 2-4 children).
func New(degree int) *BTree {
return NewWithFreeList(degree, NewFreeList(DefaultFreeListSize))
}
// NewWithFreeList creates a new B-Tree that uses the given node free list.
func NewWithFreeList(degree int, f *FreeList) *BTree {
if degree <= 1 {
panic("bad degree")
}
return &BTree{
degree: degree,
cow: &copyOnWriteContext{freelist: f},
}
}
// items stores items in a node.
type items []Item
// insertAt inserts a value into the given index, pushing all subsequent values
// forward.
func (s *items) insertAt(index int, item Item) {
*s = append(*s, nil)
if index < len(*s) {
copy((*s)[index+1:], (*s)[index:])
}
(*s)[index] = item
}
// removeAt removes a value at a given index, pulling all subsequent values
// back.
func (s *items) removeAt(index int) Item {
item := (*s)[index]
copy((*s)[index:], (*s)[index+1:])
(*s)[len(*s)-1] = nil
*s = (*s)[:len(*s)-1]
return item
}
// pop removes and returns the last element in the list.
func (s *items) pop() (out Item) {
index := len(*s) - 1
out = (*s)[index]
(*s)[index] = nil
*s = (*s)[:index]
return
}
// truncate truncates this instance at index so that it contains only the
// first index items. index must be less than or equal to length.
func (s *items) truncate(index int) {
var toClear items
*s, toClear = (*s)[:index], (*s)[index:]
for len(toClear) > 0 {
toClear = toClear[copy(toClear, nilItems):]
}
}
// find returns the index where the given item should be inserted into this
// list. 'found' is true if the item already exists in the list at the given
// index.
func (s items) find(item Item) (index int, found bool) {
i := sort.Search(len(s), func(i int) bool {
return item.Less(s[i])
})
if i > 0 && !s[i-1].Less(item) {
return i - 1, true
}
return i, false
}
// children stores child nodes in a node.
type children []*node
// insertAt inserts a value into the given index, pushing all subsequent values
// forward.
func (s *children) insertAt(index int, n *node) {
*s = append(*s, nil)
if index < len(*s) {
copy((*s)[index+1:], (*s)[index:])
}
(*s)[index] = n
}
// removeAt removes a value at a given index, pulling all subsequent values
// back.
func (s *children) removeAt(index int) *node {
n := (*s)[index]
copy((*s)[index:], (*s)[index+1:])
(*s)[len(*s)-1] = nil
*s = (*s)[:len(*s)-1]
return n
}
// pop removes and returns the last element in the list.
func (s *children) pop() (out *node) {
index := len(*s) - 1
out = (*s)[index]
(*s)[index] = nil
*s = (*s)[:index]
return
}
// truncate truncates this instance at index so that it contains only the
// first index children. index must be less than or equal to length.
func (s *children) truncate(index int) {
var toClear children
*s, toClear = (*s)[:index], (*s)[index:]
for len(toClear) > 0 {
toClear = toClear[copy(toClear, nilChildren):]
}
}
// node is an internal node in a tree.
//
// It must at all times maintain the invariant that either
// * len(children) == 0, len(items) unconstrained
// * len(children) == len(items) + 1
type node struct {
items items
children children
cow *copyOnWriteContext
}
func (n *node) mutableFor(cow *copyOnWriteContext) *node {
if n.cow == cow {
return n
}
out := cow.newNode()
if cap(out.items) >= len(n.items) {
out.items = out.items[:len(n.items)]
} else {
out.items = make(items, len(n.items), cap(n.items))
}
copy(out.items, n.items)
// Copy children
if cap(out.children) >= len(n.children) {
out.children = out.children[:len(n.children)]
} else {
out.children = make(children, len(n.children), cap(n.children))
}
copy(out.children, n.children)
return out
}
func (n *node) mutableChild(i int) *node {
c := n.children[i].mutableFor(n.cow)
n.children[i] = c
return c
}
// split splits the given node at the given index. The current node shrinks,
// and this function returns the item that existed at that index and a new node
// containing all items/children after it.
func (n *node) split(i int) (Item, *node) {
item := n.items[i]
next := n.cow.newNode()
next.items = append(next.items, n.items[i+1:]...)
n.items.truncate(i)
if len(n.children) > 0 {
next.children = append(next.children, n.children[i+1:]...)
n.children.truncate(i + 1)
}
return item, next
}
// maybeSplitChild checks if a child should be split, and if so splits it.
// Returns whether or not a split occurred.
func (n *node) maybeSplitChild(i, maxItems int) bool {
if len(n.children[i].items) < maxItems {
return false
}
first := n.mutableChild(i)
item, second := first.split(maxItems / 2)
n.items.insertAt(i, item)
n.children.insertAt(i+1, second)
return true
}
// insert inserts an item into the subtree rooted at this node, making sure
// no nodes in the subtree exceed maxItems items. Should an equivalent item
// be found/replaced by insert, it will be returned.
func (n *node) insert(item Item, maxItems int) Item {
i, found := n.items.find(item)
if found {
out := n.items[i]
n.items[i] = item
return out
}
if len(n.children) == 0 {
n.items.insertAt(i, item)
return nil
}
if n.maybeSplitChild(i, maxItems) {
inTree := n.items[i]
switch {
case item.Less(inTree):
// no change, we want first split node
case inTree.Less(item):
i++ // we want second split node
default:
out := n.items[i]
n.items[i] = item
return out
}
}
return n.mutableChild(i).insert(item, maxItems)
}
// get finds the given key in the subtree and returns it.
func (n *node) get(key Item) Item {
i, found := n.items.find(key)
if found {
return n.items[i]
} else if len(n.children) > 0 {
return n.children[i].get(key)
}
return nil
}
// min returns the first item in the subtree.
func min(n *node) Item {
if n == nil {
return nil
}
for len(n.children) > 0 {
n = n.children[0]
}
if len(n.items) == 0 {
return nil
}
return n.items[0]
}
// max returns the last item in the subtree.
func max(n *node) Item {
if n == nil {
return nil
}
for len(n.children) > 0 {
n = n.children[len(n.children)-1]
}
if len(n.items) == 0 {
return nil
}
return n.items[len(n.items)-1]
}
// toRemove details what item to remove in a node.remove call.
type toRemove int
const (
removeItem toRemove = iota // removes the given item
removeMin // removes smallest item in the subtree
removeMax // removes largest item in the subtree
)
// remove removes an item from the subtree rooted at this node.
func (n *node) remove(item Item, minItems int, typ toRemove) Item {
var i int
var found bool
switch typ {
case removeMax:
if len(n.children) == 0 {
return n.items.pop()
}
i = len(n.items)
case removeMin:
if len(n.children) == 0 {
return n.items.removeAt(0)
}
i = 0
case removeItem:
i, found = n.items.find(item)
if len(n.children) == 0 {
if found {
return n.items.removeAt(i)
}
return nil
}
default:
panic("invalid type")
}
// If we get to here, we have children.
if len(n.children[i].items) <= minItems {
return n.growChildAndRemove(i, item, minItems, typ)
}
child := n.mutableChild(i)
// Either we had enough items to begin with, or we've done some
// merging/stealing, because we've got enough now and we're ready to return
// stuff.
if found {
// The item exists at index 'i', and the child we've selected can give us a
// predecessor, since if we've gotten here it's got > minItems items in it.
out := n.items[i]
// We use our special-case 'remove' call with typ=maxItem to pull the
// predecessor of item i (the rightmost leaf of our immediate left child)
// and set it into where we pulled the item from.
n.items[i] = child.remove(nil, minItems, removeMax)
return out
}
// Final recursive call. Once we're here, we know that the item isn't in this
// node and that the child is big enough to remove from.
return child.remove(item, minItems, typ)
}
// growChildAndRemove grows child 'i' to make sure it's possible to remove an
// item from it while keeping it at minItems, then calls remove to actually
// remove it.
//
// Most documentation says we have to do two sets of special casing:
// 1) item is in this node
// 2) item is in child
// In both cases, we need to handle the two subcases:
// A) node has enough values that it can spare one
// B) node doesn't have enough values
// For the latter, we have to check:
// a) left sibling has node to spare
// b) right sibling has node to spare
// c) we must merge
// To simplify our code here, we handle cases #1 and #2 the same:
// If a node doesn't have enough items, we make sure it does (using a,b,c).
// We then simply redo our remove call, and the second time (regardless of
// whether we're in case 1 or 2), we'll have enough items and can guarantee
// that we hit case A.
func (n *node) growChildAndRemove(i int, item Item, minItems int, typ toRemove) Item {
if i > 0 && len(n.children[i-1].items) > minItems {
// Steal from left child
child := n.mutableChild(i)
stealFrom := n.mutableChild(i - 1)
stolenItem := stealFrom.items.pop()
child.items.insertAt(0, n.items[i-1])
n.items[i-1] = stolenItem
if len(stealFrom.children) > 0 {
child.children.insertAt(0, stealFrom.children.pop())
}
} else if i < len(n.items) && len(n.children[i+1].items) > minItems {
// steal from right child
child := n.mutableChild(i)
stealFrom := n.mutableChild(i + 1)
stolenItem := stealFrom.items.removeAt(0)
child.items = append(child.items, n.items[i])
n.items[i] = stolenItem
if len(stealFrom.children) > 0 {
child.children = append(child.children, stealFrom.children.removeAt(0))
}
} else {
if i >= len(n.items) {
i--
}
child := n.mutableChild(i)
// merge with right child
mergeItem := n.items.removeAt(i)
mergeChild := n.children.removeAt(i + 1)
child.items = append(child.items, mergeItem)
child.items = append(child.items, mergeChild.items...)
child.children = append(child.children, mergeChild.children...)
n.cow.freeNode(mergeChild)
}
return n.remove(item, minItems, typ)
}
type direction int
const (
descend = direction(-1)
ascend = direction(+1)
)
// iterate provides a simple method for iterating over elements in the tree.
//
// When ascending, the 'start' should be less than 'stop' and when descending,
// the 'start' should be greater than 'stop'. Setting 'includeStart' to true
// will force the iterator to include the first item when it equals 'start',
// thus creating "greaterOrEqual" or "lessThanEqual" queries rather than just
// "greaterThan" or "lessThan" queries.
func (n *node) iterate(dir direction, start, stop Item, includeStart bool, hit bool, iter ItemIterator) (bool, bool) {
var ok, found bool
var index int
switch dir {
case ascend:
if start != nil {
index, _ = n.items.find(start)
}
for i := index; i < len(n.items); i++ {
if len(n.children) > 0 {
if hit, ok = n.children[i].iterate(dir, start, stop, includeStart, hit, iter); !ok {
return hit, false
}
}
if !includeStart && !hit && start != nil && !start.Less(n.items[i]) {
hit = true
continue
}
hit = true
if stop != nil && !n.items[i].Less(stop) {
return hit, false
}
if !iter(n.items[i]) {
return hit, false
}
}
if len(n.children) > 0 {
if hit, ok = n.children[len(n.children)-1].iterate(dir, start, stop, includeStart, hit, iter); !ok {
return hit, false
}
}
case descend:
if start != nil {
index, found = n.items.find(start)
if !found {
index = index - 1
}
} else {
index = len(n.items) - 1
}
for i := index; i >= 0; i-- {
if start != nil && !n.items[i].Less(start) {
if !includeStart || hit || start.Less(n.items[i]) {
continue
}
}
if len(n.children) > 0 {
if hit, ok = n.children[i+1].iterate(dir, start, stop, includeStart, hit, iter); !ok {
return hit, false
}
}
if stop != nil && !stop.Less(n.items[i]) {
return hit, false // continue
}
hit = true
if !iter(n.items[i]) {
return hit, false
}
}
if len(n.children) > 0 {
if hit, ok = n.children[0].iterate(dir, start, stop, includeStart, hit, iter); !ok {
return hit, false
}
}
}
return hit, true
}
// Used for testing/debugging purposes.
func (n *node) print(w io.Writer, level int) {
fmt.Fprintf(w, "%sNODE:%v\n", strings.Repeat(" ", level), n.items)
for _, c := range n.children {
c.print(w, level+1)
}
}
// BTree is an implementation of a B-Tree.
//
// BTree stores Item instances in an ordered structure, allowing easy insertion,
// removal, and iteration.
//
// Write operations are not safe for concurrent mutation by multiple
// goroutines, but Read operations are.
type BTree struct {
degree int
length int
root *node
cow *copyOnWriteContext
}
// copyOnWriteContext pointers determine node ownership... a tree with a write
// context equivalent to a node's write context is allowed to modify that node.
// A tree whose write context does not match a node's is not allowed to modify
// it, and must create a new, writable copy (i.e., it's a Clone).
//
// When doing any write operation, we maintain the invariant that the current
// node's context is equal to the context of the tree that requested the write.
// We do this by, before we descend into any node, creating a copy with the
// correct context if the contexts don't match.
//
// Since the node we're currently visiting on any write has the requesting
// tree's context, that node is modifiable in place. Children of that node may
// not share context, but before we descend into them, we'll make a mutable
// copy.
type copyOnWriteContext struct {
freelist *FreeList
}
// Clone clones the btree, lazily. Clone should not be called concurrently,
// but the original tree (t) and the new tree (t2) can be used concurrently
// once the Clone call completes.
//
// The internal tree structure of b is marked read-only and shared between t and
// t2. Writes to both t and t2 use copy-on-write logic, creating new nodes
// whenever one of b's original nodes would have been modified. Read operations
// should have no performance degradation. Write operations for both t and t2
// will initially experience minor slow-downs caused by additional allocs and
// copies due to the aforementioned copy-on-write logic, but should converge to
// the original performance characteristics of the original tree.
func (t *BTree) Clone() (t2 *BTree) {
// Create two entirely new copy-on-write contexts.
// This operation effectively creates three trees:
// the original, shared nodes (old b.cow)
// the new b.cow nodes
// the new out.cow nodes
cow1, cow2 := *t.cow, *t.cow
out := *t
t.cow = &cow1
out.cow = &cow2
return &out
}
// maxItems returns the max number of items to allow per node.
func (t *BTree) maxItems() int {
return t.degree*2 - 1
}
// minItems returns the min number of items to allow per node (ignored for the
// root node).
func (t *BTree) minItems() int {
return t.degree - 1
}
func (c *copyOnWriteContext) newNode() (n *node) {
n = c.freelist.newNode()
n.cow = c
return
}
type freeType int
const (
ftFreelistFull freeType = iota // node was freed (available for GC, not stored in freelist)
ftStored // node was stored in the freelist for later use
ftNotOwned // node was ignored by COW, since it's owned by another one
)
// freeNode frees a node within a given COW context, if it's owned by that
// context. It returns what happened to the node (see freeType const
// documentation).
func (c *copyOnWriteContext) freeNode(n *node) freeType {
if n.cow == c {
// clear to allow GC
n.items.truncate(0)
n.children.truncate(0)
n.cow = nil
if c.freelist.freeNode(n) {
return ftStored
} else {
return ftFreelistFull
}
} else {
return ftNotOwned
}
}
// ReplaceOrInsert adds the given item to the tree. If an item in the tree
// already equals the given one, it is removed from the tree and returned.
// Otherwise, nil is returned.
//
// nil cannot be added to the tree (will panic).
func (t *BTree) ReplaceOrInsert(item Item) Item {
if item == nil {
panic("nil item being added to BTree")
}
if t.root == nil {
t.root = t.cow.newNode()
t.root.items = append(t.root.items, item)
t.length++
return nil
} else {
t.root = t.root.mutableFor(t.cow)
if len(t.root.items) >= t.maxItems() {
item2, second := t.root.split(t.maxItems() / 2)
oldroot := t.root
t.root = t.cow.newNode()
t.root.items = append(t.root.items, item2)
t.root.children = append(t.root.children, oldroot, second)
}
}
out := t.root.insert(item, t.maxItems())
if out == nil {
t.length++
}
return out
}
// Delete removes an item equal to the passed in item from the tree, returning
// it. If no such item exists, returns nil.
func (t *BTree) Delete(item Item) Item {
return t.deleteItem(item, removeItem)
}
// DeleteMin removes the smallest item in the tree and returns it.
// If no such item exists, returns nil.
func (t *BTree) DeleteMin() Item {
return t.deleteItem(nil, removeMin)
}
// DeleteMax removes the largest item in the tree and returns it.
// If no such item exists, returns nil.
func (t *BTree) DeleteMax() Item {
return t.deleteItem(nil, removeMax)
}
func (t *BTree) deleteItem(item Item, typ toRemove) Item {
if t.root == nil || len(t.root.items) == 0 {
return nil
}
t.root = t.root.mutableFor(t.cow)
out := t.root.remove(item, t.minItems(), typ)
if len(t.root.items) == 0 && len(t.root.children) > 0 {
oldroot := t.root
t.root = t.root.children[0]
t.cow.freeNode(oldroot)
}
if out != nil {
t.length--
}
return out
}
// AscendRange calls the iterator for every value in the tree within the range
// [greaterOrEqual, lessThan), until iterator returns false.
func (t *BTree) AscendRange(greaterOrEqual, lessThan Item, iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(ascend, greaterOrEqual, lessThan, true, false, iterator)
}
// AscendLessThan calls the iterator for every value in the tree within the range
// [first, pivot), until iterator returns false.
func (t *BTree) AscendLessThan(pivot Item, iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(ascend, nil, pivot, false, false, iterator)
}
// AscendGreaterOrEqual calls the iterator for every value in the tree within
// the range [pivot, last], until iterator returns false.
func (t *BTree) AscendGreaterOrEqual(pivot Item, iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(ascend, pivot, nil, true, false, iterator)
}
// Ascend calls the iterator for every value in the tree within the range
// [first, last], until iterator returns false.
func (t *BTree) Ascend(iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(ascend, nil, nil, false, false, iterator)
}
// DescendRange calls the iterator for every value in the tree within the range
// [lessOrEqual, greaterThan), until iterator returns false.
func (t *BTree) DescendRange(lessOrEqual, greaterThan Item, iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(descend, lessOrEqual, greaterThan, true, false, iterator)
}
// DescendLessOrEqual calls the iterator for every value in the tree within the range
// [pivot, first], until iterator returns false.
func (t *BTree) DescendLessOrEqual(pivot Item, iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(descend, pivot, nil, true, false, iterator)
}
// DescendGreaterThan calls the iterator for every value in the tree within
// the range [last, pivot), until iterator returns false.
func (t *BTree) DescendGreaterThan(pivot Item, iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(descend, nil, pivot, false, false, iterator)
}
// Descend calls the iterator for every value in the tree within the range
// [last, first], until iterator returns false.
func (t *BTree) Descend(iterator ItemIterator) {
if t.root == nil {
return
}
t.root.iterate(descend, nil, nil, false, false, iterator)
}
// Get looks for the key item in the tree, returning it. It returns nil if
// unable to find that item.
func (t *BTree) Get(key Item) Item {
if t.root == nil {
return nil
}
return t.root.get(key)
}
// Min returns the smallest item in the tree, or nil if the tree is empty.
func (t *BTree) Min() Item {
return min(t.root)
}
// Max returns the largest item in the tree, or nil if the tree is empty.
func (t *BTree) Max() Item {
return max(t.root)
}
// Has returns true if the given key is in the tree.
func (t *BTree) Has(key Item) bool {
return t.Get(key) != nil
}
// Len returns the number of items currently in the tree.
func (t *BTree) Len() int {
return t.length
}
// Clear removes all items from the btree. If addNodesToFreelist is true,
// t's nodes are added to its freelist as part of this call, until the freelist
// is full. Otherwise, the root node is simply dereferenced and the subtree
// left to Go's normal GC processes.
//
// This can be much faster
// than calling Delete on all elements, because that requires finding/removing
// each element in the tree and updating the tree accordingly. It also is
// somewhat faster than creating a new tree to replace the old one, because
// nodes from the old tree are reclaimed into the freelist for use by the new
// one, instead of being lost to the garbage collector.
//
// This call takes:
// O(1): when addNodesToFreelist is false, this is a single operation.
// O(1): when the freelist is already full, it breaks out immediately
// O(freelist size): when the freelist is empty and the nodes are all owned
// by this tree, nodes are added to the freelist until full.
// O(tree size): when all nodes are owned by another tree, all nodes are
// iterated over looking for nodes to add to the freelist, and due to
// ownership, none are.
func (t *BTree) Clear(addNodesToFreelist bool) {
if t.root != nil && addNodesToFreelist {
t.root.reset(t.cow)
}
t.root, t.length = nil, 0
}
// reset returns a subtree to the freelist. It breaks out immediately if the
// freelist is full, since the only benefit of iterating is to fill that
// freelist up. Returns true if parent reset call should continue.
func (n *node) reset(c *copyOnWriteContext) bool {
for _, child := range n.children {
if !child.reset(c) {
return false
}
}
return c.freeNode(n) != ftFreelistFull
}
// Int implements the Item interface for integers.
type Int int
// Less returns true if int(a) < int(b).
func (a Int) Less(b Item) bool {
return a < b.(Int)
}
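To make the copy-on-write behaviour documented on Clone concrete, here is a small sketch using the Int item type defined at the end of the file; the output comments reflect the documented semantics rather than anything asserted by this diff.
package main
import (
	"fmt"
	"github.com/google/btree"
)
func main() {
	t1 := btree.New(8)
	for i := 0; i < 4; i++ {
		t1.ReplaceOrInsert(btree.Int(i))
	}
	t2 := t1.Clone()                    // lazily shares nodes with t1
	t1.ReplaceOrInsert(btree.Int(100))  // copy-on-write: t2 is unaffected
	fmt.Println(t1.Len(), t2.Len())     // 5 4
	fmt.Println(t2.Has(btree.Int(100))) // false
}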


@@ -39,6 +39,7 @@ go_library(
"//common/types/traits:go_default_library",
"//interpreter:go_default_library",
"//parser:go_default_library",
"@dev_cel_expr//:expr",
"@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
"@org_golang_google_protobuf//reflect/protodesc:go_default_library",
@@ -81,7 +82,6 @@ go_test(
"//test:go_default_library",
"//test/proto2pb:go_default_library",
"//test/proto3pb:go_default_library",
"@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
"@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
"@org_golang_google_protobuf//encoding/prototext:go_default_library",


@@ -23,6 +23,7 @@ import (
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
celpb "cel.dev/expr"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
@@ -312,20 +313,34 @@ func ExprTypeToType(t *exprpb.Type) (*Type, error) {
// ExprDeclToDeclaration converts a protobuf CEL declaration to a CEL-native declaration, either a Variable or Function.
func ExprDeclToDeclaration(d *exprpb.Decl) (EnvOption, error) {
return AlphaProtoAsDeclaration(d)
}
// AlphaProtoAsDeclaration converts a v1alpha1.Decl value describing a variable or function into an EnvOption.
func AlphaProtoAsDeclaration(d *exprpb.Decl) (EnvOption, error) {
canonical := &celpb.Decl{}
if err := convertProto(d, canonical); err != nil {
return nil, err
}
return ProtoAsDeclaration(canonical)
}
// ProtoAsDeclaration converts a canonical celpb.Decl value describing a variable or function into an EnvOption.
func ProtoAsDeclaration(d *celpb.Decl) (EnvOption, error) {
switch d.GetDeclKind().(type) {
case *exprpb.Decl_Function:
case *celpb.Decl_Function:
overloads := d.GetFunction().GetOverloads()
opts := make([]FunctionOpt, len(overloads))
for i, o := range overloads {
args := make([]*Type, len(o.GetParams()))
for j, p := range o.GetParams() {
a, err := types.ExprTypeToType(p)
a, err := types.ProtoAsType(p)
if err != nil {
return nil, err
}
args[j] = a
}
res, err := types.ExprTypeToType(o.GetResultType())
res, err := types.ProtoAsType(o.GetResultType())
if err != nil {
return nil, err
}
@@ -336,15 +351,15 @@ func ExprDeclToDeclaration(d *exprpb.Decl) (EnvOption, error) {
}
}
return Function(d.GetName(), opts...), nil
case *exprpb.Decl_Ident:
t, err := types.ExprTypeToType(d.GetIdent().GetType())
case *celpb.Decl_Ident:
t, err := types.ProtoAsType(d.GetIdent().GetType())
if err != nil {
return nil, err
}
if d.GetIdent().GetValue() == nil {
return Variable(d.GetName(), t), nil
}
val, err := ast.ConstantToVal(d.GetIdent().GetValue())
val, err := ast.ProtoConstantAsVal(d.GetIdent().GetValue())
if err != nil {
return nil, err
}
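The hunk above funnels the deprecated v1alpha1 entry points through the canonical cel.dev/expr types via a convertProto helper whose body is not shown here; presumably it round-trips through the protobuf wire format, roughly as sketched below. The helper's exact implementation and the wire-compatibility of the two schemas are assumptions, not something this hunk confirms.
package cel // sketch only
import "google.golang.org/protobuf/proto"
// convertProto copies src into dst by marshaling to the wire format and
// unmarshaling into the target message type. It relies on the v1alpha1 and
// canonical cel.dev/expr messages being wire-compatible (assumed).
func convertProto(src, dst proto.Message) error {
	data, err := proto.Marshal(src)
	if err != nil {
		return err
	}
	return proto.Unmarshal(data, dst)
}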


@@ -44,6 +44,9 @@ type Ast struct {
// NativeRep converts the AST to a Go-native representation.
func (ast *Ast) NativeRep() *celast.AST {
if ast == nil {
return nil
}
return ast.impl
}
@@ -55,16 +58,13 @@ func (ast *Ast) Expr() *exprpb.Expr {
if ast == nil {
return nil
}
pbExpr, _ := celast.ExprToProto(ast.impl.Expr())
pbExpr, _ := celast.ExprToProto(ast.NativeRep().Expr())
return pbExpr
}
// IsChecked returns whether the Ast value has been successfully type-checked.
func (ast *Ast) IsChecked() bool {
if ast == nil {
return false
}
return ast.impl.IsChecked()
return ast.NativeRep().IsChecked()
}
// SourceInfo returns character offset and newline position information about expression elements.
@@ -72,7 +72,7 @@ func (ast *Ast) SourceInfo() *exprpb.SourceInfo {
if ast == nil {
return nil
}
pbInfo, _ := celast.SourceInfoToProto(ast.impl.SourceInfo())
pbInfo, _ := celast.SourceInfoToProto(ast.NativeRep().SourceInfo())
return pbInfo
}
@@ -95,7 +95,7 @@ func (ast *Ast) OutputType() *Type {
if ast == nil {
return types.ErrorType
}
return ast.impl.GetType(ast.impl.Expr().ID())
return ast.NativeRep().GetType(ast.NativeRep().Expr().ID())
}
// Source returns a view of the input used to create the Ast. This source may be complete or
@@ -218,12 +218,12 @@ func (e *Env) Check(ast *Ast) (*Ast, *Issues) {
if err != nil {
errs := common.NewErrors(ast.Source())
errs.ReportError(common.NoLocation, err.Error())
return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo())
return nil, NewIssuesWithSourceInfo(errs, ast.NativeRep().SourceInfo())
}
checked, errs := checker.Check(ast.impl, ast.Source(), chk)
checked, errs := checker.Check(ast.NativeRep(), ast.Source(), chk)
if len(errs.GetErrors()) > 0 {
return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo())
return nil, NewIssuesWithSourceInfo(errs, ast.NativeRep().SourceInfo())
}
// Manually create the Ast to ensure that the Ast source information (which may be more
// detailed than the information provided by Check), is returned to the caller.
@@ -244,7 +244,7 @@ func (e *Env) Check(ast *Ast) (*Ast, *Issues) {
}
}
// Apply additional validators on the type-checked result.
iss := NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo())
iss := NewIssuesWithSourceInfo(errs, ast.NativeRep().SourceInfo())
for _, v := range e.validators {
v.Validate(e, vConfig, checked, iss)
}
@@ -309,17 +309,13 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) {
copy(chkOptsCopy, e.chkOpts)
// Copy the declarations if needed.
varsCopy := []*decls.VariableDecl{}
if chk != nil {
// If the type-checker has already been instantiated, then the e.declarations have been
// validated within the chk instance.
chkOptsCopy = append(chkOptsCopy, checker.ValidatedDeclarations(chk))
} else {
// If the type-checker has not been instantiated, ensure the unvalidated declarations are
// provided to the extended Env instance.
varsCopy = make([]*decls.VariableDecl, len(e.variables))
copy(varsCopy, e.variables)
}
varsCopy := make([]*decls.VariableDecl, len(e.variables))
copy(varsCopy, e.variables)
// Copy macros and program options
macsCopy := make([]parser.Macro, len(e.macros))
@@ -416,6 +412,17 @@ func (e *Env) Libraries() []string {
return libraries
}
// HasFunction returns whether a specific function has been configured in the environment
func (e *Env) HasFunction(functionName string) bool {
_, ok := e.functions[functionName]
return ok
}
// Functions returns map of Functions, keyed by function name, that have been configured in the environment.
func (e *Env) Functions() map[string]*decls.FunctionDecl {
return e.functions
}
// HasValidator returns whether a specific ASTValidator has been configured in the environment.
func (e *Env) HasValidator(name string) bool {
for _, v := range e.validators {
@@ -452,6 +459,12 @@ func (e *Env) ParseSource(src Source) (*Ast, *Issues) {
// Program generates an evaluable instance of the Ast within the environment (Env).
func (e *Env) Program(ast *Ast, opts ...ProgramOption) (Program, error) {
return e.PlanProgram(ast.NativeRep(), opts...)
}
// PlanProgram generates an evaluable instance of the AST in the go-native representation within
// the environment (Env).
func (e *Env) PlanProgram(a *celast.AST, opts ...ProgramOption) (Program, error) {
optSet := e.progOpts
if len(opts) != 0 {
mergedOpts := []ProgramOption{}
@@ -459,7 +472,7 @@ func (e *Env) Program(ast *Ast, opts ...ProgramOption) (Program, error) {
mergedOpts = append(mergedOpts, opts...)
optSet = mergedOpts
}
return newProgram(e, ast, optSet)
return newProgram(e, a, optSet)
}
// CELTypeAdapter returns the `types.Adapter` configured for the environment.
@@ -753,10 +766,10 @@ func (i *Issues) Append(other *Issues) *Issues {
if i == nil {
return other
}
if other == nil {
if other == nil || i == other {
return i
}
return NewIssues(i.errs.Append(other.errs.GetErrors()))
return NewIssuesWithSourceInfo(i.errs.Append(other.errs.GetErrors()), i.info)
}
// String converts the issues to a suitable display string.
@@ -790,7 +803,7 @@ type interopCELTypeProvider struct {
// FindStructType returns a types.Type instance for the given fully-qualified typeName if one exists.
//
// This method proxies to the underyling ref.TypeProvider's FindType method and converts protobuf type
// This method proxies to the underlying ref.TypeProvider's FindType method and converts protobuf type
// into a native type representation. If the conversion fails, the type is listed as not found.
func (p *interopCELTypeProvider) FindStructType(typeName string) (*types.Type, bool) {
if et, found := p.FindType(typeName); found {
@@ -813,7 +826,7 @@ func (p *interopCELTypeProvider) FindStructFieldNames(typeName string) ([]string
// FindStructFieldType returns a types.FieldType instance for the given fully-qualified typeName and field
// name, if one exists.
//
// This method proxies to the underyling ref.TypeProvider's FindFieldType method and converts protobuf type
// This method proxies to the underlying ref.TypeProvider's FindFieldType method and converts protobuf type
// into a native type representation. If the conversion fails, the type is listed as not found.
func (p *interopCELTypeProvider) FindStructFieldType(structType, fieldName string) (*types.FieldType, bool) {
if ft, found := p.FindFieldType(structType, fieldName); found {
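The HasFunction/Functions accessors and the PlanProgram entry point added in this file can be exercised roughly as follows; the expression and the "size" lookup are illustrative and assume the default standard-library environment.
package main
import (
	"fmt"
	"github.com/google/cel-go/cel"
)
func main() {
	env, err := cel.NewEnv()
	if err != nil {
		panic(err)
	}
	fmt.Println("size declared:", env.HasFunction("size"))
	ast, iss := env.Compile(`"hello".size() == 5`)
	if iss != nil && iss.Err() != nil {
		panic(iss.Err())
	}
	// PlanProgram accepts the Go-native AST directly, avoiding a proto round trip.
	prg, err := env.PlanProgram(ast.NativeRep())
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	fmt.Println(out, err) // true <nil>
}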


@@ -28,6 +28,7 @@ import (
"github.com/google/cel-go/common/types/traits"
"github.com/google/cel-go/parser"
celpb "cel.dev/expr"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
anypb "google.golang.org/protobuf/types/known/anypb"
)
@@ -104,72 +105,86 @@ func AstToString(a *Ast) (string, error) {
// RefValueToValue converts between ref.Val and api.expr.Value.
// The result Value is the serialized proto form. The ref.Val must not be error or unknown.
func RefValueToValue(res ref.Val) (*exprpb.Value, error) {
return ValueAsAlphaProto(res)
}
func ValueAsAlphaProto(res ref.Val) (*exprpb.Value, error) {
canonical, err := ValueAsProto(res)
if err != nil {
return nil, err
}
alpha := &exprpb.Value{}
err = convertProto(canonical, alpha)
return alpha, err
}
func ValueAsProto(res ref.Val) (*celpb.Value, error) {
switch res.Type() {
case types.BoolType:
return &exprpb.Value{
Kind: &exprpb.Value_BoolValue{BoolValue: res.Value().(bool)}}, nil
return &celpb.Value{
Kind: &celpb.Value_BoolValue{BoolValue: res.Value().(bool)}}, nil
case types.BytesType:
return &exprpb.Value{
Kind: &exprpb.Value_BytesValue{BytesValue: res.Value().([]byte)}}, nil
return &celpb.Value{
Kind: &celpb.Value_BytesValue{BytesValue: res.Value().([]byte)}}, nil
case types.DoubleType:
return &exprpb.Value{
Kind: &exprpb.Value_DoubleValue{DoubleValue: res.Value().(float64)}}, nil
return &celpb.Value{
Kind: &celpb.Value_DoubleValue{DoubleValue: res.Value().(float64)}}, nil
case types.IntType:
return &exprpb.Value{
Kind: &exprpb.Value_Int64Value{Int64Value: res.Value().(int64)}}, nil
return &celpb.Value{
Kind: &celpb.Value_Int64Value{Int64Value: res.Value().(int64)}}, nil
case types.ListType:
l := res.(traits.Lister)
sz := l.Size().(types.Int)
elts := make([]*exprpb.Value, 0, int64(sz))
elts := make([]*celpb.Value, 0, int64(sz))
for i := types.Int(0); i < sz; i++ {
v, err := RefValueToValue(l.Get(i))
v, err := ValueAsProto(l.Get(i))
if err != nil {
return nil, err
}
elts = append(elts, v)
}
return &exprpb.Value{
Kind: &exprpb.Value_ListValue{
ListValue: &exprpb.ListValue{Values: elts}}}, nil
return &celpb.Value{
Kind: &celpb.Value_ListValue{
ListValue: &celpb.ListValue{Values: elts}}}, nil
case types.MapType:
mapper := res.(traits.Mapper)
sz := mapper.Size().(types.Int)
entries := make([]*exprpb.MapValue_Entry, 0, int64(sz))
entries := make([]*celpb.MapValue_Entry, 0, int64(sz))
for it := mapper.Iterator(); it.HasNext().(types.Bool); {
k := it.Next()
v := mapper.Get(k)
kv, err := RefValueToValue(k)
kv, err := ValueAsProto(k)
if err != nil {
return nil, err
}
vv, err := RefValueToValue(v)
vv, err := ValueAsProto(v)
if err != nil {
return nil, err
}
entries = append(entries, &exprpb.MapValue_Entry{Key: kv, Value: vv})
entries = append(entries, &celpb.MapValue_Entry{Key: kv, Value: vv})
}
return &exprpb.Value{
Kind: &exprpb.Value_MapValue{
MapValue: &exprpb.MapValue{Entries: entries}}}, nil
return &celpb.Value{
Kind: &celpb.Value_MapValue{
MapValue: &celpb.MapValue{Entries: entries}}}, nil
case types.NullType:
return &exprpb.Value{
Kind: &exprpb.Value_NullValue{}}, nil
return &celpb.Value{
Kind: &celpb.Value_NullValue{}}, nil
case types.StringType:
return &exprpb.Value{
Kind: &exprpb.Value_StringValue{StringValue: res.Value().(string)}}, nil
return &celpb.Value{
Kind: &celpb.Value_StringValue{StringValue: res.Value().(string)}}, nil
case types.TypeType:
typeName := res.(ref.Type).TypeName()
return &exprpb.Value{Kind: &exprpb.Value_TypeValue{TypeValue: typeName}}, nil
return &celpb.Value{Kind: &celpb.Value_TypeValue{TypeValue: typeName}}, nil
case types.UintType:
return &exprpb.Value{
Kind: &exprpb.Value_Uint64Value{Uint64Value: res.Value().(uint64)}}, nil
return &celpb.Value{
Kind: &celpb.Value_Uint64Value{Uint64Value: res.Value().(uint64)}}, nil
default:
any, err := res.ConvertToNative(anyPbType)
if err != nil {
return nil, err
}
return &exprpb.Value{
Kind: &exprpb.Value_ObjectValue{ObjectValue: any.(*anypb.Any)}}, nil
return &celpb.Value{
Kind: &celpb.Value_ObjectValue{ObjectValue: any.(*anypb.Any)}}, nil
}
}
@@ -192,55 +207,67 @@ var (
// ValueToRefValue converts between exprpb.Value and ref.Val.
func ValueToRefValue(adapter types.Adapter, v *exprpb.Value) (ref.Val, error) {
return AlphaProtoAsValue(adapter, v)
}
func AlphaProtoAsValue(adapter types.Adapter, v *exprpb.Value) (ref.Val, error) {
canonical := &celpb.Value{}
if err := convertProto(v, canonical); err != nil {
return nil, err
}
return ProtoAsValue(adapter, canonical)
}
func ProtoAsValue(adapter types.Adapter, v *celpb.Value) (ref.Val, error) {
switch v.Kind.(type) {
case *exprpb.Value_NullValue:
case *celpb.Value_NullValue:
return types.NullValue, nil
case *exprpb.Value_BoolValue:
case *celpb.Value_BoolValue:
return types.Bool(v.GetBoolValue()), nil
case *exprpb.Value_Int64Value:
case *celpb.Value_Int64Value:
return types.Int(v.GetInt64Value()), nil
case *exprpb.Value_Uint64Value:
case *celpb.Value_Uint64Value:
return types.Uint(v.GetUint64Value()), nil
case *exprpb.Value_DoubleValue:
case *celpb.Value_DoubleValue:
return types.Double(v.GetDoubleValue()), nil
case *exprpb.Value_StringValue:
case *celpb.Value_StringValue:
return types.String(v.GetStringValue()), nil
case *exprpb.Value_BytesValue:
case *celpb.Value_BytesValue:
return types.Bytes(v.GetBytesValue()), nil
case *exprpb.Value_ObjectValue:
case *celpb.Value_ObjectValue:
any := v.GetObjectValue()
msg, err := anypb.UnmarshalNew(any, proto.UnmarshalOptions{DiscardUnknown: true})
if err != nil {
return nil, err
}
return adapter.NativeToValue(msg), nil
case *exprpb.Value_MapValue:
case *celpb.Value_MapValue:
m := v.GetMapValue()
entries := make(map[ref.Val]ref.Val)
for _, entry := range m.Entries {
key, err := ValueToRefValue(adapter, entry.Key)
key, err := ProtoAsValue(adapter, entry.Key)
if err != nil {
return nil, err
}
pb, err := ValueToRefValue(adapter, entry.Value)
pb, err := ProtoAsValue(adapter, entry.Value)
if err != nil {
return nil, err
}
entries[key] = pb
}
return adapter.NativeToValue(entries), nil
case *exprpb.Value_ListValue:
case *celpb.Value_ListValue:
l := v.GetListValue()
elts := make([]ref.Val, len(l.Values))
for i, e := range l.Values {
rv, err := ValueToRefValue(adapter, e)
rv, err := ProtoAsValue(adapter, e)
if err != nil {
return nil, err
}
elts[i] = rv
}
return adapter.NativeToValue(elts), nil
case *exprpb.Value_TypeValue:
case *celpb.Value_TypeValue:
typeName := v.GetTypeValue()
tv, ok := typeNameToTypeValue[typeName]
if ok {
@@ -250,3 +277,12 @@ func ValueToRefValue(adapter types.Adapter, v *exprpb.Value) (ref.Val, error) {
}
return nil, errors.New("unknown value")
}
func convertProto(src, dst proto.Message) error {
pb, err := proto.Marshal(src)
if err != nil {
return err
}
err = proto.Unmarshal(pb, dst)
return err
}
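For context, a minimal sketch of a round trip through the new canonical conversion helpers above; it assumes only the cel and types packages referenced in this diff and is not part of the vendored change:

    package main

    import (
        "fmt"

        "github.com/google/cel-go/cel"
        "github.com/google/cel-go/common/types"
    )

    func main() {
        // Convert a CEL string value into the canonical cel.dev/expr Value proto.
        pbVal, err := cel.ValueAsProto(types.String("hello"))
        if err != nil {
            panic(err)
        }
        // Convert the proto form back into a ref.Val using the default adapter.
        refVal, err := cel.ProtoAsValue(types.DefaultTypeAdapter, pbVal)
        if err != nil {
            panic(err)
        }
        fmt.Println(refVal.Value()) // hello
    }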

View File

@@ -403,7 +403,7 @@ func optMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *
meh.NewList(),
unusedIterVar,
varName,
meh.NewMemberCall(valueFunc, target),
meh.NewMemberCall(valueFunc, meh.Copy(target)),
meh.NewLiteral(types.False),
meh.NewIdent(varName),
mapExpr,
@@ -430,7 +430,7 @@ func optFlatMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Exp
meh.NewList(),
unusedIterVar,
varName,
meh.NewMemberCall(valueFunc, target),
meh.NewMemberCall(valueFunc, meh.Copy(target)),
meh.NewLiteral(types.False),
meh.NewIdent(varName),
mapExpr,
@@ -446,6 +446,12 @@ func enableOptionalSyntax() EnvOption {
}
}
// EnableErrorOnBadPresenceTest enables error generation when a presence test or optional field
// selection is performed on a primitive type.
func EnableErrorOnBadPresenceTest(value bool) EnvOption {
return features(featureEnableErrorOnBadPresenceTest, value)
}
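As a hedged illustration of wiring the new feature flag into an environment (standard cel-go public API; the variable and expression are placeholders):

    package main

    import (
        "fmt"

        "github.com/google/cel-go/cel"
    )

    func main() {
        // Opt in to error generation for presence tests on unsupported types.
        env, err := cel.NewEnv(
            cel.Variable("m", cel.MapType(cel.StringType, cel.DynType)),
            cel.EnableErrorOnBadPresenceTest(true),
        )
        if err != nil {
            panic(err)
        }
        ast, iss := env.Compile(`has(m.key)`)
        if iss.Err() != nil {
            panic(iss.Err())
        }
        fmt.Println(ast.OutputType()) // bool
    }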
func decorateOptionalOr(i interpreter.Interpretable) (interpreter.Interpretable, error) {
call, ok := i.(interpreter.InterpretableCall)
if !ok {

View File

@@ -15,6 +15,8 @@
package cel
import (
"sort"
"github.com/google/cel-go/common"
"github.com/google/cel-go/common/ast"
"github.com/google/cel-go/common/types"
@@ -98,14 +100,21 @@ func (opt *StaticOptimizer) Optimize(env *Env, a *Ast) (*Ast, *Issues) {
// that the ids within the expression correspond to the ids within macros.
func normalizeIDs(idGen ast.IDGenerator, optimized ast.Expr, info *ast.SourceInfo) {
optimized.RenumberIDs(idGen)
if len(info.MacroCalls()) == 0 {
return
}
// Sort the macro ids to make sure that the renumbering of macro-specific variables
// is stable across normalization calls.
sortedMacroIDs := []int64{}
for id := range info.MacroCalls() {
sortedMacroIDs = append(sortedMacroIDs, id)
}
sort.Slice(sortedMacroIDs, func(i, j int) bool { return sortedMacroIDs[i] < sortedMacroIDs[j] })
// First, update the macro call ids themselves.
callIDMap := map[int64]int64{}
for id := range info.MacroCalls() {
for _, id := range sortedMacroIDs {
callIDMap[id] = idGen(id)
}
// Then update the macro call definitions which refer to these ids, but
@@ -116,7 +125,8 @@ func normalizeIDs(idGen ast.IDGenerator, optimized ast.Expr, info *ast.SourceInf
call ast.Expr
}
macroUpdates := []macroUpdate{}
for oldID, newID := range callIDMap {
for _, oldID := range sortedMacroIDs {
newID := callIDMap[oldID]
call, found := info.GetMacroCall(oldID)
if !found {
continue
@@ -134,6 +144,7 @@ func cleanupMacroRefs(expr ast.Expr, info *ast.SourceInfo) {
if len(info.MacroCalls()) == 0 {
return
}
// Sanitize the macro call references once the optimized expression has been computed
// and the ids normalized between the expression and the macros.
exprRefMap := make(map[int64]struct{})
@@ -200,6 +211,16 @@ type OptimizerContext struct {
*Issues
}
// ExtendEnv augments the context's environment with the additional options.

func (opt *OptimizerContext) ExtendEnv(opts ...EnvOption) error {
e, err := opt.Env.Extend(opts...)
if err != nil {
return err
}
opt.Env = e
return nil
}
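A rough sketch of how an optimizer might use the new ExtendEnv hook; the addVarOptimizer type is hypothetical and the exact ASTOptimizer method signature is an assumption for illustration only:

    package optimizerdemo

    import (
        "github.com/google/cel-go/cel"
        "github.com/google/cel-go/common/ast"
    )

    // addVarOptimizer is a hypothetical ASTOptimizer that only augments the
    // environment with an extra declaration and performs no rewrites.
    type addVarOptimizer struct{}

    func (addVarOptimizer) Optimize(ctx *cel.OptimizerContext, a *ast.AST) *ast.AST {
        if err := ctx.ExtendEnv(cel.Variable("extra", cel.StringType)); err != nil {
            return a // leave the AST untouched when the environment cannot be extended
        }
        return a
    }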
// ASTOptimizer applies an optimization over an AST and returns the optimized result.
type ASTOptimizer interface {
// Optimize optimizes a type-checked AST within an Environment and accumulates any issues.
@@ -253,6 +274,11 @@ func (opt *optimizerExprFactory) SetMacroCall(id int64, expr ast.Expr) {
opt.sourceInfo.SetMacroCall(id, expr)
}
// MacroCalls returns the map of macro calls currently in the context.
func (opt *optimizerExprFactory) MacroCalls() map[int64]ast.Expr {
return opt.sourceInfo.MacroCalls()
}
// NewBindMacro creates an AST expression representing the expanded bind() macro, and a macro expression
// representing the unexpanded call signature to be inserted into the source info macro call metadata.
func (opt *optimizerExprFactory) NewBindMacro(macroID int64, varName string, varInit, remaining ast.Expr) (astExpr, macroExpr ast.Expr) {

View File

@@ -61,6 +61,10 @@ const (
// compressing the logic graph to a single call when multiple like-operator
// expressions occur: e.g. a && b && c && d -> call(_&&_, [a, b, c, d])
featureVariadicLogicalASTs
// Enable error generation when a presence test or optional field selection is
// performed on a primitive type.
featureEnableErrorOnBadPresenceTest
)
// EnvOption is a functional interface for configuring the environment.
@@ -243,6 +247,13 @@ func Abbrevs(qualifiedNames ...string) EnvOption {
}
}
// customTypeRegistry is an internal-only interface containing the minimum methods required to support
// custom types. It is a subset of methods from ref.TypeRegistry.
type customTypeRegistry interface {
RegisterDescriptor(protoreflect.FileDescriptor) error
RegisterType(...ref.Type) error
}
// Types adds one or more type declarations to the environment, allowing for construction of
// type-literals whose definitions are included in the common expression built-in set.
//
@@ -255,12 +266,7 @@ func Abbrevs(qualifiedNames ...string) EnvOption {
// Note: This option must be specified after the CustomTypeProvider option when used together.
func Types(addTypes ...any) EnvOption {
return func(e *Env) (*Env, error) {
var reg ref.TypeRegistry
var isReg bool
reg, isReg = e.provider.(*types.Registry)
if !isReg {
reg, isReg = e.provider.(ref.TypeRegistry)
}
reg, isReg := e.provider.(customTypeRegistry)
if !isReg {
return nil, fmt.Errorf("custom types not supported by provider: %T", e.provider)
}
@@ -297,7 +303,7 @@ func Types(addTypes ...any) EnvOption {
// extension or by re-using the same EnvOption with another NewEnv() call.
func TypeDescs(descs ...any) EnvOption {
return func(e *Env) (*Env, error) {
reg, isReg := e.provider.(ref.TypeRegistry)
reg, isReg := e.provider.(customTypeRegistry)
if !isReg {
return nil, fmt.Errorf("custom types not supported by provider: %T", e.provider)
}
@@ -345,7 +351,7 @@ func TypeDescs(descs ...any) EnvOption {
}
}
func registerFileSet(reg ref.TypeRegistry, fileSet *descpb.FileDescriptorSet) error {
func registerFileSet(reg customTypeRegistry, fileSet *descpb.FileDescriptorSet) error {
files, err := protodesc.NewFiles(fileSet)
if err != nil {
return fmt.Errorf("protodesc.NewFiles(%v) failed: %v", fileSet, err)
@@ -353,7 +359,7 @@ func registerFileSet(reg ref.TypeRegistry, fileSet *descpb.FileDescriptorSet) er
return registerFiles(reg, files)
}
func registerFiles(reg ref.TypeRegistry, files *protoregistry.Files) error {
func registerFiles(reg customTypeRegistry, files *protoregistry.Files) error {
var err error
files.RangeFiles(func(fd protoreflect.FileDescriptor) bool {
err = reg.RegisterDescriptor(fd)

View File

@@ -19,6 +19,7 @@ import (
"fmt"
"sync"
"github.com/google/cel-go/common/ast"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/interpreter"
@@ -151,7 +152,7 @@ func (p *prog) clone() *prog {
// ProgramOption values.
//
// If the program cannot be configured the prog will be nil, with a non-nil error response.
func newProgram(e *Env, a *Ast, opts []ProgramOption) (Program, error) {
func newProgram(e *Env, a *ast.AST, opts []ProgramOption) (Program, error) {
// Build the dispatcher, interpreter, and default program value.
disp := interpreter.NewDispatcher()
@@ -187,10 +188,13 @@ func newProgram(e *Env, a *Ast, opts []ProgramOption) (Program, error) {
// Set the attribute factory after the options have been set.
var attrFactory interpreter.AttributeFactory
attrFactorOpts := []interpreter.AttrFactoryOption{
interpreter.EnableErrorOnBadPresenceTest(p.HasFeature(featureEnableErrorOnBadPresenceTest)),
}
if p.evalOpts&OptPartialEval == OptPartialEval {
attrFactory = interpreter.NewPartialAttributeFactory(e.Container, e.adapter, e.provider)
attrFactory = interpreter.NewPartialAttributeFactory(e.Container, e.adapter, e.provider, attrFactorOpts...)
} else {
attrFactory = interpreter.NewAttributeFactory(e.Container, e.adapter, e.provider)
attrFactory = interpreter.NewAttributeFactory(e.Container, e.adapter, e.provider, attrFactorOpts...)
}
interp := interpreter.NewInterpreter(disp, e.Container, e.provider, e.adapter, attrFactory)
p.interpreter = interp
@@ -252,9 +256,9 @@ func newProgram(e *Env, a *Ast, opts []ProgramOption) (Program, error) {
return p.initInterpretable(a, decorators)
}
func (p *prog) initInterpretable(a *Ast, decs []interpreter.InterpretableDecorator) (*prog, error) {
func (p *prog) initInterpretable(a *ast.AST, decs []interpreter.InterpretableDecorator) (*prog, error) {
// When the AST has been type-checked it contains metadata that can be used to speed up program execution.
interpretable, err := p.interpreter.NewInterpretable(a.impl, decs...)
interpretable, err := p.interpreter.NewInterpretable(a, decs...)
if err != nil {
return nil, err
}

View File

@@ -16,7 +16,6 @@ go_library(
"options.go",
"printer.go",
"scopes.go",
"standard.go",
"types.go",
],
importpath = "github.com/google/cel-go/checker",

View File

@@ -496,16 +496,32 @@ func (c *checker) checkComprehension(e ast.Expr) {
comp := e.AsComprehension()
c.check(comp.IterRange())
c.check(comp.AccuInit())
accuType := c.getType(comp.AccuInit())
rangeType := substitute(c.mappings, c.getType(comp.IterRange()), false)
var varType *types.Type
// Create a scope for the comprehension since it has a local accumulation variable.
// This scope will contain the accumulation variable used to compute the result.
accuType := c.getType(comp.AccuInit())
c.env = c.env.enterScope()
c.env.AddIdents(decls.NewVariable(comp.AccuVar(), accuType))
var varType, var2Type *types.Type
switch rangeType.Kind() {
case types.ListKind:
// varType represents the list element type for one-variable comprehensions.
varType = rangeType.Parameters()[0]
if comp.HasIterVar2() {
// varType represents the list index (int) for two-variable comprehensions,
// and var2Type represents the list element type.
var2Type = varType
varType = types.IntType
}
case types.MapKind:
// Ranges over the keys.
// varType represents the map entry key for all comprehension types.
varType = rangeType.Parameters()[0]
if comp.HasIterVar2() {
// var2Type represents the map entry value for two-variable comprehensions.
var2Type = rangeType.Parameters()[1]
}
case types.DynKind, types.ErrorKind, types.TypeParamKind:
// Set the range type to DYN to prevent assignment to a potentially incorrect type
// at a later point in type-checking. The isAssignable call will update the type
@@ -518,13 +534,12 @@ func (c *checker) checkComprehension(e ast.Expr) {
varType = types.ErrorType
}
// Create a scope for the comprehension since it has a local accumulation variable.
// This scope will contain the accumulation variable used to compute the result.
c.env = c.env.enterScope()
c.env.AddIdents(decls.NewVariable(comp.AccuVar(), accuType))
// Create a block scope for the loop.
c.env = c.env.enterScope()
c.env.AddIdents(decls.NewVariable(comp.IterVar(), varType))
if comp.HasIterVar2() {
c.env.AddIdents(decls.NewVariable(comp.IterVar2(), var2Type))
}
// Check the variable references in the condition and step.
c.check(comp.LoopCondition())
c.assertType(comp.LoopCondition(), types.BoolType)

View File

@@ -1,35 +0,0 @@
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package checker
import (
"github.com/google/cel-go/common/stdlib"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
// StandardFunctions returns the Decls for all functions in the evaluator.
//
// Deprecated: prefer stdlib.FunctionExprDecls()
func StandardFunctions() []*exprpb.Decl {
return stdlib.FunctionExprDecls()
}
// StandardTypes returns the set of type identifiers for standard library types.
//
// Deprecated: prefer stdlib.TypeExprDecls()
func StandardTypes() []*exprpb.Decl {
return stdlib.TypeExprDecls()
}

View File

@@ -18,7 +18,6 @@ go_library(
deps = [
"//common/runes:go_default_library",
"@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
"@org_golang_x_text//width:go_default_library",
],
)

View File

@@ -15,11 +15,13 @@ go_library(
"navigable.go",
],
importpath = "github.com/google/cel-go/common/ast",
deps = [
deps = [
"//common:go_default_library",
"//common/types:go_default_library",
"//common/types/ref:go_default_library",
"@dev_cel_expr//:expr",
"@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
"@org_golang_google_protobuf//types/known/structpb:go_default_library",
],
)
@@ -35,12 +37,13 @@ go_test(
embed = [
":go_default_library",
],
deps = [
deps = [
"//checker:go_default_library",
"//checker/decls:go_default_library",
"//common:go_default_library",
"//common/containers:go_default_library",
"//common/decls:go_default_library",
"//common/operators:go_default_library",
"//common/overloads:go_default_library",
"//common/stdlib:go_default_library",
"//common/types:go_default_library",

View File

@@ -310,21 +310,18 @@ func (s *SourceInfo) SetOffsetRange(id int64, o OffsetRange) {
s.offsetRanges[id] = o
}
// ClearOffsetRange removes the OffsetRange for the given expression id.
func (s *SourceInfo) ClearOffsetRange(id int64) {
if s != nil {
delete(s.offsetRanges, id)
}
}
// GetStartLocation calculates the human-readable 1-based line and 0-based column of the first character
// of the expression node at the id.
func (s *SourceInfo) GetStartLocation(id int64) common.Location {
if o, found := s.GetOffsetRange(id); found {
line := 1
col := int(o.Start)
for _, lineOffset := range s.LineOffsets() {
if lineOffset < o.Start {
line++
col = int(o.Start - lineOffset)
} else {
break
}
}
return common.NewLocation(line, col)
return s.GetLocationByOffset(o.Start)
}
return common.NoLocation
}
@@ -336,21 +333,25 @@ func (s *SourceInfo) GetStartLocation(id int64) common.Location {
// be identical to the start location for the expression.
func (s *SourceInfo) GetStopLocation(id int64) common.Location {
if o, found := s.GetOffsetRange(id); found {
line := 1
col := int(o.Stop)
for _, lineOffset := range s.LineOffsets() {
if lineOffset < o.Stop {
line++
col = int(o.Stop - lineOffset)
} else {
break
}
}
return common.NewLocation(line, col)
return s.GetLocationByOffset(o.Stop)
}
return common.NoLocation
}
// GetLocationByOffset returns the line and column information for a given character offset.
func (s *SourceInfo) GetLocationByOffset(offset int32) common.Location {
line := 1
col := int(offset)
for _, lineOffset := range s.LineOffsets() {
if lineOffset > offset {
break
}
line++
col = int(offset - lineOffset)
}
return common.NewLocation(line, col)
}
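To make the offset arithmetic above concrete, a self-contained sketch that mirrors the GetLocationByOffset loop against hand-written line offsets; the sample source and offsets are illustrative assumptions:

    package main

    import "fmt"

    // locate mirrors the GetLocationByOffset loop above: each line offset points
    // just past the newline (or EOF) that terminates the corresponding line.
    func locate(lineOffsets []int32, offset int32) (int, int) {
        line := 1
        col := int(offset)
        for _, lineOffset := range lineOffsets {
            if lineOffset > offset {
                break
            }
            line++
            col = int(offset - lineOffset)
        }
        return line, col
    }

    func main() {
        // For the source "ab\ncd\nef" the line offsets are [3, 6, 9].
        fmt.Println(locate([]int32{3, 6, 9}, 4)) // 2 1 -> 'd' sits on line 2, column 1
    }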
// ComputeOffset calculates the 0-based character offset from a 1-based line and 0-based column.
func (s *SourceInfo) ComputeOffset(line, col int32) int32 {
if s != nil {

View File

@@ -17,12 +17,14 @@ package ast
import (
"fmt"
"google.golang.org/protobuf/proto"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
structpb "google.golang.org/protobuf/types/known/structpb"
celpb "cel.dev/expr"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
structpb "google.golang.org/protobuf/types/known/structpb"
)
// ToProto converts an AST to a CheckedExpr protobouf.
@@ -173,9 +175,10 @@ func exprComprehension(factory ExprFactory, id int64, comp *exprpb.Expr_Comprehe
if err != nil {
return nil, err
}
return factory.NewComprehension(id,
return factory.NewComprehensionTwoVar(id,
iterRange,
comp.GetIterVar(),
comp.GetIterVar2(),
comp.GetAccuVar(),
accuInit,
loopCond,
@@ -363,6 +366,7 @@ func protoComprehension(id int64, comp ComprehensionExpr) (*exprpb.Expr, error)
ExprKind: &exprpb.Expr_ComprehensionExpr{
ComprehensionExpr: &exprpb.Expr_Comprehension{
IterVar: comp.IterVar(),
IterVar2: comp.IterVar2(),
IterRange: iterRange,
AccuVar: comp.AccuVar(),
AccuInit: accuInit,
@@ -609,24 +613,47 @@ func ValToConstant(v ref.Val) (*exprpb.Constant, error) {
// ConstantToVal converts a protobuf Constant to a CEL-native ref.Val.
func ConstantToVal(c *exprpb.Constant) (ref.Val, error) {
return AlphaProtoConstantAsVal(c)
}
// AlphaProtoConstantAsVal converts a v1alpha1.Constant protobuf to a CEL-native ref.Val.
func AlphaProtoConstantAsVal(c *exprpb.Constant) (ref.Val, error) {
if c == nil {
return nil, nil
}
canonical := &celpb.Constant{}
if err := convertProto(c, canonical); err != nil {
return nil, err
}
return ProtoConstantAsVal(canonical)
}
// ProtoConstantAsVal converts a canonical celpb.Constant protobuf to a CEL-native ref.Val.
func ProtoConstantAsVal(c *celpb.Constant) (ref.Val, error) {
switch c.GetConstantKind().(type) {
case *exprpb.Constant_BoolValue:
case *celpb.Constant_BoolValue:
return types.Bool(c.GetBoolValue()), nil
case *exprpb.Constant_BytesValue:
case *celpb.Constant_BytesValue:
return types.Bytes(c.GetBytesValue()), nil
case *exprpb.Constant_DoubleValue:
case *celpb.Constant_DoubleValue:
return types.Double(c.GetDoubleValue()), nil
case *exprpb.Constant_Int64Value:
case *celpb.Constant_Int64Value:
return types.Int(c.GetInt64Value()), nil
case *exprpb.Constant_NullValue:
case *celpb.Constant_NullValue:
return types.NullValue, nil
case *exprpb.Constant_StringValue:
case *celpb.Constant_StringValue:
return types.String(c.GetStringValue()), nil
case *exprpb.Constant_Uint64Value:
case *celpb.Constant_Uint64Value:
return types.Uint(c.GetUint64Value()), nil
}
return nil, fmt.Errorf("unsupported constant kind: %v", c.GetConstantKind())
}
func convertProto(src, dst proto.Message) error {
pb, err := proto.Marshal(src)
if err != nil {
return err
}
err = proto.Unmarshal(pb, dst)
return err
}

View File

@@ -158,7 +158,7 @@ type EntryExpr interface {
// IDGenerator produces unique ids suitable for tagging expression nodes
type IDGenerator func(originalID int64) int64
// CallExpr defines an interface for inspecting a function call and its arugments.
// CallExpr defines an interface for inspecting a function call and its arguments.
type CallExpr interface {
// FunctionName returns the name of the function.
FunctionName() string
@@ -269,8 +269,22 @@ type ComprehensionExpr interface {
IterRange() Expr
// IterVar returns the iteration variable name.
//
// For one-variable comprehensions, the iter var refers to the element value
// when iterating over a list, or the map key when iterating over a map.
//
// For two-variable comprehensions, the iter var refers to the list index or the
// map key.
IterVar() string
// IterVar2 returns the second iteration variable name.
//
// When the value is non-empty, the comprehension is a two-variable comprehension.
IterVar2() string
// HasIterVar2 returns true if the second iteration variable is non-empty.
HasIterVar2() bool
// AccuVar returns the accumulation variable name.
AccuVar() string
@@ -397,6 +411,7 @@ func (e *expr) SetKindCase(other Expr) {
e.exprKindCase = &baseComprehensionExpr{
iterRange: c.IterRange(),
iterVar: c.IterVar(),
iterVar2: c.IterVar2(),
accuVar: c.AccuVar(),
accuInit: c.AccuInit(),
loopCond: c.LoopCondition(),
@@ -505,6 +520,7 @@ var _ ComprehensionExpr = &baseComprehensionExpr{}
type baseComprehensionExpr struct {
iterRange Expr
iterVar string
iterVar2 string
accuVar string
accuInit Expr
loopCond Expr
@@ -527,6 +543,14 @@ func (e *baseComprehensionExpr) IterVar() string {
return e.iterVar
}
func (e *baseComprehensionExpr) IterVar2() string {
return e.iterVar2
}
func (e *baseComprehensionExpr) HasIterVar2() bool {
return e.iterVar2 != ""
}
func (e *baseComprehensionExpr) AccuVar() string {
return e.accuVar
}

View File

@@ -27,9 +27,12 @@ type ExprFactory interface {
// NewCall creates an Expr value representing a global function call.
NewCall(id int64, function string, args ...Expr) Expr
// NewComprehension creates an Expr value representing a comprehension over a value range.
// NewComprehension creates an Expr value representing a one-variable comprehension over a value range.
NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCondition, loopStep, result Expr) Expr
// NewComprehensionTwoVar creates an Expr value representing a two-variable comprehension over a value range.
NewComprehensionTwoVar(id int64, iterRange Expr, iterVar, iterVar2, accuVar string, accuInit, loopCondition, loopStep, result Expr) Expr
// NewMemberCall creates an Expr value representing a member function call.
NewMemberCall(id int64, function string, receiver Expr, args ...Expr) Expr
@@ -111,11 +114,17 @@ func (fac *baseExprFactory) NewMemberCall(id int64, function string, target Expr
}
func (fac *baseExprFactory) NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCond, loopStep, result Expr) Expr {
// Set the iter_var2 to empty string to indicate the second variable is omitted
return fac.NewComprehensionTwoVar(id, iterRange, iterVar, "", accuVar, accuInit, loopCond, loopStep, result)
}
func (fac *baseExprFactory) NewComprehensionTwoVar(id int64, iterRange Expr, iterVar, iterVar2, accuVar string, accuInit, loopCond, loopStep, result Expr) Expr {
return fac.newExpr(
id,
&baseComprehensionExpr{
iterRange: iterRange,
iterVar: iterVar,
iterVar2: iterVar2,
accuVar: accuVar,
accuInit: accuInit,
loopCond: loopCond,
@@ -223,9 +232,10 @@ func (fac *baseExprFactory) CopyExpr(e Expr) Expr {
return fac.NewMemberCall(e.ID(), c.FunctionName(), fac.CopyExpr(c.Target()), argsCopy...)
case ComprehensionKind:
compre := e.AsComprehension()
return fac.NewComprehension(e.ID(),
return fac.NewComprehensionTwoVar(e.ID(),
fac.CopyExpr(compre.IterRange()),
compre.IterVar(),
compre.IterVar2(),
compre.AccuVar(),
fac.CopyExpr(compre.AccuInit()),
fac.CopyExpr(compre.LoopCondition()),

View File

@@ -390,6 +390,14 @@ func (comp navigableComprehensionImpl) IterVar() string {
return comp.Expr.AsComprehension().IterVar()
}
func (comp navigableComprehensionImpl) IterVar2() string {
return comp.Expr.AsComprehension().IterVar2()
}
func (comp navigableComprehensionImpl) HasIterVar2() bool {
return comp.Expr.AsComprehension().HasIterVar2()
}
func (comp navigableComprehensionImpl) AccuVar() string {
return comp.Expr.AsComprehension().AccuVar()
}

View File

@@ -19,6 +19,7 @@ package containers
import (
"fmt"
"strings"
"unicode"
"github.com/google/cel-go/common/ast"
)
@@ -212,6 +213,13 @@ type ContainerOption func(*Container) (*Container, error)
func Abbrevs(qualifiedNames ...string) ContainerOption {
return func(c *Container) (*Container, error) {
for _, qn := range qualifiedNames {
qn = strings.TrimSpace(qn)
for _, r := range qn {
if !isIdentifierChar(r) {
return nil, fmt.Errorf(
"invalid qualified name: %s, wanted name of the form 'qualified.name'", qn)
}
}
ind := strings.LastIndex(qn, ".")
if ind <= 0 || ind >= len(qn)-1 {
return nil, fmt.Errorf(
@@ -278,6 +286,10 @@ func aliasAs(kind, qualifiedName, alias string) ContainerOption {
}
}
func isIdentifierChar(r rune) bool {
return r <= unicode.MaxASCII && (r == '.' || r == '_' || unicode.IsLetter(r) || unicode.IsNumber(r))
}
// Name sets the fully-qualified name of the Container.
func Name(name string) ContainerOption {
return func(c *Container) (*Container, error) {

View File

@@ -215,6 +215,11 @@ func (w *debugWriter) appendComprehension(comprehension ast.ComprehensionExpr) {
w.append(comprehension.IterVar())
w.append(",")
w.appendLine()
if comprehension.HasIterVar2() {
w.append(comprehension.IterVar2())
w.append(",")
w.appendLine()
}
w.append("// Target")
w.appendLine()
w.Buffer(comprehension.IterRange())

View File

@@ -162,7 +162,9 @@ func (f *FunctionDecl) AddOverload(overload *OverloadDecl) error {
if oID == overload.ID() {
if o.SignatureEquals(overload) && o.IsNonStrict() == overload.IsNonStrict() {
// Allow redefinition of an overload implementation so long as the signatures match.
f.overloads[oID] = overload
if overload.hasBinding() {
f.overloads[oID] = overload
}
return nil
}
return fmt.Errorf("overload redefinition in function. %s: %s has multiple definitions", f.Name(), oID)
@@ -249,15 +251,15 @@ func (f *FunctionDecl) Bindings() ([]*functions.Overload, error) {
// are preserved in order to assist with the function resolution step.
switch len(args) {
case 1:
if o.unaryOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) {
if o.unaryOp != nil && o.matchesRuntimeSignature(f.disableTypeGuards, args...) {
return o.unaryOp(args[0])
}
case 2:
if o.binaryOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) {
if o.binaryOp != nil && o.matchesRuntimeSignature(f.disableTypeGuards, args...) {
return o.binaryOp(args[0], args[1])
}
}
if o.functionOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) {
if o.functionOp != nil && o.matchesRuntimeSignature(f.disableTypeGuards, args...) {
return o.functionOp(args...)
}
// eventually this will fall through to the noSuchOverload below.
@@ -775,8 +777,13 @@ func (v *VariableDecl) DeclarationIsEquivalent(other *VariableDecl) bool {
return v.Name() == other.Name() && v.Type().IsEquivalentType(other.Type())
}
// VariableDeclToExprDecl converts a go-native variable declaration into a protobuf-type variable declaration.
func VariableDeclToExprDecl(v *VariableDecl) (*exprpb.Decl, error) {
// TypeVariable creates a new type identifier for use within a types.Provider
func TypeVariable(t *types.Type) *VariableDecl {
return NewVariable(t.TypeName(), types.NewTypeTypeWithParam(t))
}
// variableDeclToExprDecl converts a go-native variable declaration into a protobuf-type variable declaration.
func variableDeclToExprDecl(v *VariableDecl) (*exprpb.Decl, error) {
varType, err := types.TypeToExprType(v.Type())
if err != nil {
return nil, err
@@ -784,13 +791,8 @@ func VariableDeclToExprDecl(v *VariableDecl) (*exprpb.Decl, error) {
return chkdecls.NewVar(v.Name(), varType), nil
}
// TypeVariable creates a new type identifier for use within a types.Provider
func TypeVariable(t *types.Type) *VariableDecl {
return NewVariable(t.TypeName(), types.NewTypeTypeWithParam(t))
}
// FunctionDeclToExprDecl converts a go-native function declaration into a protobuf-typed function declaration.
func FunctionDeclToExprDecl(f *FunctionDecl) (*exprpb.Decl, error) {
// functionDeclToExprDecl converts a go-native function declaration into a protobuf-typed function declaration.
func functionDeclToExprDecl(f *FunctionDecl) (*exprpb.Decl, error) {
overloads := make([]*exprpb.Decl_FunctionDecl_Overload, len(f.overloads))
for i, oID := range f.overloadOrdinals {
o := f.overloads[oID]

View File

@@ -18,8 +18,6 @@ import (
"fmt"
"strings"
"unicode/utf8"
"golang.org/x/text/width"
)
// NewError creates an error associated with an expression id with the given message at the given location.
@@ -35,18 +33,15 @@ type Error struct {
}
const (
dot = "."
ind = "^"
dot = "."
ind = "^"
wideDot = "\uff0e"
wideInd = "\uff3e"
// maxSnippetLength is the largest number of characters which can be rendered in an error message snippet.
maxSnippetLength = 16384
)
var (
wideDot = width.Widen.String(dot)
wideInd = width.Widen.String(ind)
)
// ToDisplayString decorates the error message with the source location.
func (e *Error) ToDisplayString(source Source) string {
var result = fmt.Sprintf("ERROR: %s:%d:%d: %s",

View File

@@ -127,20 +127,48 @@ var nilBuffer = &emptyBuffer{}
// elements of the byte or uint16 array, and continue. The underlying storage is an rune array
// containing any Unicode character.
func NewBuffer(data string) Buffer {
buf, _ := newBuffer(data, false)
return buf
}
// NewBufferAndLineOffsets returns an efficient implementation of Buffer for the given text based on
// the ranges of the encoded code points contained within, as well as returning the line offsets.
//
// Code points are represented as an array of byte, uint16, or rune. This approach ensures that
// each index represents a code point by itself without needing to use an array of rune. At first
// we assume all code points are less than or equal to '\u007f'. If this holds true, the
// underlying storage is a byte array containing only ASCII characters. If we encountered a code
// point above this range but less than or equal to '\uffff' we allocate a uint16 array, copy the
// elements of previous byte array to the uint16 array, and continue. If this holds true, the
// underlying storage is a uint16 array containing only Unicode characters in the Basic Multilingual
// Plane. If we encounter a code point above '\uffff' we allocate an rune array, copy the previous
// elements of the byte or uint16 array, and continue. The underlying storage is an rune array
// containing any Unicode character.
func NewBufferAndLineOffsets(data string) (Buffer, []int32) {
return newBuffer(data, true)
}
func newBuffer(data string, lines bool) (Buffer, []int32) {
if len(data) == 0 {
return nilBuffer
return nilBuffer, []int32{0}
}
var (
idx = 0
buf8 = make([]byte, 0, len(data))
idx = 0
off int32 = 0
buf8 = make([]byte, 0, len(data))
buf16 []uint16
buf32 []rune
offs []int32
)
for idx < len(data) {
r, s := utf8.DecodeRuneInString(data[idx:])
idx += s
if lines && r == '\n' {
offs = append(offs, off+1)
}
if r < utf8.RuneSelf {
buf8 = append(buf8, byte(r))
off++
continue
}
if r <= 0xffff {
@@ -150,6 +178,7 @@ func NewBuffer(data string) Buffer {
}
buf8 = nil
buf16 = append(buf16, uint16(r))
off++
goto copy16
}
buf32 = make([]rune, len(buf8), len(data))
@@ -158,17 +187,25 @@ func NewBuffer(data string) Buffer {
}
buf8 = nil
buf32 = append(buf32, r)
off++
goto copy32
}
if lines {
offs = append(offs, off+1)
}
return &asciiBuffer{
arr: buf8,
}
}, offs
copy16:
for idx < len(data) {
r, s := utf8.DecodeRuneInString(data[idx:])
idx += s
if lines && r == '\n' {
offs = append(offs, off+1)
}
if r <= 0xffff {
buf16 = append(buf16, uint16(r))
off++
continue
}
buf32 = make([]rune, len(buf16), len(data))
@@ -177,18 +214,29 @@ copy16:
}
buf16 = nil
buf32 = append(buf32, r)
off++
goto copy32
}
if lines {
offs = append(offs, off+1)
}
return &basicBuffer{
arr: buf16,
}
}, offs
copy32:
for idx < len(data) {
r, s := utf8.DecodeRuneInString(data[idx:])
idx += s
if lines && r == '\n' {
offs = append(offs, off+1)
}
buf32 = append(buf32, r)
off++
}
if lines {
offs = append(offs, off+1)
}
return &supplementalBuffer{
arr: buf32,
}
}, offs
}
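A quick illustration of the new helper, assuming the runes.Buffer interface exposes Len() as elsewhere in this package:

    package main

    import (
        "fmt"

        "github.com/google/cel-go/common/runes"
    )

    func main() {
        buf, offsets := runes.NewBufferAndLineOffsets("ab\ncd\nef")
        fmt.Println(buf.Len()) // 8 code points
        fmt.Println(offsets)   // [3 6 9]: one entry per line, pointing just past its terminating newline (or EOF)
    }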

View File

@@ -15,9 +15,6 @@
package common
import (
"strings"
"unicode/utf8"
"github.com/google/cel-go/common/runes"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
@@ -80,17 +77,11 @@ func NewTextSource(text string) Source {
// NewStringSource creates a new Source from the given contents and description.
func NewStringSource(contents string, description string) Source {
// Compute line offsets up front as they are referred to frequently.
lines := strings.Split(contents, "\n")
offsets := make([]int32, len(lines))
var offset int32
for i, line := range lines {
offset = offset + int32(utf8.RuneCountInString(line)) + 1
offsets[int32(i)] = offset
}
buf, offs := runes.NewBufferAndLineOffsets(contents)
return &sourceImpl{
Buffer: runes.NewBuffer(contents),
Buffer: buf,
description: description,
lineOffsets: offsets,
lineOffsets: offs,
}
}
@@ -172,9 +163,8 @@ func (s *sourceImpl) findLine(characterOffset int32) (int32, int32) {
for _, lineOffset := range s.lineOffsets {
if lineOffset > characterOffset {
break
} else {
line++
}
line++
}
if line == 1 {
return line, 0

View File

@@ -12,7 +12,6 @@ go_library(
],
importpath = "github.com/google/cel-go/common/stdlib",
deps = [
"//checker/decls:go_default_library",
"//common/decls:go_default_library",
"//common/functions:go_default_library",
"//common/operators:go_default_library",
@@ -20,6 +19,5 @@ go_library(
"//common/types:go_default_library",
"//common/types/ref:go_default_library",
"//common/types/traits:go_default_library",
"@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
],
)

View File

@@ -23,15 +23,11 @@ import (
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/common/types/traits"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
var (
stdFunctions []*decls.FunctionDecl
stdFnDecls []*exprpb.Decl
stdTypes []*decls.VariableDecl
stdTypeDecls []*exprpb.Decl
)
func init() {
@@ -55,15 +51,6 @@ func init() {
decls.TypeVariable(types.UintType),
}
stdTypeDecls = make([]*exprpb.Decl, 0, len(stdTypes))
for _, stdType := range stdTypes {
typeVar, err := decls.VariableDeclToExprDecl(stdType)
if err != nil {
panic(err)
}
stdTypeDecls = append(stdTypeDecls, typeVar)
}
stdFunctions = []*decls.FunctionDecl{
// Logical operators. Special-cased within the interpreter.
// Note, the singleton binding prevents extensions from overriding the operator behavior.
@@ -576,18 +563,6 @@ func init() {
decls.MemberOverload(overloads.DurationToMilliseconds,
argTypes(types.DurationType), types.IntType)),
}
stdFnDecls = make([]*exprpb.Decl, 0, len(stdFunctions))
for _, fn := range stdFunctions {
if fn.IsDeclarationDisabled() {
continue
}
ed, err := decls.FunctionDeclToExprDecl(fn)
if err != nil {
panic(err)
}
stdFnDecls = append(stdFnDecls, ed)
}
}
// Functions returns the set of standard library function declarations and definitions for CEL.
@@ -595,27 +570,11 @@ func Functions() []*decls.FunctionDecl {
return stdFunctions
}
// FunctionExprDecls returns the legacy style protobuf-typed declarations for all functions and overloads
// in the CEL standard environment.
//
// Deprecated: use Functions
func FunctionExprDecls() []*exprpb.Decl {
return stdFnDecls
}
// Types returns the set of standard library types for CEL.
func Types() []*decls.VariableDecl {
return stdTypes
}
// TypeExprDecls returns the legacy style protobuf-typed declarations for all types in the CEL
// standard environment.
//
// Deprecated: use Types
func TypeExprDecls() []*exprpb.Decl {
return stdTypeDecls
}
func notStrictlyFalse(value ref.Val) ref.Val {
if types.IsBool(value) {
return value

View File

@@ -40,10 +40,12 @@ go_library(
"//common/types/ref:go_default_library",
"//common/types/traits:go_default_library",
"@com_github_stoewer_go_strcase//:go_default_library",
"@dev_cel_expr//:expr",
"@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
"@org_golang_google_protobuf//encoding/protojson:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
"@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
"@org_golang_google_protobuf//types/dynamicpb:go_default_library",
"@org_golang_google_protobuf//types/known/anypb:go_default_library",
"@org_golang_google_protobuf//types/known/durationpb:go_default_library",
"@org_golang_google_protobuf//types/known/structpb:go_default_library",

View File

@@ -58,7 +58,17 @@ func (b Bytes) Compare(other ref.Val) ref.Val {
// ConvertToNative implements the ref.Val interface method.
func (b Bytes) ConvertToNative(typeDesc reflect.Type) (any, error) {
switch typeDesc.Kind() {
case reflect.Array, reflect.Slice:
case reflect.Array:
if len(b) != typeDesc.Len() {
return nil, fmt.Errorf("[%d]byte not assignable to [%d]byte array", len(b), typeDesc.Len())
}
refArrPtr := reflect.New(reflect.ArrayOf(len(b), typeDesc.Elem()))
refArr := refArrPtr.Elem()
for i, byt := range b {
refArr.Index(i).Set(reflect.ValueOf(byt).Convert(typeDesc.Elem()))
}
return refArr.Interface(), nil
case reflect.Slice:
return reflect.ValueOf(b).Convert(typeDesc).Interface(), nil
case reflect.Ptr:
switch typeDesc {

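A short example of the new fixed-size array branch above (standard cel-go types package; nothing here is specific to this repository):

    package main

    import (
        "fmt"
        "reflect"

        "github.com/google/cel-go/common/types"
    )

    func main() {
        b := types.Bytes([]byte{0xde, 0xad, 0xbe, 0xef})
        // The reflect.Array case permits conversion into fixed-size byte arrays.
        out, err := b.ConvertToNative(reflect.TypeOf([4]byte{}))
        if err != nil {
            panic(err)
        }
        fmt.Printf("%x\n", out.([4]byte)) // deadbeef
    }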
View File

@@ -256,6 +256,15 @@ func (l *baseList) IsZeroValue() bool {
return l.size == 0
}
// Fold calls the FoldEntry method for each (index, value) pair in the list.
func (l *baseList) Fold(f traits.Folder) {
for i := 0; i < l.size; i++ {
if !f.FoldEntry(i, l.get(i)) {
break
}
}
}
// Iterator implements the traits.Iterable interface method.
func (l *baseList) Iterator() traits.Iterator {
return newListIterator(l)
@@ -433,6 +442,15 @@ func (l *concatList) IsZeroValue() bool {
return l.Size().(Int) == 0
}
// Fold calls the FoldEntry method for each (index, value) pair in the list.
func (l *concatList) Fold(f traits.Folder) {
for i := Int(0); i < l.Size().(Int); i++ {
if !f.FoldEntry(i, l.Get(i)) {
break
}
}
}
// Iterator implements the traits.Iterable interface method.
func (l *concatList) Iterator() traits.Iterator {
return newListIterator(l)
@@ -527,3 +545,30 @@ func IndexOrError(index ref.Val) (int, error) {
return -1, fmt.Errorf("unsupported index type '%s' in list", index.Type())
}
}
// ToFoldableList will create a Foldable version of a list suitable for key-value pair iteration.
//
// For values which are already Foldable, this call is a no-op. For all other values, the fold is
// driven via the Size() and Get() calls which means that the folding will function, but take a
// performance hit.
func ToFoldableList(l traits.Lister) traits.Foldable {
if f, ok := l.(traits.Foldable); ok {
return f
}
return interopFoldableList{Lister: l}
}
type interopFoldableList struct {
traits.Lister
}
// Fold implements the traits.Foldable interface method and performs an iteration over the
// range of elements of the list.
func (l interopFoldableList) Fold(f traits.Folder) {
sz := l.Size().(Int)
for i := Int(0); i < sz; i++ {
if !f.FoldEntry(i, l.Get(i)) {
break
}
}
}
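A minimal sketch of driving the new Fold support from user code; collectFolder is a hypothetical traits.Folder used only for illustration:

    package main

    import (
        "fmt"

        "github.com/google/cel-go/common/types"
    )

    // collectFolder gathers values and stops once maxEntries have been seen.
    type collectFolder struct {
        entries    []any
        maxEntries int
    }

    func (c *collectFolder) FoldEntry(key, val any) bool {
        c.entries = append(c.entries, val)
        return len(c.entries) < c.maxEntries
    }

    func main() {
        l := types.NewDynamicList(types.DefaultTypeAdapter, []string{"a", "b", "c"})
        folder := &collectFolder{maxEntries: 2}
        types.ToFoldableList(l).Fold(folder)
        fmt.Println(folder.entries) // [a b] -- folding stops early once maxEntries is reached
    }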

View File

@@ -94,6 +94,24 @@ func NewProtoMap(adapter Adapter, value *pb.Map) traits.Mapper {
}
}
// NewMutableMap constructs a mutable map from an adapter and a set of map values.
func NewMutableMap(adapter Adapter, mutableValues map[ref.Val]ref.Val) traits.MutableMapper {
mutableCopy := make(map[ref.Val]ref.Val, len(mutableValues))
for k, v := range mutableValues {
mutableCopy[k] = v
}
m := &mutableMap{
baseMap: &baseMap{
Adapter: adapter,
mapAccessor: newRefValMapAccessor(mutableCopy),
value: mutableCopy,
size: len(mutableCopy),
},
mutableValues: mutableCopy,
}
return m
}
// mapAccessor is a private interface for finding values within a map and iterating over the keys.
// This interface implements portions of the API surface area required by the traits.Mapper
// interface.
@@ -105,6 +123,9 @@ type mapAccessor interface {
// Iterator returns an Iterator over the map key set.
Iterator() traits.Iterator
// Fold calls the FoldEntry method for each (key, value) pair in the map.
Fold(traits.Folder)
}
// baseMap is a reflection based map implementation designed to handle a variety of map-like types.
@@ -307,6 +328,28 @@ func (m *baseMap) Value() any {
return m.value
}
// mutableMap holds onto a set of mutable values which are used for intermediate computations.
type mutableMap struct {
*baseMap
mutableValues map[ref.Val]ref.Val
}
// Insert implements the traits.MutableMapper interface method, returning true if the key insertion
// succeeds.
func (m *mutableMap) Insert(k, v ref.Val) ref.Val {
if _, found := m.Find(k); found {
return NewErr("insert failed: key %v already exists", k)
}
m.mutableValues[k] = v
return m
}
// ToImmutableMap implements the traits.MutableMapper interface method, converting a mutable map
// into an immutable map implementation.
func (m *mutableMap) ToImmutableMap() traits.Mapper {
return NewRefValMap(m.Adapter, m.mutableValues)
}
func newJSONStructAccessor(adapter Adapter, st map[string]*structpb.Value) mapAccessor {
return &jsonStructAccessor{
Adapter: adapter,
@@ -350,6 +393,15 @@ func (a *jsonStructAccessor) Iterator() traits.Iterator {
}
}
// Fold calls the FoldEntry method for each (key, value) pair in the map.
func (a *jsonStructAccessor) Fold(f traits.Folder) {
for k, v := range a.st {
if !f.FoldEntry(k, v) {
break
}
}
}
func newReflectMapAccessor(adapter Adapter, value reflect.Value) mapAccessor {
keyType := value.Type().Key()
return &reflectMapAccessor{
@@ -424,6 +476,16 @@ func (m *reflectMapAccessor) Iterator() traits.Iterator {
}
}
// Fold calls the FoldEntry method for each (key, value) pair in the map.
func (m *reflectMapAccessor) Fold(f traits.Folder) {
mapRange := m.refValue.MapRange()
for mapRange.Next() {
if !f.FoldEntry(mapRange.Key().Interface(), mapRange.Value().Interface()) {
break
}
}
}
func newRefValMapAccessor(mapVal map[ref.Val]ref.Val) mapAccessor {
return &refValMapAccessor{mapVal: mapVal}
}
@@ -477,6 +539,15 @@ func (a *refValMapAccessor) Iterator() traits.Iterator {
}
}
// Fold calls the FoldEntry method for each (key, value) pair in the map.
func (a *refValMapAccessor) Fold(f traits.Folder) {
for k, v := range a.mapVal {
if !f.FoldEntry(k, v) {
break
}
}
}
func newStringMapAccessor(strMap map[string]string) mapAccessor {
return &stringMapAccessor{mapVal: strMap}
}
@@ -515,6 +586,15 @@ func (a *stringMapAccessor) Iterator() traits.Iterator {
}
}
// Fold calls the FoldEntry method for each (key, value) pair in the map.
func (a *stringMapAccessor) Fold(f traits.Folder) {
for k, v := range a.mapVal {
if !f.FoldEntry(k, v) {
break
}
}
}
func newStringIfaceMapAccessor(adapter Adapter, mapVal map[string]any) mapAccessor {
return &stringIfaceMapAccessor{
Adapter: adapter,
@@ -557,6 +637,15 @@ func (a *stringIfaceMapAccessor) Iterator() traits.Iterator {
}
}
// Fold calls the FoldEntry method for each (key, value) pair in the map.
func (a *stringIfaceMapAccessor) Fold(f traits.Folder) {
for k, v := range a.mapVal {
if !f.FoldEntry(k, v) {
break
}
}
}
// protoMap is a specialized, separate implementation of the traits.Mapper interfaces tailored to
// accessing protoreflect.Map values.
type protoMap struct {
@@ -769,6 +858,13 @@ func (m *protoMap) Iterator() traits.Iterator {
}
}
// Fold calls the FoldEntry method for each (key, value) pair in the map.
func (m *protoMap) Fold(f traits.Folder) {
m.value.Range(func(k protoreflect.MapKey, v protoreflect.Value) bool {
return f.FoldEntry(k.Interface(), v.Interface())
})
}
// Size returns the number of entries in the protoreflect.Map.
func (m *protoMap) Size() ref.Val {
return Int(m.value.Len())
@@ -852,3 +948,55 @@ func (it *stringKeyIterator) Next() ref.Val {
}
return nil
}
// ToFoldableMap will create a Foldable version of a map suitable for key-value pair iteration.
//
// For values which are already Foldable, this call is a no-op. For all other values, the fold
// is driven via the Iterator HasNext() and Next() calls as well as the map's Get() method
// which means that the folding will function, but take a performance hit.
func ToFoldableMap(m traits.Mapper) traits.Foldable {
if f, ok := m.(traits.Foldable); ok {
return f
}
return interopFoldableMap{Mapper: m}
}
type interopFoldableMap struct {
traits.Mapper
}
func (m interopFoldableMap) Fold(f traits.Folder) {
it := m.Iterator()
for it.HasNext() == True {
k := it.Next()
if !f.FoldEntry(k, m.Get(k)) {
break
}
}
}
// InsertMapKeyValue inserts a key, value pair into the target map if the target map does not
// already contain the given key.
//
// If the map is mutable, it is modified in-place per the MutableMapper contract.
// If the map is not mutable, a copy containing the new key, value pair is made.
func InsertMapKeyValue(m traits.Mapper, k, v ref.Val) ref.Val {
if mutable, ok := m.(traits.MutableMapper); ok {
return mutable.Insert(k, v)
}
// Otherwise perform the slow version of the insertion which makes a copy of the incoming map.
if _, found := m.Find(k); !found {
size := m.Size().(Int)
copy := make(map[ref.Val]ref.Val, size+1)
copy[k] = v
it := m.Iterator()
for it.HasNext() == True {
nextK := it.Next()
nextV := m.Get(nextK)
copy[nextK] = nextV
}
return DefaultTypeAdapter.NativeToValue(copy)
}
return NewErr("insert failed: key %v already exists", k)
}
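To illustrate the copy-on-insert behavior described above, a hedged sketch using the public constructors from this package:

    package main

    import (
        "fmt"

        "github.com/google/cel-go/common/types"
        "github.com/google/cel-go/common/types/ref"
        "github.com/google/cel-go/common/types/traits"
    )

    func main() {
        orig := types.NewRefValMap(types.DefaultTypeAdapter, map[ref.Val]ref.Val{
            types.String("a"): types.Int(1),
        })
        // orig is not a MutableMapper, so the insert returns a copy with both keys.
        out := types.InsertMapKeyValue(orig, types.String("b"), types.Int(2))
        fmt.Println(out.(traits.Mapper).Size()) // 2
        // Re-inserting an existing key yields an error value instead of mutating the map.
        fmt.Println(types.IsError(types.InsertMapKeyValue(orig, types.String("a"), types.Int(3)))) // true
    }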

View File

@@ -35,6 +35,8 @@ var (
// golang reflect type for Null values.
nullReflectType = reflect.TypeOf(NullValue)
protoIfaceType = reflect.TypeOf((*proto.Message)(nil)).Elem()
)
// ConvertToNative implements ref.Val.ConvertToNative.
@@ -61,8 +63,14 @@ func (n Null) ConvertToNative(typeDesc reflect.Type) (any, error) {
return structpb.NewNullValue(), nil
case boolWrapperType, byteWrapperType, doubleWrapperType, floatWrapperType,
int32WrapperType, int64WrapperType, stringWrapperType, uint32WrapperType,
uint64WrapperType:
uint64WrapperType, durationValueType, timestampValueType, protoIfaceType:
return nil, nil
case jsonListValueType, jsonStructType:
// skip handling
default:
if typeDesc.Implements(protoIfaceType) {
return nil, nil
}
}
case reflect.Interface:
nv := n.Value()

View File

@@ -427,22 +427,49 @@ func unwrap(desc description, msg proto.Message) (any, bool, error) {
return structpb.NullValue_NULL_VALUE, true, nil
}
case *wrapperspb.BoolValue:
if v == nil {
return nil, true, nil
}
return v.GetValue(), true, nil
case *wrapperspb.BytesValue:
if v == nil {
return nil, true, nil
}
return v.GetValue(), true, nil
case *wrapperspb.DoubleValue:
if v == nil {
return nil, true, nil
}
return v.GetValue(), true, nil
case *wrapperspb.FloatValue:
if v == nil {
return nil, true, nil
}
return float64(v.GetValue()), true, nil
case *wrapperspb.Int32Value:
if v == nil {
return nil, true, nil
}
return int64(v.GetValue()), true, nil
case *wrapperspb.Int64Value:
if v == nil {
return nil, true, nil
}
return v.GetValue(), true, nil
case *wrapperspb.StringValue:
if v == nil {
return nil, true, nil
}
return v.GetValue(), true, nil
case *wrapperspb.UInt32Value:
if v == nil {
return nil, true, nil
}
return uint64(v.GetValue()), true, nil
case *wrapperspb.UInt64Value:
if v == nil {
return nil, true, nil
}
return v.GetValue(), true, nil
}
return msg, false, nil

View File

@@ -585,6 +585,14 @@ func nativeToValue(a Adapter, value any) (ref.Val, bool) {
refKind := refValue.Kind()
switch refKind {
case reflect.Array, reflect.Slice:
if refValue.Type().Elem() == reflect.TypeOf(byte(0)) {
if refValue.CanAddr() {
return Bytes(refValue.Bytes()), true
}
tmp := reflect.New(refValue.Type())
tmp.Elem().Set(refValue)
return Bytes(tmp.Elem().Bytes()), true
}
return NewDynamicList(a, v), true
case reflect.Map:
return NewDynamicMap(a, v), true
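A brief example of the new array handling; the adapter call is the standard cel-go entry point and is used here only for illustration:

    package main

    import (
        "fmt"

        "github.com/google/cel-go/common/types"
    )

    func main() {
        arr := [4]byte{0xca, 0xfe, 0xba, 0xbe}
        // Fixed-size byte arrays now convert to types.Bytes rather than a dynamic list.
        val := types.DefaultTypeAdapter.NativeToValue(arr)
        fmt.Printf("%T %x\n", val, val.Value()) // types.Bytes cafebabe
    }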

View File

@@ -34,3 +34,16 @@ type Iterator interface {
// Next returns the next element.
Next() ref.Val
}
// Foldable aggregate types support iteration over (key, value) or (index, value) pairs.
type Foldable interface {
// Fold invokes the Folder.FoldEntry for all entries in the type
Fold(Folder)
}
// Folder performs a fold on a given entry and indicates whether to continue folding.
type Folder interface {
// FoldEntry indicates the key, value pair associated with the entry.
// If the output is true, continue folding. Otherwise, terminate the fold.
FoldEntry(key, val any) bool
}

View File

@@ -27,6 +27,9 @@ type Lister interface {
}
// MutableLister interface which emits an immutable result after an intermediate computation.
//
// Note, this interface is intended only to be used within Comprehensions where the mutable
// value is not directly observable within the user-authored CEL expression.
type MutableLister interface {
Lister
ToImmutableList() Lister

View File

@@ -31,3 +31,18 @@ type Mapper interface {
// (Unknown|Err, false).
Find(key ref.Val) (ref.Val, bool)
}
// MutableMapper interface which emits an immutable result after an intermediate computation.
//
// Note, this interface is intended only to be used within Comprehensions where the mutable
// value is not directly observable within the user-authored CEL expression.
type MutableMapper interface {
Mapper
// Insert a key, value pair into the map, returning the map if the insert is successful
// and an error if key already exists in the mutable map.
Insert(k, v ref.Val) ref.Val
// ToImmutableMap converts a mutable map into an immutable map.
ToImmutableMap() Mapper
}

View File

@@ -59,6 +59,21 @@ const (
// SizerType types support the size() method.
SizerType
// SubtractorType type support '-' operations.
// SubtractorType types support '-' operations.
SubtractorType
// FoldableType types support comprehensions v2 macros which iterate over (key, value) pairs.
FoldableType
)
const (
// ListerType supports a set of traits necessary for list operations.
//
// The ListerType is syntactic sugar and not intended to be a perfect reflection of all List operators.
ListerType = AdderType | ContainerType | IndexerType | IterableType | SizerType
// MapperType supports a set of traits necessary for map operations.
//
// The MapperType is syntactic sugar and not intended to be a perfect reflection of all Map operators.
MapperType = ContainerType | IndexerType | IterableType | SizerType
)

View File

@@ -19,10 +19,13 @@ import (
"reflect"
"strings"
"google.golang.org/protobuf/proto"
chkdecls "github.com/google/cel-go/checker/decls"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/common/types/traits"
celpb "cel.dev/expr"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
@@ -666,85 +669,99 @@ func TypeToExprType(t *Type) (*exprpb.Type, error) {
// ExprTypeToType converts a protobuf CEL type representation to a CEL-native type representation.
func ExprTypeToType(t *exprpb.Type) (*Type, error) {
return AlphaProtoAsType(t)
}
// AlphaProtoAsType converts a CEL v1alpha1.Type protobuf type to a CEL-native type representation.
func AlphaProtoAsType(t *exprpb.Type) (*Type, error) {
canonical := &celpb.Type{}
if err := convertProto(t, canonical); err != nil {
return nil, err
}
return ProtoAsType(canonical)
}
// ProtoAsType converts a canonical CEL celpb.Type protobuf type to a CEL-native type representation.
func ProtoAsType(t *celpb.Type) (*Type, error) {
switch t.GetTypeKind().(type) {
case *exprpb.Type_Dyn:
case *celpb.Type_Dyn:
return DynType, nil
case *exprpb.Type_AbstractType_:
case *celpb.Type_AbstractType_:
paramTypes := make([]*Type, len(t.GetAbstractType().GetParameterTypes()))
for i, p := range t.GetAbstractType().GetParameterTypes() {
pt, err := ExprTypeToType(p)
pt, err := ProtoAsType(p)
if err != nil {
return nil, err
}
paramTypes[i] = pt
}
return NewOpaqueType(t.GetAbstractType().GetName(), paramTypes...), nil
case *exprpb.Type_ListType_:
et, err := ExprTypeToType(t.GetListType().GetElemType())
case *celpb.Type_ListType_:
et, err := ProtoAsType(t.GetListType().GetElemType())
if err != nil {
return nil, err
}
return NewListType(et), nil
case *exprpb.Type_MapType_:
kt, err := ExprTypeToType(t.GetMapType().GetKeyType())
case *celpb.Type_MapType_:
kt, err := ProtoAsType(t.GetMapType().GetKeyType())
if err != nil {
return nil, err
}
vt, err := ExprTypeToType(t.GetMapType().GetValueType())
vt, err := ProtoAsType(t.GetMapType().GetValueType())
if err != nil {
return nil, err
}
return NewMapType(kt, vt), nil
case *exprpb.Type_MessageType:
case *celpb.Type_MessageType:
return NewObjectType(t.GetMessageType()), nil
case *exprpb.Type_Null:
case *celpb.Type_Null:
return NullType, nil
case *exprpb.Type_Primitive:
case *celpb.Type_Primitive:
switch t.GetPrimitive() {
case exprpb.Type_BOOL:
case celpb.Type_BOOL:
return BoolType, nil
case exprpb.Type_BYTES:
case celpb.Type_BYTES:
return BytesType, nil
case exprpb.Type_DOUBLE:
case celpb.Type_DOUBLE:
return DoubleType, nil
case exprpb.Type_INT64:
case celpb.Type_INT64:
return IntType, nil
case exprpb.Type_STRING:
case celpb.Type_STRING:
return StringType, nil
case exprpb.Type_UINT64:
case celpb.Type_UINT64:
return UintType, nil
default:
return nil, fmt.Errorf("unsupported primitive type: %v", t)
}
case *exprpb.Type_TypeParam:
case *celpb.Type_TypeParam:
return NewTypeParamType(t.GetTypeParam()), nil
case *exprpb.Type_Type:
case *celpb.Type_Type:
if t.GetType().GetTypeKind() != nil {
p, err := ExprTypeToType(t.GetType())
p, err := ProtoAsType(t.GetType())
if err != nil {
return nil, err
}
return NewTypeTypeWithParam(p), nil
}
return TypeType, nil
case *exprpb.Type_WellKnown:
case *celpb.Type_WellKnown:
switch t.GetWellKnown() {
case exprpb.Type_ANY:
case celpb.Type_ANY:
return AnyType, nil
case exprpb.Type_DURATION:
case celpb.Type_DURATION:
return DurationType, nil
case exprpb.Type_TIMESTAMP:
case celpb.Type_TIMESTAMP:
return TimestampType, nil
default:
return nil, fmt.Errorf("unsupported well-known type: %v", t)
}
case *exprpb.Type_Wrapper:
t, err := ExprTypeToType(&exprpb.Type{TypeKind: &exprpb.Type_Primitive{Primitive: t.GetWrapper()}})
case *celpb.Type_Wrapper:
t, err := ProtoAsType(&celpb.Type{TypeKind: &celpb.Type_Primitive{Primitive: t.GetWrapper()}})
if err != nil {
return nil, err
}
return NewNullableType(t), nil
case *exprpb.Type_Error:
case *celpb.Type_Error:
return ErrorType, nil
default:
return nil, fmt.Errorf("unsupported type: %v", t)
@@ -776,6 +793,23 @@ func maybeForeignType(t ref.Type) *Type {
return NewObjectType(t.TypeName(), traitMask)
}
func convertProto(src, dst proto.Message) error {
pb, err := proto.Marshal(src)
if err != nil {
return err
}
err = proto.Unmarshal(pb, dst)
return err
}
func primitiveType(primitive celpb.Type_PrimitiveType) *celpb.Type {
return &celpb.Type{
TypeKind: &celpb.Type_Primitive{
Primitive: primitive,
},
}
}
var (
checkedWellKnowns = map[string]*Type{
// Wrapper types.
@@ -820,4 +854,11 @@ var (
}
structTypeTraitMask = traits.FieldTesterType | traits.IndexerType
boolType = primitiveType(celpb.Type_BOOL)
bytesType = primitiveType(celpb.Type_BYTES)
doubleType = primitiveType(celpb.Type_DOUBLE)
intType = primitiveType(celpb.Type_INT64)
stringType = primitiveType(celpb.Type_STRING)
uintType = primitiveType(celpb.Type_UINT64)
)
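
The AlphaProtoAsType/ProtoAsType pair above bridges the legacy v1alpha1 proto representation and the canonical cel.dev/expr one via the marshal/unmarshal round trip in convertProto. Below is a minimal sketch of converting a v1alpha1 list(string) type into a CEL-native type; only AlphaProtoAsType and the proto shapes come from the code above, while the main harness and import aliases are assumptions and the printed type name is indicative.

package main

import (
    "fmt"

    "github.com/google/cel-go/common/types"
    exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)

func main() {
    // list(string) expressed as a v1alpha1 checked type.
    alpha := &exprpb.Type{
        TypeKind: &exprpb.Type_ListType_{
            ListType: &exprpb.Type_ListType{
                ElemType: &exprpb.Type{
                    TypeKind: &exprpb.Type_Primitive{Primitive: exprpb.Type_STRING},
                },
            },
        },
    }
    t, err := types.AlphaProtoAsType(alpha)
    if err != nil {
        panic(err)
    }
    fmt.Println(t.TypeName()) // list
}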

View File

@@ -24,6 +24,7 @@ go_library(
"//cel:go_default_library",
"//checker:go_default_library",
"//common/ast:go_default_library",
"//common/decls:go_default_library",
"//common/overloads:go_default_library",
"//common/operators:go_default_library",
"//common/types:go_default_library",
@@ -31,6 +32,7 @@ go_library(
"//common/types/ref:go_default_library",
"//common/types/traits:go_default_library",
"//interpreter:go_default_library",
"//parser:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
"@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
"@org_golang_google_protobuf//types/known/structpb",
@@ -61,8 +63,8 @@ go_test(
"//common/types/ref:go_default_library",
"//common/types/traits:go_default_library",
"//test:go_default_library",
"//test/proto2pb:go_default_library",
"//test/proto3pb:go_default_library",
"//test/proto2pb:go_default_library",
"//test/proto3pb:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
"@org_golang_google_protobuf//types/known/wrapperspb:go_default_library",
"@org_golang_google_protobuf//encoding/protojson:go_default_library",

View File

@@ -3,12 +3,12 @@
CEL extensions are a related set of constants, functions, macros, or other
features which may not be covered by the core CEL spec.
## Bindings
## Bindings
Returns a cel.EnvOption to configure support for local variable bindings
in expressions.
# Cel.Bind
### Cel.Bind
Binds a simple identifier to an initialization expression which may be used
in a subsequent result expression. Bindings may also be nested within each
@@ -19,11 +19,11 @@ other.
Examples:
cel.bind(a, 'hello',
cel.bind(b, 'world', a + b + b + a)) // "helloworldworldhello"
cel.bind(b, 'world', a + b + b + a)) // "helloworldworldhello"
// Avoid a list allocation within the exists comprehension.
cel.bind(valid_values, [a, b, c],
[d, e, f].exists(elem, elem in valid_values))
[d, e, f].exists(elem, elem in valid_values))
Local bindings are not guaranteed to be evaluated before use.
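
To make this concrete, the sketch below enables the Bindings library and evaluates the nested bind example from above. The cel.NewEnv/Compile/Program/Eval plumbing is the standard cel-go workflow and is assumed rather than shown in this diff.

package main

import (
    "fmt"

    "github.com/google/cel-go/cel"
    "github.com/google/cel-go/ext"
)

func main() {
    env, err := cel.NewEnv(ext.Bindings())
    if err != nil {
        panic(err)
    }
    ast, iss := env.Compile(`cel.bind(a, 'hello', cel.bind(b, 'world', a + b + b + a))`)
    if iss.Err() != nil {
        panic(iss.Err())
    }
    prg, err := env.Program(ast)
    if err != nil {
        panic(err)
    }
    out, _, err := prg.Eval(cel.NoVars())
    if err != nil {
        panic(err)
    }
    fmt.Println(out.Value()) // helloworldworldhello
}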
@@ -100,7 +100,8 @@ argument. Simple numeric and list literals are supported as valid argument
types; however, other literals will be flagged as errors during macro
expansion. If the argument expression does not resolve to a numeric or
list(numeric) type during type-checking, or during runtime then an error
will be produced. If a list argument is empty, this too will produce an error.
will be produced. If a list argument is empty, this too will produce an
error.
math.least(<arg>, ...) -> <double|int|uint>
@@ -117,6 +118,244 @@ Examples:
math.least(a, b) // check-time error if a or b is non-numeric
math.least(dyn('string')) // runtime error
### Math.BitOr
Introduced at version: 1
Performs a bitwise-OR operation over two int or uint values.
math.bitOr(<int>, <int>) -> <int>
math.bitOr(<uint>, <uint>) -> <uint>
Examples:
math.bitOr(1u, 2u) // returns 3u
math.bitOr(-2, -4) // returns -2
### Math.BitAnd
Introduced at version: 1
Performs a bitwise-AND operation over two int or uint values.
math.bitAnd(<int>, <int>) -> <int>
math.bitAnd(<uint>, <uint>) -> <uint>
Examples:
math.bitAnd(3u, 2u) // return 2u
math.bitAnd(3, 5) // returns 3
math.bitAnd(-3, -5) // returns -7
### Math.BitXor
Introduced at version: 1
math.bitXor(<int>, <int>) -> <int>
math.bitXor(<uint>, <uint>) -> <uint>
Performs a bitwise-XOR operation over two int or uint values.
Examples:
math.bitXor(3u, 5u) // returns 6u
math.bitXor(1, 3) // returns 2
### Math.BitNot
Introduced at version: 1
Function which accepts a single int or uint and performs a bitwise-NOT
ones-complement of the given binary value.
math.bitNot(<int>) -> <int>
math.bitNot(<uint>) -> <uint>
Examples
math.bitNot(1) // returns -1
math.bitNot(-1) // return 0
math.bitNot(0u) // returns 18446744073709551615u
### Math.BitShiftLeft
Introduced at version: 1
Perform a left shift of bits on the first parameter, by the amount of bits
specified in the second parameter. The first parameter is either a uint or
an int. The second parameter must be an int.
When the second parameter is 64 or greater, 0 will always be returned
since the number of bits shifted is greater than or equal to the total bit
length of the number being shifted. Negative valued bit shifts will result
in a runtime error.
math.bitShiftLeft(<int>, <int>) -> <int>
math.bitShiftLeft(<uint>, <int>) -> <uint>
Examples
math.bitShiftLeft(1, 2) // returns 4
math.bitShiftLeft(-1, 2) // returns -4
math.bitShiftLeft(1u, 2) // return 4u
math.bitShiftLeft(1u, 200) // returns 0u
### Math.BitShiftRight
Introduced at version: 1
Perform a right shift of bits on the first parameter, by the amount of bits
specified in the second parameter. The first parameter is either a uint or
an int. The second parameter must be an int.
When the second parameter is 64 or greater, 0 will always be returned since
the number of bits shifted is greater than or equal to the total bit length
of the number being shifted. Negative valued bit shifts will result in a
runtime error.
The sign bit extension will not be preserved for this operation: vacant bits
on the left are filled with 0.
math.bitShiftRight(<int>, <int>) -> <int>
math.bitShiftRight(<uint>, <int>) -> <uint>
Examples
math.bitShiftRight(1024, 2) // returns 256
math.bitShiftRight(1024u, 2) // returns 256u
math.bitShiftRight(1024u, 64) // returns 0u
### Math.Ceil
Introduced at version: 1
Compute the ceiling of a double value.
math.ceil(<double>) -> <double>
Examples:
math.ceil(1.2) // returns 2.0
math.ceil(-1.2) // returns -1.0
### Math.Floor
Introduced at version: 1
Compute the floor of a double value.
math.floor(<double>) -> <double>
Examples:
math.floor(1.2) // returns 1.0
math.floor(-1.2) // returns -2.0
### Math.Round
Introduced at version: 1
Rounds the double value to the nearest whole number with ties rounding away
from zero, e.g. 1.5 -> 2.0, -1.5 -> -2.0.
math.round(<double>) -> <double>
Examples:
math.round(1.2) // returns 1.0
math.round(1.5) // returns 2.0
math.round(-1.5) // returns -2.0
### Math.Trunc
Introduced at version: 1
Truncates the fractional portion of the double value.
math.trunc(<double>) -> <double>
Examples:
math.trunc(-1.3) // returns -1.0
math.trunc(1.3) // returns 1.0
### Math.Abs
Introduced at version: 1
Returns the absolute value of the numeric type provided as input. If the
value is NaN, the output is NaN. If the input is int64 min, the function
will result in an overflow error.
math.abs(<double>) -> <double>
math.abs(<int>) -> <int>
math.abs(<uint>) -> <uint>
Examples:
math.abs(-1) // returns 1
math.abs(1) // returns 1
math.abs(-9223372036854775808) // overflow error
### Math.Sign
Introduced at version: 1
Returns the sign of the numeric type, either -1, 0, 1 as an int, double, or
uint depending on the overload. For floating point values, if NaN is
provided as input, the output is also NaN. The implementation does not
differentiate between positive and negative zero.
math.sign(<double>) -> <double>
math.sign(<int>) -> <int>
math.sign(<uint>) -> <uint>
Examples:
math.sign(-42) // returns -1
math.sign(0) // returns 0
math.sign(42) // returns 1
### Math.IsInf
Introduced at version: 1
Returns true if the input double value is -Inf or +Inf.
math.isInf(<double>) -> <bool>
Examples:
math.isInf(1.0/0.0) // returns true
math.isInf(1.2) // returns false
### Math.IsNaN
Introduced at version: 1
Returns true if the input double value is NaN, false otherwise.
math.isNaN(<double>) -> <bool>
Examples:
math.isNaN(0.0/0.0) // returns true
math.isNaN(1.2) // returns false
### Math.IsFinite
Introduced at version: 1
Returns true if the value is a finite number. Equivalent in behavior to:
!math.isNaN(double) && !math.isInf(double)
math.isFinite(<double>) -> <bool>
Examples:
math.isFinite(0.0/0.0) // returns false
math.isFinite(1.2) // returns true
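
A quick way to exercise the functions documented above is to compile each expression against an environment configured with the Math() option. The loop below is a sketch that assumes the standard cel-go Env/Program API and the default (all versions enabled) Math library.

package main

import (
    "fmt"

    "github.com/google/cel-go/cel"
    "github.com/google/cel-go/ext"
)

func main() {
    env, err := cel.NewEnv(ext.Math())
    if err != nil {
        panic(err)
    }
    for _, src := range []string{
        `math.bitOr(1u, 2u)`,  // 3u
        `math.ceil(1.2)`,      // 2.0
        `math.isNaN(0.0/0.0)`, // true
    } {
        ast, iss := env.Compile(src)
        if iss.Err() != nil {
            panic(iss.Err())
        }
        prg, err := env.Program(ast)
        if err != nil {
            panic(err)
        }
        out, _, err := prg.Eval(cel.NoVars())
        if err != nil {
            panic(err)
        }
        fmt.Println(src, "=>", out)
    }
}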
## Protos
Protos configure extended macros and functions for proto manipulation.
@@ -154,6 +393,65 @@ Example:
Extended functions for list manipulation. As a general note, all indices are
zero-based.
### Distinct
**Introduced in version 2**
Returns the distinct elements of a list.
<list(T)>.distinct() -> <list(T)>
Examples:
[1, 2, 2, 3, 3, 3].distinct() // return [1, 2, 3]
["b", "b", "c", "a", "c"].distinct() // return ["b", "c", "a"]
[1, "b", 2, "b"].distinct() // return [1, "b", 2]
### Flatten
**Introduced in version 1**
Flattens a list recursively.
If an optional depth is provided, the list is flattened to the specified level.
A negative depth value will result in an error.
<list>.flatten(<list>) -> <list>
<list>.flatten(<list>, <int>) -> <list>
Examples:
[1,[2,3],[4]].flatten() // return [1, 2, 3, 4]
[1,[2,[3,4]]].flatten() // return [1, 2, [3, 4]]
[1,2,[],[],[3,4]].flatten() // return [1, 2, 3, 4]
[1,[2,[3,[4]]]].flatten(2) // return [1, 2, 3, [4]]
[1,[2,[3,[4]]]].flatten(-1) // error
### Range
**Introduced in version 2**
Returns a list of integers from 0 to n-1.
lists.range(<int>) -> <list(int)>
Examples:
lists.range(5) -> [0, 1, 2, 3, 4]
### Reverse
**Introduced in version 2**
Returns the elements of a list in reverse order.
<list(T)>.reverse() -> <list(T)>
Examples:
[5, 3, 1, 2].reverse() // return [2, 1, 3, 5]
### Slice
@@ -164,7 +462,43 @@ Returns a new sub-list using the indexes provided.
Examples:
[1,2,3,4].slice(1, 3) // return [2, 3]
[1,2,3,4].slice(2, 4) // return [3 ,4]
[1,2,3,4].slice(2, 4) // return [3, 4]
### Sort
**Introduced in version 2**
Sorts a list with comparable elements. If the element type is not comparable
or the element types are not the same, the function will produce an error.
<list(T)>.sort() -> <list(T)>
T in {int, uint, double, bool, duration, timestamp, string, bytes}
Examples:
[3, 2, 1].sort() // return [1, 2, 3]
["b", "c", "a"].sort() // return ["a", "b", "c"]
[1, "b"].sort() // error
[[1, 2, 3]].sort() // error
### SortBy
**Introduced in version 2**
Sorts a list by a key value, i.e., the order is determined by the result of
an expression applied to each element of the list.
<list(T)>.sortBy(<bindingName>, <keyExpr>) -> <list(T)>
keyExpr returns a value in {int, uint, double, bool, duration, timestamp, string, bytes}
Examples:
[
Player { name: "foo", score: 0 },
Player { name: "bar", score: -10 },
Player { name: "baz", score: 1000 },
].sortBy(e, e.score).map(e, e.name)
== ["bar", "foo", "baz"]
## Sets
@@ -259,7 +593,8 @@ Examples:
'hello mellow'.indexOf('jello') // returns -1
'hello mellow'.indexOf('', 2) // returns 2
'hello mellow'.indexOf('ello', 2) // returns 7
'hello mellow'.indexOf('ello', 20) // error
'hello mellow'.indexOf('ello', 20) // returns -1
'hello mellow'.indexOf('ello', -1) // error
### Join
@@ -273,10 +608,10 @@ elements in the resulting string.
Examples:
['hello', 'mellow'].join() // returns 'hellomellow'
['hello', 'mellow'].join(' ') // returns 'hello mellow'
[].join() // returns ''
[].join('/') // returns ''
['hello', 'mellow'].join() // returns 'hellomellow'
['hello', 'mellow'].join(' ') // returns 'hello mellow'
[].join() // returns ''
[].join('/') // returns ''
### LastIndexOf
@@ -297,6 +632,7 @@ Examples:
'hello mellow'.lastIndexOf('ello') // returns 7
'hello mellow'.lastIndexOf('jello') // returns -1
'hello mellow'.lastIndexOf('ello', 6) // returns 1
'hello mellow'.lastIndexOf('ello', 20) // returns -1
'hello mellow'.lastIndexOf('ello', -1) // error
### LowerAscii
@@ -427,4 +763,137 @@ It can be located in Version 3 of strings.
Examples:
'gums'.reverse() // returns 'smug'
'John Smith'.reverse() // returns 'htimS nhoJ'
'John Smith'.reverse() // returns 'htimS nhoJ'
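
For completeness, here is a small sketch that evaluates two of the string functions referenced above through the Strings() library option; only behaviors already documented in this section are relied upon, and the Env/Program harness is an assumption.

package main

import (
    "fmt"

    "github.com/google/cel-go/cel"
    "github.com/google/cel-go/ext"
)

func main() {
    env, err := cel.NewEnv(ext.Strings())
    if err != nil {
        panic(err)
    }
    ast, iss := env.Compile(`['hello', 'mellow'].join(' ').lastIndexOf('ello')`)
    if iss.Err() != nil {
        panic(iss.Err())
    }
    prg, err := env.Program(ast)
    if err != nil {
        panic(err)
    }
    out, _, err := prg.Eval(cel.NoVars())
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // 7
}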
## TwoVarComprehensions
TwoVarComprehensions introduces support for two-variable comprehensions.
The two-variable form of comprehensions looks similar to the one-variable
counterparts. Where possible, the same macro names were used and additional
macro signatures added. The notable distinction for two-variable comprehensions
is the introduction of `transformList`, `transformMap`, and `transformMapEntry`
support for list and map types rather than the more traditional `map` and
`filter` macros.
### All
Comprehension which tests whether all elements in the list or map satisfy a
given predicate. The `all` macro evaluates in a manner consistent with logical
AND and will short-circuit when encountering a `false` value.
<list>.all(indexVar, valueVar, <predicate>) -> bool
<map>.all(keyVar, valueVar, <predicate>) -> bool
Examples:
[1, 2, 3].all(i, j, i < j) // returns true
{'hello': 'world', 'taco': 'taco'}.all(k, v, k != v) // returns false
// Combines two-variable comprehension with single variable
{'h': ['hello', 'hi'], 'j': ['joke', 'jog']}
.all(k, vals, vals.all(v, v.startsWith(k))) // returns true
### Exists
Comprehension which tests whether any element in a list or map exists which
satisfies a given predicate. The `exists` macro evaluates in a manner consistent
with logical OR and will short-circuit when encountering a `true` value.
<list>.exists(indexVar, valueVar, <predicate>) -> bool
<map>.exists(keyVar, valueVar, <predicate>) -> bool
Examples:
{'greeting': 'hello', 'farewell': 'goodbye'}
.exists(k, v, k.startsWith('good') || v.endsWith('bye')) // returns true
[1, 2, 4, 8, 16].exists(i, v, v == 1024 && i == 10) // returns false
### ExistsOne
Comprehension which tests whether exactly one element in a list or map exists
which satisfies a given predicate expression. This comprehension does not
short-circuit in keeping with the one-variable exists one macro semantics.
<list>.existsOne(indexVar, valueVar, <predicate>)
<map>.existsOne(keyVar, valueVar, <predicate>)
This macro may also be used with the `exists_one` function name, for
compatibility with the one-variable macro of the same name.
Examples:
[1, 2, 1, 3, 1, 4].existsOne(i, v, i == 1 || v == 1) // returns false
[1, 1, 2, 2, 3, 3].existsOne(i, v, i == 2 && v == 2) // returns true
{'i': 0, 'j': 1, 'k': 2}.existsOne(i, v, i == 'l' || v == 1) // returns true
### TransformList
Comprehension which converts a map or a list into a list value. The output
expression of the comprehension determines the contents of the output list.
Elements in the list may optionally be filtered according to a predicate
expression, where elements that satisfy the predicate are transformed.
<list>.transformList(indexVar, valueVar, <transform>)
<list>.transformList(indexVar, valueVar, <filter>, <transform>)
<map>.transformList(keyVar, valueVar, <transform>)
<map>.transformList(keyVar, valueVar, <filter>, <transform>)
Examples:
[1, 2, 3].transformList(indexVar, valueVar,
(indexVar * valueVar) + valueVar) // returns [1, 4, 9]
[1, 2, 3].transformList(indexVar, valueVar, indexVar % 2 == 0,
(indexVar * valueVar) + valueVar) // returns [1, 9]
{'greeting': 'hello', 'farewell': 'goodbye'}
.transformList(k, _, k) // returns ['greeting', 'farewell']
{'greeting': 'hello', 'farewell': 'goodbye'}
.transformList(_, v, v) // returns ['hello', 'goodbye']
### TransformMap
Comprehension which converts a map or a list into a map value. The output
expression of the comprehension determines the value of the output map entry;
however, the key remains fixed. Elements in the map may optionally be filtered
according to a predicate expression, where elements that satisfy the predicate
are transformed.
<list>.transformMap(indexVar, valueVar, <transform>)
<list>.transformMap(indexVar, valueVar, <filter>, <transform>)
<map>.transformMap(keyVar, valueVar, <transform>)
<map>.transformMap(keyVar, valueVar, <filter>, <transform>)
Examples:
[1, 2, 3].transformMap(indexVar, valueVar,
(indexVar * valueVar) + valueVar) // returns {0: 1, 1: 4, 2: 9}
[1, 2, 3].transformMap(indexVar, valueVar, indexVar % 2 == 0,
(indexVar * valueVar) + valueVar) // returns {0: 1, 2: 9}
{'greeting': 'hello'}.transformMap(k, v, v + '!') // returns {'greeting': 'hello!'}
### TransformMapEntry
Comprehension which converts a map or a list into a map value; however, this
transform expects the entry expression to be a map literal. If the transform
produces an entry which duplicates a key in the target map, the comprehension
will error. Note that key equality is determined using CEL equality, under
which numeric values that are equal will cause a key collision even if they
do not have the same type.
Elements in the map may optionally be filtered according to a predicate
expression, where elements that satisfy the predicate are transformed.
<list>.transformMapEntry(indexVar, valueVar, <transform>)
<list>.transformMapEntry(indexVar, valueVar, <filter>, <transform>)
<map>.transformMapEntry(keyVar, valueVar, <transform>)
<map>.transformMapEntry(keyVar, valueVar, <filter>, <transform>)
Examples:
// returns {'hello': 'greeting'}
{'greeting': 'hello'}.transformMapEntry(keyVar, valueVar, {valueVar: keyVar})
// reverse lookup, require all values in list be unique
[1, 2, 3].transformMapEntry(indexVar, valueVar, {valueVar: indexVar})
{'greeting': 'aloha', 'farewell': 'aloha'}
.transformMapEntry(keyVar, valueVar, {valueVar: keyVar}) // error, duplicate key
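
The two-variable macros are self-contained in the TwoVarComprehensions library, including the internal cel.@mapInsert function that the map transforms expand to. A usage sketch, again assuming the standard cel-go compile/evaluate flow:

package main

import (
    "fmt"

    "github.com/google/cel-go/cel"
    "github.com/google/cel-go/ext"
)

func main() {
    env, err := cel.NewEnv(ext.TwoVarComprehensions())
    if err != nil {
        panic(err)
    }
    ast, iss := env.Compile(`{'greeting': 'hello'}.transformMap(k, v, v + '!') == {'greeting': 'hello!'}`)
    if iss.Err() != nil {
        panic(iss.Err())
    }
    prg, err := env.Program(ast)
    if err != nil {
        panic(err)
    }
    out, _, err := prg.Eval(cel.NoVars())
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // true
}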

View File

@@ -15,9 +15,19 @@
package ext
import (
"errors"
"fmt"
"math"
"strconv"
"strings"
"sync"
"github.com/google/cel-go/cel"
"github.com/google/cel-go/common/ast"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/common/types/traits"
"github.com/google/cel-go/interpreter"
)
// Bindings returns a cel.EnvOption to configure support for local variable
@@ -41,35 +51,120 @@ import (
// [d, e, f].exists(elem, elem in valid_values))
//
// Local bindings are not guaranteed to be evaluated before use.
func Bindings() cel.EnvOption {
return cel.Lib(celBindings{})
func Bindings(options ...BindingsOption) cel.EnvOption {
b := &celBindings{version: math.MaxUint32}
for _, o := range options {
b = o(b)
}
return cel.Lib(b)
}
const (
celNamespace = "cel"
bindMacro = "bind"
blockFunc = "@block"
unusedIterVar = "#unused"
)
type celBindings struct{}
// BindingsOption declares a functional operator for configuring the Bindings library behavior.
type BindingsOption func(*celBindings) *celBindings
func (celBindings) LibraryName() string {
// BindingsVersion sets the version of the bindings library to an explicit version.
func BindingsVersion(version uint32) BindingsOption {
return func(lib *celBindings) *celBindings {
lib.version = version
return lib
}
}
type celBindings struct {
version uint32
}
func (*celBindings) LibraryName() string {
return "cel.lib.ext.cel.bindings"
}
func (celBindings) CompileOptions() []cel.EnvOption {
return []cel.EnvOption{
func (lib *celBindings) CompileOptions() []cel.EnvOption {
opts := []cel.EnvOption{
cel.Macros(
// cel.bind(var, <init>, <expr>)
cel.ReceiverMacro(bindMacro, 3, celBind),
),
}
if lib.version >= 1 {
// The cel.@block signature takes a list of subexpressions and a typed expression which is
// used as the output type.
paramType := cel.TypeParamType("T")
opts = append(opts,
cel.Function("cel.@block",
cel.Overload("cel_block_list",
[]*cel.Type{cel.ListType(cel.DynType), paramType}, paramType)),
)
opts = append(opts, cel.ASTValidators(blockValidationExemption{}))
}
return opts
}
func (celBindings) ProgramOptions() []cel.ProgramOption {
func (lib *celBindings) ProgramOptions() []cel.ProgramOption {
if lib.version >= 1 {
celBlockPlan := func(i interpreter.Interpretable) (interpreter.Interpretable, error) {
call, ok := i.(interpreter.InterpretableCall)
if !ok {
return i, nil
}
switch call.Function() {
case "cel.@block":
args := call.Args()
if len(args) != 2 {
return nil, fmt.Errorf("cel.@block expects two arguments, but got %d", len(args))
}
expr := args[1]
// Non-empty block
if block, ok := args[0].(interpreter.InterpretableConstructor); ok {
slotExprs := block.InitVals()
return newDynamicBlock(slotExprs, expr), nil
}
// Constant valued block which can happen during runtime optimization.
if cons, ok := args[0].(interpreter.InterpretableConst); ok {
if cons.Value().Type() == types.ListType {
l := cons.Value().(traits.Lister)
if l.Size().Equal(types.IntZero) == types.True {
return args[1], nil
}
return newConstantBlock(l, expr), nil
}
}
return nil, errors.New("cel.@block expects a list constructor as the first argument")
default:
return i, nil
}
}
return []cel.ProgramOption{cel.CustomDecorator(celBlockPlan)}
}
return []cel.ProgramOption{}
}
type blockValidationExemption struct{}
// Name returns the name of the validator.
func (blockValidationExemption) Name() string {
return "cel.lib.ext.validate.functions.cel.block"
}
// Configure implements the ASTValidatorConfigurer interface and augments the list of functions to skip
// during homogeneous aggregate literal type-checks.
func (blockValidationExemption) Configure(config cel.MutableValidatorConfig) error {
functions := config.GetOrDefault(cel.HomogeneousAggregateLiteralExemptFunctions, []string{}).([]string)
functions = append(functions, "cel.@block")
return config.Set(cel.HomogeneousAggregateLiteralExemptFunctions, functions)
}
// Validate is a no-op as the intent is to simply disable strong type-checks for list literals during
// when they occur within cel.@block calls as the arg types have already been validated.
func (blockValidationExemption) Validate(env *cel.Env, _ cel.ValidatorConfig, a *ast.AST, iss *cel.Issues) {
}
func celBind(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
if !macroTargetMatchesNamespace(celNamespace, target) {
return nil, nil
@@ -94,3 +189,148 @@ func celBind(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Ex
resultExpr,
), nil
}
func newDynamicBlock(slotExprs []interpreter.Interpretable, expr interpreter.Interpretable) interpreter.Interpretable {
bs := &dynamicBlock{
slotExprs: slotExprs,
expr: expr,
}
bs.slotActivationPool = &sync.Pool{
New: func() any {
slotCount := len(slotExprs)
sa := &dynamicSlotActivation{
slotExprs: slotExprs,
slotCount: slotCount,
slotVals: make([]*slotVal, slotCount),
}
for i := 0; i < slotCount; i++ {
sa.slotVals[i] = &slotVal{}
}
return sa
},
}
return bs
}
type dynamicBlock struct {
slotExprs []interpreter.Interpretable
expr interpreter.Interpretable
slotActivationPool *sync.Pool
}
// ID implements the Interpretable interface method.
func (b *dynamicBlock) ID() int64 {
return b.expr.ID()
}
// Eval implements the Interpretable interface method.
func (b *dynamicBlock) Eval(activation interpreter.Activation) ref.Val {
sa := b.slotActivationPool.Get().(*dynamicSlotActivation)
sa.Activation = activation
defer b.clearSlots(sa)
return b.expr.Eval(sa)
}
func (b *dynamicBlock) clearSlots(sa *dynamicSlotActivation) {
sa.reset()
b.slotActivationPool.Put(sa)
}
type slotVal struct {
value *ref.Val
visited bool
}
type dynamicSlotActivation struct {
interpreter.Activation
slotExprs []interpreter.Interpretable
slotCount int
slotVals []*slotVal
}
// ResolveName implements the Activation interface method but handles variables prefixed with `@index`
// as special variables which exist within the slot-based memory of the cel.@block() where each slot
// refers to an expression which must be computed only once.
func (sa *dynamicSlotActivation) ResolveName(name string) (any, bool) {
if idx, found := matchSlot(name, sa.slotCount); found {
v := sa.slotVals[idx]
if v.visited {
// Return not found if the index expression refers to itself
if v.value == nil {
return nil, false
}
return *v.value, true
}
v.visited = true
val := sa.slotExprs[idx].Eval(sa)
v.value = &val
return val, true
}
return sa.Activation.ResolveName(name)
}
func (sa *dynamicSlotActivation) reset() {
sa.Activation = nil
for _, sv := range sa.slotVals {
sv.visited = false
sv.value = nil
}
}
func newConstantBlock(slots traits.Lister, expr interpreter.Interpretable) interpreter.Interpretable {
count := slots.Size().(types.Int)
return &constantBlock{slots: slots, slotCount: int(count), expr: expr}
}
type constantBlock struct {
slots traits.Lister
slotCount int
expr interpreter.Interpretable
}
// ID implements the interpreter.Interpretable interface method.
func (b *constantBlock) ID() int64 {
return b.expr.ID()
}
// Eval implements the interpreter.Interpretable interface method, and will proxy @index prefixed variable
// lookups into a set of constant slots determined from the plan step.
func (b *constantBlock) Eval(activation interpreter.Activation) ref.Val {
vars := constantSlotActivation{Activation: activation, slots: b.slots, slotCount: b.slotCount}
return b.expr.Eval(vars)
}
type constantSlotActivation struct {
interpreter.Activation
slots traits.Lister
slotCount int
}
// ResolveName implements Activation interface method and proxies @index prefixed lookups into the slot
// activation associated with the block scope.
func (sa constantSlotActivation) ResolveName(name string) (any, bool) {
if idx, found := matchSlot(name, sa.slotCount); found {
return sa.slots.Get(types.Int(idx)), true
}
return sa.Activation.ResolveName(name)
}
func matchSlot(name string, slotCount int) (int, bool) {
if idx, found := strings.CutPrefix(name, indexPrefix); found {
idx, err := strconv.Atoi(idx)
// Return not found if the index is not numeric
if err != nil {
return -1, false
}
// Return not found if the index is not a valid slot
if idx < 0 || idx >= slotCount {
return -1, false
}
return idx, true
}
return -1, false
}
var (
indexPrefix = "@index"
)
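
The slot machinery above guarantees that each @index<N> subexpression is evaluated at most once per call to Eval: the first lookup records the value in a slotVal, later lookups return the cached value, and reset() clears the slots before the activation goes back into the sync.Pool. The standalone sketch below (hypothetical names, not cel-go API) isolates just that compute-once behavior.

package main

import "fmt"

// lazySlot mirrors the slotVal idea: compute on first use, then reuse.
type lazySlot struct {
    compute func() int
    value   int
    visited bool
}

func (s *lazySlot) get() int {
    if !s.visited {
        s.visited = true
        s.value = s.compute()
    }
    return s.value
}

func main() {
    calls := 0
    slot := &lazySlot{compute: func() int { calls++; return 42 }}
    fmt.Println(slot.get(), slot.get(), calls) // 42 42 1
}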

410
vendor/github.com/google/cel-go/ext/comprehensions.go generated vendored Normal file
View File

@@ -0,0 +1,410 @@
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package ext
import (
"fmt"
"github.com/google/cel-go/cel"
"github.com/google/cel-go/common/ast"
"github.com/google/cel-go/common/operators"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/common/types/traits"
"github.com/google/cel-go/parser"
)
const (
mapInsert = "cel.@mapInsert"
mapInsertOverloadMap = "@mapInsert_map_map"
mapInsertOverloadKeyValue = "@mapInsert_map_key_value"
)
// TwoVarComprehensions introduces support for two-variable comprehensions.
//
// The two-variable form of comprehensions looks similar to the one-variable counterparts.
// Where possible, the same macro names were used and additional macro signatures added.
// The notable distinction for two-variable comprehensions is the introduction of
// `transformList`, `transformMap`, and `transformMapEntry` support for list and map types
// rather than the more traditional `map` and `filter` macros.
//
// # All
//
// Comprehension which tests whether all elements in the list or map satisfy a given
// predicate. The `all` macro evaluates in a manner consistent with logical AND and will
// short-circuit when encountering a `false` value.
//
// <list>.all(indexVar, valueVar, <predicate>) -> bool
// <map>.all(keyVar, valueVar, <predicate>) -> bool
//
// Examples:
//
// [1, 2, 3].all(i, j, i < j) // returns true
// {'hello': 'world', 'taco': 'taco'}.all(k, v, k != v) // returns false
//
// // Combines two-variable comprehension with single variable
// {'h': ['hello', 'hi'], 'j': ['joke', 'jog']}
// .all(k, vals, vals.all(v, v.startsWith(k))) // returns true
//
// # Exists
//
// Comprehension which tests whether any element in a list or map exists which satisfies
// a given predicate. The `exists` macro evaluates in a manner consistent with logical OR
// and will short-circuit when encountering a `true` value.
//
// <list>.exists(indexVar, valueVar, <predicate>) -> bool
// <map>.exists(keyVar, valueVar, <predicate>) -> bool
//
// Examples:
//
// {'greeting': 'hello', 'farewell': 'goodbye'}
// .exists(k, v, k.startsWith('good') || v.endsWith('bye')) // returns true
// [1, 2, 4, 8, 16].exists(i, v, v == 1024 && i == 10) // returns false
//
// # ExistsOne
//
// Comprehension which tests whether exactly one element in a list or map exists which
// satisfies a given predicate expression. This comprehension does not short-circuit in
// keeping with the one-variable exists one macro semantics.
//
// <list>.existsOne(indexVar, valueVar, <predicate>)
// <map>.existsOne(keyVar, valueVar, <predicate>)
//
// This macro may also be used with the `exists_one` function name, for compatibility
// with the one-variable macro of the same name.
//
// Examples:
//
// [1, 2, 1, 3, 1, 4].existsOne(i, v, i == 1 || v == 1) // returns false
// [1, 1, 2, 2, 3, 3].existsOne(i, v, i == 2 && v == 2) // returns true
// {'i': 0, 'j': 1, 'k': 2}.existsOne(i, v, i == 'l' || v == 1) // returns true
//
// # TransformList
//
// Comprehension which converts a map or a list into a list value. The output expression
// of the comprehension determines the contents of the output list. Elements in the list
// may optionally be filtered according to a predicate expression, where elements that
// satisfy the predicate are transformed.
//
// <list>.transformList(indexVar, valueVar, <transform>)
// <list>.transformList(indexVar, valueVar, <filter>, <transform>)
// <map>.transformList(keyVar, valueVar, <transform>)
// <map>.transformList(keyVar, valueVar, <filter>, <transform>)
//
// Examples:
//
// [1, 2, 3].transformList(indexVar, valueVar,
// (indexVar * valueVar) + valueVar) // returns [1, 4, 9]
//	[1, 2, 3].transformList(indexVar, valueVar, indexVar % 2 == 0,
// (indexVar * valueVar) + valueVar) // returns [1, 9]
// {'greeting': 'hello', 'farewell': 'goodbye'}
// .transformList(k, _, k) // returns ['greeting', 'farewell']
// {'greeting': 'hello', 'farewell': 'goodbye'}
// .transformList(_, v, v) // returns ['hello', 'goodbye']
//
// # TransformMap
//
// Comprehension which converts a map or a list into a map value. The output expression
// of the comprehension determines the value of the output map entry; however, the key
// remains fixed. Elements in the map may optionally be filtered according to a predicate
// expression, where elements that satisfy the predicate are transformed.
//
// <list>.transformMap(indexVar, valueVar, <transform>)
// <list>.transformMap(indexVar, valueVar, <filter>, <transform>)
// <map>.transformMap(keyVar, valueVar, <transform>)
// <map>.transformMap(keyVar, valueVar, <filter>, <transform>)
//
// Examples:
//
// [1, 2, 3].transformMap(indexVar, valueVar,
// (indexVar * valueVar) + valueVar) // returns {0: 1, 1: 4, 2: 9}
//	[1, 2, 3].transformMap(indexVar, valueVar, indexVar % 2 == 0,
// (indexVar * valueVar) + valueVar) // returns {0: 1, 2: 9}
// {'greeting': 'hello'}.transformMap(k, v, v + '!') // returns {'greeting': 'hello!'}
//
// # TransformMapEntry
//
// Comprehension which converts a map or a list into a map value; however, this transform
// expects the entry expression to be a map literal. If the transform produces an entry which
// duplicates a key in the target map, the comprehension will error. Note that key
// equality is determined using CEL equality, under which numeric values that are
// equal will cause a key collision even if they do not have the same type.
//
// Elements in the map may optionally be filtered according to a predicate expression, where
// elements that satisfy the predicate are transformed.
//
//	<list>.transformMapEntry(indexVar, valueVar, <transform>)
//	<list>.transformMapEntry(indexVar, valueVar, <filter>, <transform>)
//	<map>.transformMapEntry(keyVar, valueVar, <transform>)
//	<map>.transformMapEntry(keyVar, valueVar, <filter>, <transform>)
//
// Examples:
//
// // returns {'hello': 'greeting'}
// {'greeting': 'hello'}.transformMapEntry(keyVar, valueVar, {valueVar: keyVar})
// // reverse lookup, require all values in list be unique
// [1, 2, 3].transformMapEntry(indexVar, valueVar, {valueVar: indexVar})
//
// {'greeting': 'aloha', 'farewell': 'aloha'}
// .transformMapEntry(keyVar, valueVar, {valueVar: keyVar}) // error, duplicate key
func TwoVarComprehensions() cel.EnvOption {
return cel.Lib(compreV2Lib{})
}
type compreV2Lib struct{}
// LibraryName implements that SingletonLibrary interface method.
func (compreV2Lib) LibraryName() string {
return "cel.lib.ext.comprev2"
}
// CompileOptions implements the cel.Library interface method.
func (compreV2Lib) CompileOptions() []cel.EnvOption {
kType := cel.TypeParamType("K")
vType := cel.TypeParamType("V")
mapKVType := cel.MapType(kType, vType)
opts := []cel.EnvOption{
cel.Macros(
cel.ReceiverMacro("all", 3, quantifierAll),
cel.ReceiverMacro("exists", 3, quantifierExists),
cel.ReceiverMacro("existsOne", 3, quantifierExistsOne),
cel.ReceiverMacro("exists_one", 3, quantifierExistsOne),
cel.ReceiverMacro("transformList", 3, transformList),
cel.ReceiverMacro("transformList", 4, transformList),
cel.ReceiverMacro("transformMap", 3, transformMap),
cel.ReceiverMacro("transformMap", 4, transformMap),
cel.ReceiverMacro("transformMapEntry", 3, transformMapEntry),
cel.ReceiverMacro("transformMapEntry", 4, transformMapEntry),
),
cel.Function(mapInsert,
cel.Overload(mapInsertOverloadKeyValue, []*cel.Type{mapKVType, kType, vType}, mapKVType,
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
m := args[0].(traits.Mapper)
k := args[1]
v := args[2]
return types.InsertMapKeyValue(m, k, v)
})),
cel.Overload(mapInsertOverloadMap, []*cel.Type{mapKVType, mapKVType}, mapKVType,
cel.BinaryBinding(func(targetMap, updateMap ref.Val) ref.Val {
tm := targetMap.(traits.Mapper)
um := updateMap.(traits.Mapper)
umIt := um.Iterator()
for umIt.HasNext() == types.True {
k := umIt.Next()
updateOrErr := types.InsertMapKeyValue(tm, k, um.Get(k))
if types.IsError(updateOrErr) {
return updateOrErr
}
tm = updateOrErr.(traits.Mapper)
}
return tm
})),
),
}
return opts
}
// ProgramOptions implements the cel.Library interface method
func (compreV2Lib) ProgramOptions() []cel.ProgramOption {
return []cel.ProgramOption{}
}
func quantifierAll(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
iterVar1, iterVar2, err := extractIterVars(mef, args[0], args[1])
if err != nil {
return nil, err
}
return mef.NewComprehensionTwoVar(
target,
iterVar1,
iterVar2,
parser.AccumulatorName,
/*accuInit=*/ mef.NewLiteral(types.True),
/*condition=*/ mef.NewCall(operators.NotStrictlyFalse, mef.NewAccuIdent()),
/*step=*/ mef.NewCall(operators.LogicalAnd, mef.NewAccuIdent(), args[2]),
/*result=*/ mef.NewAccuIdent(),
), nil
}
func quantifierExists(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
iterVar1, iterVar2, err := extractIterVars(mef, args[0], args[1])
if err != nil {
return nil, err
}
return mef.NewComprehensionTwoVar(
target,
iterVar1,
iterVar2,
parser.AccumulatorName,
/*accuInit=*/ mef.NewLiteral(types.False),
/*condition=*/ mef.NewCall(operators.NotStrictlyFalse, mef.NewCall(operators.LogicalNot, mef.NewAccuIdent())),
/*step=*/ mef.NewCall(operators.LogicalOr, mef.NewAccuIdent(), args[2]),
/*result=*/ mef.NewAccuIdent(),
), nil
}
func quantifierExistsOne(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
iterVar1, iterVar2, err := extractIterVars(mef, args[0], args[1])
if err != nil {
return nil, err
}
return mef.NewComprehensionTwoVar(
target,
iterVar1,
iterVar2,
parser.AccumulatorName,
/*accuInit=*/ mef.NewLiteral(types.Int(0)),
/*condition=*/ mef.NewLiteral(types.True),
/*step=*/ mef.NewCall(operators.Conditional, args[2],
mef.NewCall(operators.Add, mef.NewAccuIdent(), mef.NewLiteral(types.Int(1))),
mef.NewAccuIdent()),
/*result=*/ mef.NewCall(operators.Equals, mef.NewAccuIdent(), mef.NewLiteral(types.Int(1))),
), nil
}
func transformList(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
iterVar1, iterVar2, err := extractIterVars(mef, args[0], args[1])
if err != nil {
return nil, err
}
var transform ast.Expr
var filter ast.Expr
if len(args) == 4 {
filter = args[2]
transform = args[3]
} else {
filter = nil
transform = args[2]
}
// __result__ = __result__ + [transform]
step := mef.NewCall(operators.Add, mef.NewAccuIdent(), mef.NewList(transform))
if filter != nil {
// __result__ = (filter) ? __result__ + [transform] : __result__
step = mef.NewCall(operators.Conditional, filter, step, mef.NewAccuIdent())
}
return mef.NewComprehensionTwoVar(
target,
iterVar1,
iterVar2,
parser.AccumulatorName,
/*accuInit=*/ mef.NewList(),
/*condition=*/ mef.NewLiteral(types.True),
step,
/*result=*/ mef.NewAccuIdent(),
), nil
}
func transformMap(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
iterVar1, iterVar2, err := extractIterVars(mef, args[0], args[1])
if err != nil {
return nil, err
}
var transform ast.Expr
var filter ast.Expr
if len(args) == 4 {
filter = args[2]
transform = args[3]
} else {
filter = nil
transform = args[2]
}
// __result__ = cel.@mapInsert(__result__, iterVar1, transform)
step := mef.NewCall(mapInsert, mef.NewAccuIdent(), mef.NewIdent(iterVar1), transform)
if filter != nil {
// __result__ = (filter) ? cel.@mapInsert(__result__, iterVar1, transform) : __result__
step = mef.NewCall(operators.Conditional, filter, step, mef.NewAccuIdent())
}
return mef.NewComprehensionTwoVar(
target,
iterVar1,
iterVar2,
parser.AccumulatorName,
/*accuInit=*/ mef.NewMap(),
/*condition=*/ mef.NewLiteral(types.True),
step,
/*result=*/ mef.NewAccuIdent(),
), nil
}
func transformMapEntry(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
iterVar1, iterVar2, err := extractIterVars(mef, args[0], args[1])
if err != nil {
return nil, err
}
var transform ast.Expr
var filter ast.Expr
if len(args) == 4 {
filter = args[2]
transform = args[3]
} else {
filter = nil
transform = args[2]
}
// __result__ = cel.@mapInsert(__result__, transform)
step := mef.NewCall(mapInsert, mef.NewAccuIdent(), transform)
if filter != nil {
// __result__ = (filter) ? cel.@mapInsert(__result__, transform) : __result__
step = mef.NewCall(operators.Conditional, filter, step, mef.NewAccuIdent())
}
return mef.NewComprehensionTwoVar(
target,
iterVar1,
iterVar2,
parser.AccumulatorName,
/*accuInit=*/ mef.NewMap(),
/*condition=*/ mef.NewLiteral(types.True),
step,
/*result=*/ mef.NewAccuIdent(),
), nil
}
func extractIterVars(mef cel.MacroExprFactory, arg0, arg1 ast.Expr) (string, string, *cel.Error) {
iterVar1, err := extractIterVar(mef, arg0)
if err != nil {
return "", "", err
}
iterVar2, err := extractIterVar(mef, arg1)
if err != nil {
return "", "", err
}
if iterVar1 == iterVar2 {
return "", "", mef.NewError(arg1.ID(), fmt.Sprintf("duplicate variable name: %s", iterVar1))
}
if iterVar1 == parser.AccumulatorName {
return "", "", mef.NewError(arg0.ID(), "iteration variable overwrites accumulator variable")
}
if iterVar2 == parser.AccumulatorName {
return "", "", mef.NewError(arg1.ID(), "iteration variable overwrites accumulator variable")
}
return iterVar1, iterVar2, nil
}
func extractIterVar(mef cel.MacroExprFactory, target ast.Expr) (string, *cel.Error) {
iterVar, found := extractIdent(target)
if !found {
return "", mef.NewError(target.ID(), "argument must be a simple name")
}
return iterVar, nil
}
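
quantifierExistsOne above expands into a comprehension whose accumulator starts at 0, is incremented whenever the predicate holds, and is finally compared against 1, with no short-circuiting. The plain Go fold below (illustrative only, not part of the library) captures the same semantics and reproduces two of the documented examples.

package main

import "fmt"

// existsOne mirrors the expanded comprehension: count every match, then
// require that exactly one element matched.
func existsOne(items []int, pred func(i, v int) bool) bool {
    count := 0
    for i, v := range items {
        if pred(i, v) {
            count++
        }
    }
    return count == 1
}

func main() {
    fmt.Println(existsOne([]int{1, 2, 1, 3, 1, 4}, func(i, v int) bool { return i == 1 || v == 1 })) // false
    fmt.Println(existsOne([]int{1, 1, 2, 2, 3, 3}, func(i, v int) bool { return i == 2 && v == 2 })) // true
}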

View File

@@ -36,7 +36,7 @@ import (
// Examples:
//
// base64.decode('aGVsbG8=') // return b'hello'
// base64.decode('aGVsbG8') // error
// base64.decode('aGVsbG8') // return b'hello'
//
// # Base64.Encode
//
@@ -79,7 +79,14 @@ func (encoderLib) ProgramOptions() []cel.ProgramOption {
}
func base64DecodeString(str string) ([]byte, error) {
return base64.StdEncoding.DecodeString(str)
b, err := base64.StdEncoding.DecodeString(str)
if err == nil {
return b, nil
}
if _, tryAltEncoding := err.(base64.CorruptInputError); tryAltEncoding {
return base64.RawStdEncoding.DecodeString(str)
}
return nil, err
}
func base64EncodeBytes(bytes []byte) (string, error) {
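
The change above makes base64.decode tolerant of unpadded input: when the padded StdEncoding reports corrupt input, the function retries with RawStdEncoding instead of failing. A self-contained sketch of the same fallback using only the standard library (the helper name is illustrative):

package main

import (
    "encoding/base64"
    "fmt"
)

// decode mirrors base64DecodeString: try the padded alphabet first, then
// retry without padding when the error suggests the input was unpadded.
func decode(s string) ([]byte, error) {
    b, err := base64.StdEncoding.DecodeString(s)
    if err == nil {
        return b, nil
    }
    if _, ok := err.(base64.CorruptInputError); ok {
        return base64.RawStdEncoding.DecodeString(s)
    }
    return nil, err
}

func main() {
    padded, _ := decode("aGVsbG8=")
    unpadded, _ := decode("aGVsbG8")
    fmt.Println(string(padded), string(unpadded)) // hello hello
}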

View File

@@ -484,7 +484,7 @@ func matchConstantFormatStringWithListLiteralArgs(a *ast.AST) ast.ExprMatcher {
}
}
formatString := call.Target()
if formatString.Kind() != ast.LiteralKind && formatString.AsLiteral().Type() != cel.StringType {
if formatString.Kind() != ast.LiteralKind || formatString.AsLiteral().Type() != cel.StringType {
return false
}
args := call.Args()
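
The one-character change above fixes the guard's boolean logic: a format-string target should be rejected when it is not a literal or when its literal type is not a string, whereas the old && only rejected it when both checks failed. A minimal sketch of the corrected predicate, with hypothetical names:

package main

import "fmt"

// reject reports whether a candidate format target should be skipped:
// anything that is not a string literal.
func reject(isLiteral, isString bool) bool {
    return !isLiteral || !isString
}

func main() {
    fmt.Println(reject(true, true))  // false: a string literal is accepted
    fmt.Println(reject(true, false)) // true: literal, but not a string
    fmt.Println(reject(false, true)) // true: not a literal at all
}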

View File

@@ -50,14 +50,18 @@ func listStringOrError(strs []string, err error) ref.Val {
return types.DefaultTypeAdapter.NativeToValue(strs)
}
func macroTargetMatchesNamespace(ns string, target ast.Expr) bool {
func extractIdent(target ast.Expr) (string, bool) {
switch target.Kind() {
case ast.IdentKind:
if target.AsIdent() != ns {
return false
}
return true
return target.AsIdent(), true
default:
return false
return "", false
}
}
func macroTargetMatchesNamespace(ns string, target ast.Expr) bool {
if id, found := extractIdent(target); found {
return id == ns
}
return false
}

View File

@@ -16,15 +16,70 @@ package ext
import (
"fmt"
"math"
"sort"
"github.com/google/cel-go/cel"
"github.com/google/cel-go/common/ast"
"github.com/google/cel-go/common/decls"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/common/types/traits"
"github.com/google/cel-go/parser"
)
var comparableTypes = []*cel.Type{
cel.IntType,
cel.UintType,
cel.DoubleType,
cel.BoolType,
cel.DurationType,
cel.TimestampType,
cel.StringType,
cel.BytesType,
}
// Lists returns a cel.EnvOption to configure extended functions for list manipulation.
// As a general note, all indices are zero-based.
//
// # Distinct
//
// Introduced in version: 2
//
// Returns the distinct elements of a list.
//
// <list(T)>.distinct() -> <list(T)>
//
// Examples:
//
// [1, 2, 2, 3, 3, 3].distinct() // return [1, 2, 3]
// ["b", "b", "c", "a", "c"].distinct() // return ["b", "c", "a"]
// [1, "b", 2, "b"].distinct() // return [1, "b", 2]
//
// # Range
//
// Introduced in version: 2
//
// Returns a list of integers from 0 to n-1.
//
// lists.range(<int>) -> <list(int)>
//
// Examples:
//
// lists.range(5) -> [0, 1, 2, 3, 4]
//
// # Reverse
//
// Introduced in version: 2
//
// Returns the elements of a list in reverse order.
//
// <list(T)>.reverse() -> <list(T)>
//
// Examples:
//
// [5, 3, 1, 2].reverse() // return [2, 1, 3, 5]
//
// # Slice
//
// Returns a new sub-list using the indexes provided.
@@ -35,21 +90,105 @@ import (
//
// [1,2,3,4].slice(1, 3) // return [2, 3]
// [1,2,3,4].slice(2, 4) // return [3 ,4]
func Lists() cel.EnvOption {
return cel.Lib(listsLib{})
//
// # Flatten
//
// Flattens a list recursively.
// If an optional depth is provided, the list is flattened to the specified level.
// A negative depth value will result in an error.
//
// <list>.flatten(<list>) -> <list>
// <list>.flatten(<list>, <int>) -> <list>
//
// Examples:
//
// [1,[2,3],[4]].flatten() // return [1, 2, 3, 4]
// [1,[2,[3,4]]].flatten() // return [1, 2, [3, 4]]
// [1,2,[],[],[3,4]].flatten() // return [1, 2, 3, 4]
// [1,[2,[3,[4]]]].flatten(2) // return [1, 2, 3, [4]]
// [1,[2,[3,[4]]]].flatten(-1) // error
//
// # Sort
//
// Introduced in version: 2
//
// Sorts a list with comparable elements. If the element type is not comparable
// or the element types are not the same, the function will produce an error.
//
// <list(T)>.sort() -> <list(T)>
// T in {int, uint, double, bool, duration, timestamp, string, bytes}
//
// Examples:
//
// [3, 2, 1].sort() // return [1, 2, 3]
// ["b", "c", "a"].sort() // return ["a", "b", "c"]
// [1, "b"].sort() // error
// [[1, 2, 3]].sort() // error
//
// # SortBy
//
// Sorts a list by a key value, i.e., the order is determined by the result of
// an expression applied to each element of the list.
// The output of the key expression must be a comparable type, otherwise the
// function will return an error.
//
// <list(T)>.sortBy(<bindingName>, <keyExpr>) -> <list(T)>
// keyExpr returns a value in {int, uint, double, bool, duration, timestamp, string, bytes}
// Examples:
//
// [
// Player { name: "foo", score: 0 },
// Player { name: "bar", score: -10 },
// Player { name: "baz", score: 1000 },
// ].sortBy(e, e.score).map(e, e.name)
// == ["bar", "foo", "baz"]
func Lists(options ...ListsOption) cel.EnvOption {
l := &listsLib{
version: math.MaxUint32,
}
for _, o := range options {
l = o(l)
}
return cel.Lib(l)
}
type listsLib struct{}
type listsLib struct {
version uint32
}
// LibraryName implements the SingletonLibrary interface method.
func (listsLib) LibraryName() string {
return "cel.lib.ext.lists"
}
// ListsOption is a functional interface for configuring the lists library.
type ListsOption func(*listsLib) *listsLib
// ListsVersion configures the version of the lists library.
//
// The version limits which functions are available. Only functions introduced
// at or below the given version are included in the library. If this option
// is not set, all functions are available.
//
// See the library documentation to determine which version a function was introduced.
// If the documentation does not state which version a function was introduced, it can
// be assumed to be introduced at version 0, when the library was first created.
func ListsVersion(version uint32) ListsOption {
return func(lib *listsLib) *listsLib {
lib.version = version
return lib
}
}
// CompileOptions implements the Library interface method.
func (listsLib) CompileOptions() []cel.EnvOption {
func (lib listsLib) CompileOptions() []cel.EnvOption {
listType := cel.ListType(cel.TypeParamType("T"))
return []cel.EnvOption{
listListType := cel.ListType(listType)
listDyn := cel.ListType(cel.DynType)
opts := []cel.EnvOption{
cel.Function("slice",
cel.MemberOverload("list_slice",
[]*cel.Type{listType, cel.IntType, cel.IntType}, listType,
@@ -66,6 +205,151 @@ func (listsLib) CompileOptions() []cel.EnvOption {
),
),
}
if lib.version >= 1 {
opts = append(opts,
cel.Function("flatten",
cel.MemberOverload("list_flatten",
[]*cel.Type{listListType}, listType,
cel.UnaryBinding(func(arg ref.Val) ref.Val {
list, ok := arg.(traits.Lister)
if !ok {
return types.MaybeNoSuchOverloadErr(arg)
}
flatList, err := flatten(list, 1)
if err != nil {
return types.WrapErr(err)
}
return types.DefaultTypeAdapter.NativeToValue(flatList)
}),
),
cel.MemberOverload("list_flatten_int",
[]*cel.Type{listDyn, types.IntType}, listDyn,
cel.BinaryBinding(func(arg1, arg2 ref.Val) ref.Val {
list, ok := arg1.(traits.Lister)
if !ok {
return types.MaybeNoSuchOverloadErr(arg1)
}
depth, ok := arg2.(types.Int)
if !ok {
return types.MaybeNoSuchOverloadErr(arg2)
}
flatList, err := flatten(list, int64(depth))
if err != nil {
return types.WrapErr(err)
}
return types.DefaultTypeAdapter.NativeToValue(flatList)
}),
),
// To handle the case where a variable of just `list(T)` is provided at runtime
// with a graceful failure mode, disable the type guards since the implementation
// can handle lists which are already flat.
decls.DisableTypeGuards(true),
),
)
}
if lib.version >= 2 {
sortDecl := cel.Function("sort",
append(
templatedOverloads(comparableTypes, func(t *cel.Type) cel.FunctionOpt {
return cel.MemberOverload(
fmt.Sprintf("list_%s_sort", t.TypeName()),
[]*cel.Type{cel.ListType(t)}, cel.ListType(t),
)
}),
cel.SingletonUnaryBinding(
func(arg ref.Val) ref.Val {
list, ok := arg.(traits.Lister)
if !ok {
return types.MaybeNoSuchOverloadErr(arg)
}
sorted, err := sortList(list)
if err != nil {
return types.WrapErr(err)
}
return sorted
},
// List traits
traits.ListerType,
),
)...,
)
opts = append(opts, sortDecl)
opts = append(opts, cel.Macros(cel.ReceiverMacro("sortBy", 2, sortByMacro)))
opts = append(opts, cel.Function("@sortByAssociatedKeys",
append(
templatedOverloads(comparableTypes, func(u *cel.Type) cel.FunctionOpt {
return cel.MemberOverload(
fmt.Sprintf("list_%s_sortByAssociatedKeys", u.TypeName()),
[]*cel.Type{listType, cel.ListType(u)}, listType,
)
}),
cel.SingletonBinaryBinding(
func(arg1 ref.Val, arg2 ref.Val) ref.Val {
list, ok := arg1.(traits.Lister)
if !ok {
return types.MaybeNoSuchOverloadErr(arg1)
}
keys, ok := arg2.(traits.Lister)
if !ok {
return types.MaybeNoSuchOverloadErr(arg2)
}
sorted, err := sortListByAssociatedKeys(list, keys)
if err != nil {
return types.WrapErr(err)
}
return sorted
},
// List traits
traits.ListerType,
),
)...,
))
opts = append(opts, cel.Function("lists.range",
cel.Overload("lists_range",
[]*cel.Type{cel.IntType}, cel.ListType(cel.IntType),
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
n := args[0].(types.Int)
result, err := genRange(n)
if err != nil {
return types.WrapErr(err)
}
return result
}),
),
))
opts = append(opts, cel.Function("reverse",
cel.MemberOverload("list_reverse",
[]*cel.Type{listType}, listType,
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
list := args[0].(traits.Lister)
result, err := reverseList(list)
if err != nil {
return types.WrapErr(err)
}
return result
}),
),
))
opts = append(opts, cel.Function("distinct",
cel.MemberOverload("list_distinct",
[]*cel.Type{listType}, listType,
cel.UnaryBinding(func(list ref.Val) ref.Val {
result, err := distinctList(list.(traits.Lister))
if err != nil {
return types.WrapErr(err)
}
return result
}),
),
))
}
return opts
}
// ProgramOptions implements the Library interface method.
@@ -73,6 +357,24 @@ func (listsLib) ProgramOptions() []cel.ProgramOption {
return []cel.ProgramOption{}
}
func genRange(n types.Int) (ref.Val, error) {
var newList []ref.Val
for i := types.Int(0); i < n; i++ {
newList = append(newList, i)
}
return types.DefaultTypeAdapter.NativeToValue(newList), nil
}
func reverseList(list traits.Lister) (ref.Val, error) {
var newList []ref.Val
listLength := list.Size().(types.Int)
for i := types.Int(0); i < listLength; i++ {
val := list.Get(listLength - i - 1)
newList = append(newList, val)
}
return types.DefaultTypeAdapter.NativeToValue(newList), nil
}
func slice(list traits.Lister, start, end types.Int) (ref.Val, error) {
listLength := list.Size().(types.Int)
if start < 0 || end < 0 {
@@ -92,3 +394,167 @@ func slice(list traits.Lister, start, end types.Int) (ref.Val, error) {
}
return types.DefaultTypeAdapter.NativeToValue(newList), nil
}
func flatten(list traits.Lister, depth int64) ([]ref.Val, error) {
if depth < 0 {
return nil, fmt.Errorf("level must be non-negative")
}
var newList []ref.Val
iter := list.Iterator()
for iter.HasNext() == types.True {
val := iter.Next()
nestedList, isList := val.(traits.Lister)
if !isList || depth == 0 {
newList = append(newList, val)
continue
} else {
flattenedList, err := flatten(nestedList, depth-1)
if err != nil {
return nil, err
}
newList = append(newList, flattenedList...)
}
}
return newList, nil
}
func sortList(list traits.Lister) (ref.Val, error) {
return sortListByAssociatedKeys(list, list)
}
// Internal function used for the implementation of sort() and sortBy().
//
// Sorts a list of arbitrary elements, according to the order produced by sorting
// another list of comparable elements. If the element type of the keys is not
// comparable or the element types are not the same, the function will produce an error.
//
// <list(T)>.@sortByAssociatedKeys(<list(U)>) -> <list(T)>
// U in {int, uint, double, bool, duration, timestamp, string, bytes}
//
// Example:
//
// ["foo", "bar", "baz"].@sortByAssociatedKeys([3, 1, 2]) // return ["bar", "baz", "foo"]
func sortListByAssociatedKeys(list, keys traits.Lister) (ref.Val, error) {
listLength := list.Size().(types.Int)
keysLength := keys.Size().(types.Int)
if listLength != keysLength {
return nil, fmt.Errorf(
"@sortByAssociatedKeys() expected a list of the same size as the associated keys list, but got %d and %d elements respectively",
listLength,
keysLength,
)
}
if listLength == 0 {
return list, nil
}
elem := keys.Get(types.IntZero)
if _, ok := elem.(traits.Comparer); !ok {
return nil, fmt.Errorf("list elements must be comparable")
}
sortedIndices := make([]ref.Val, 0, listLength)
for i := types.IntZero; i < listLength; i++ {
if keys.Get(i).Type() != elem.Type() {
return nil, fmt.Errorf("list elements must have the same type")
}
sortedIndices = append(sortedIndices, i)
}
sort.Slice(sortedIndices, func(i, j int) bool {
iKey := keys.Get(sortedIndices[i])
jKey := keys.Get(sortedIndices[j])
return iKey.(traits.Comparer).Compare(jKey) == types.IntNegOne
})
sorted := make([]ref.Val, 0, listLength)
for _, sortedIdx := range sortedIndices {
sorted = append(sorted, list.Get(sortedIdx))
}
return types.DefaultTypeAdapter.NativeToValue(sorted), nil
}
// sortByMacro transforms an expression like:
//
// mylistExpr.sortBy(e, -math.abs(e))
//
// into something equivalent to:
//
// cel.bind(
// __sortBy_input__,
// myListExpr,
// __sortBy_input__.@sortByAssociatedKeys(__sortBy_input__.map(e, -math.abs(e)))
// )
func sortByMacro(meh cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
varIdent := meh.NewIdent("@__sortBy_input__")
varName := varIdent.AsIdent()
targetKind := target.Kind()
if targetKind != ast.ListKind &&
targetKind != ast.SelectKind &&
targetKind != ast.IdentKind &&
targetKind != ast.ComprehensionKind && targetKind != ast.CallKind {
return nil, meh.NewError(target.ID(), fmt.Sprintf("sortBy can only be applied to a list, identifier, comprehension, call or select expression"))
}
mapCompr, err := parser.MakeMap(meh, meh.Copy(varIdent), args)
if err != nil {
return nil, err
}
callExpr := meh.NewMemberCall("@sortByAssociatedKeys",
meh.Copy(varIdent),
mapCompr,
)
bindExpr := meh.NewComprehension(
meh.NewList(),
"#unused",
varName,
target,
meh.NewLiteral(types.False),
varIdent,
callExpr,
)
return bindExpr, nil
}
func distinctList(list traits.Lister) (ref.Val, error) {
listLength := list.Size().(types.Int)
if listLength == 0 {
return list, nil
}
uniqueList := make([]ref.Val, 0, listLength)
for i := types.IntZero; i < listLength; i++ {
val := list.Get(i)
seen := false
for j := types.IntZero; j < types.Int(len(uniqueList)); j++ {
if i == j {
continue
}
other := uniqueList[j]
if val.Equal(other) == types.True {
seen = true
break
}
}
if !seen {
uniqueList = append(uniqueList, val)
}
}
return types.DefaultTypeAdapter.NativeToValue(uniqueList), nil
}
func templatedOverloads(types []*cel.Type, template func(t *cel.Type) cel.FunctionOpt) []cel.FunctionOpt {
overloads := make([]cel.FunctionOpt, len(types))
for i, t := range types {
overloads[i] = template(t)
}
return overloads
}
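
sortListByAssociatedKeys implements sort() and sortBy() by sorting a slice of indices according to the parallel key list and then projecting the original elements through those indices. The standard-library sketch below (hypothetical helper, not the cel-go API) shows the same index-projection idea on native Go slices.

package main

import (
    "fmt"
    "sort"
)

// sortByKeys orders vals by the corresponding entries in keys.
func sortByKeys(vals []string, keys []int) []string {
    idx := make([]int, len(vals))
    for i := range idx {
        idx[i] = i
    }
    sort.Slice(idx, func(a, b int) bool { return keys[idx[a]] < keys[idx[b]] })
    out := make([]string, 0, len(vals))
    for _, i := range idx {
        out = append(out, vals[i])
    }
    return out
}

func main() {
    fmt.Println(sortByKeys([]string{"foo", "bar", "baz"}, []int{3, 1, 2})) // [bar baz foo]
}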

View File

@@ -16,6 +16,7 @@ package ext
import (
"fmt"
"math"
"strings"
"github.com/google/cel-go/cel"
@@ -86,28 +87,312 @@ import (
// math.least('string') // parse error
// math.least(a, b) // check-time error if a or b is non-numeric
// math.least(dyn('string')) // runtime error
func Math() cel.EnvOption {
return cel.Lib(mathLib{})
//
// # Math.BitOr
//
// Introduced at version: 1
//
// Performs a bitwise-OR operation over two int or uint values.
//
// math.bitOr(<int>, <int>) -> <int>
// math.bitOr(<uint>, <uint>) -> <uint>
//
// Examples:
//
// math.bitOr(1u, 2u) // returns 3u
// math.bitOr(-2, -4) // returns -2
//
// # Math.BitAnd
//
// Introduced at version: 1
//
// Performs a bitwise-AND operation over two int or uint values.
//
// math.bitAnd(<int>, <int>) -> <int>
// math.bitAnd(<uint>, <uint>) -> <uint>
//
// Examples:
//
// math.bitAnd(3u, 2u) // return 2u
// math.bitAnd(3, 5) // returns 3
// math.bitAnd(-3, -5) // returns -7
//
// # Math.BitXor
//
// Introduced at version: 1
//
// Performs a bitwise-XOR operation over two int or uint values.
//
// math.bitXor(<int>, <int>) -> <int>
// math.bitXor(<uint>, <uint>) -> <uint>
//
// Examples:
//
// math.bitXor(3u, 5u) // returns 6u
// math.bitXor(1, 3) // returns 2
//
// # Math.BitNot
//
// Introduced at version: 1
//
// Function which accepts a single int or uint and performs a bitwise-NOT
// ones-complement of the given binary value.
//
// math.bitNot(<int>) -> <int>
// math.bitNot(<uint>) -> <uint>
//
// Examples:
//
// math.bitNot(1) // returns -1
// math.bitNot(-1) // returns 0
// math.bitNot(0u) // returns 18446744073709551615u
//
// # Math.BitShiftLeft
//
// Introduced at version: 1
//
// Perform a left shift of bits on the first parameter, by the amount of bits
// specified in the second parameter. The first parameter is either a uint or
// an int. The second parameter must be an int.
//
// When the second parameter is 64 or greater, 0 will always be returned
// since the number of bits shifted is greater than or equal to the total bit
// length of the number being shifted. Negative valued bit shifts will result
// in a runtime error.
//
// math.bitShiftLeft(<int>, <int>) -> <int>
// math.bitShiftLeft(<uint>, <int>) -> <uint>
//
// Examples:
//
// math.bitShiftLeft(1, 2) // returns 4
// math.bitShiftLeft(-1, 2) // returns -4
// math.bitShiftLeft(1u, 2) // returns 4u
// math.bitShiftLeft(1u, 200) // returns 0u
//
// # Math.BitShiftRight
//
// Introduced at version: 1
//
// Perform a right shift of bits on the first parameter, by the amount of bits
// specified in the second parameter. The first parameter is either a uint or
// an int. The second parameter must be an int.
//
// When the second parameter is 64 or greater, 0 will always be returned since
// the number of bits shifted is greater than or equal to the total bit length
// of the number being shifted. Negative valued bit shifts will result in a
// runtime error.
//
// The sign bit extension will not be preserved for this operation: vacant bits
// on the left are filled with 0.
//
// math.bitShiftRight(<int>, <int>) -> <int>
// math.bitShiftRight(<uint>, <int>) -> <uint>
//
// Examples:
//
// math.bitShiftRight(1024, 2) // returns 256
// math.bitShiftRight(1024u, 2) // returns 256u
// math.bitShiftRight(1024u, 64) // returns 0u
//
// # Math.Ceil
//
// Introduced at version: 1
//
// Compute the ceiling of a double value.
//
// math.ceil(<double>) -> <double>
//
// Examples:
//
// math.ceil(1.2) // returns 2.0
// math.ceil(-1.2) // returns -1.0
//
// # Math.Floor
//
// Introduced at version: 1
//
// Compute the floor of a double value.
//
// math.floor(<double>) -> <double>
//
// Examples:
//
// math.floor(1.2) // returns 1.0
// math.floor(-1.2) // returns -2.0
//
// # Math.Round
//
// Introduced at version: 1
//
// Rounds the double value to the nearest whole number with ties rounding away
// from zero, e.g. 1.5 -> 2.0, -1.5 -> -2.0.
//
// math.round(<double>) -> <double>
//
// Examples:
//
// math.round(1.2) // returns 1.0
// math.round(1.5) // returns 2.0
// math.round(-1.5) // returns -2.0
//
// # Math.Trunc
//
// Introduced at version: 1
//
// Truncates the fractional portion of the double value.
//
// math.trunc(<double>) -> <double>
//
// Examples:
//
// math.trunc(-1.3) // returns -1.0
// math.trunc(1.3) // returns 1.0
//
// # Math.Abs
//
// Introduced at version: 1
//
// Returns the absolute value of the numeric type provided as input. If the
// value is NaN, the output is NaN. If the input is int64 min, the function
// will result in an overflow error.
//
// math.abs(<double>) -> <double>
// math.abs(<int>) -> <int>
// math.abs(<uint>) -> <uint>
//
// Examples:
//
// math.abs(-1) // returns 1
// math.abs(1) // returns 1
// math.abs(-9223372036854775808) // overflow error
//
// # Math.Sign
//
// Introduced at version: 1
//
// Returns the sign of the numeric type, either -1, 0, 1 as an int, double, or
// uint depending on the overload. For floating point values, if NaN is
// provided as input, the output is also NaN. The implementation does not
// differentiate between positive and negative zero.
//
// math.sign(<double>) -> <double>
// math.sign(<int>) -> <int>
// math.sign(<uint>) -> <uint>
//
// Examples:
//
// math.sign(-42) // returns -1
// math.sign(0) // returns 0
// math.sign(42) // returns 1
//
// # Math.IsInf
//
// Introduced at version: 1
//
// Returns true if the input double value is -Inf or +Inf.
//
// math.isInf(<double>) -> <bool>
//
// Examples:
//
// math.isInf(1.0/0.0) // returns true
// math.isInf(1.2) // returns false
//
// # Math.IsNaN
//
// Introduced at version: 1
//
// Returns true if the input double value is NaN, false otherwise.
//
// math.isNaN(<double>) -> <bool>
//
// Examples:
//
// math.isNaN(0.0/0.0) // returns true
// math.isNaN(1.2) // returns false
//
// # Math.IsFinite
//
// Introduced at version: 1
//
// Returns true if the value is a finite number. Equivalent in behavior to:
// !math.isNaN(double) && !math.isInf(double)
//
// math.isFinite(<double>) -> <bool>
//
// Examples:
//
// math.isFinite(0.0/0.0) // returns false
// math.isFinite(1.2) // returns true
func Math(options ...MathOption) cel.EnvOption {
m := &mathLib{version: math.MaxUint32}
for _, o := range options {
m = o(m)
}
return cel.Lib(m)
}
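A hedged sketch of the version gating, assuming the standard cel-go import paths; version 0 exposes only math.least/math.greatest, while MathVersion(1) declares the helpers documented above:

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/ext"
)

func main() {
	v0, err := cel.NewEnv(ext.Math(ext.MathVersion(0)))
	if err != nil {
		panic(err)
	}
	v1, err := cel.NewEnv(ext.Math(ext.MathVersion(1)))
	if err != nil {
		panic(err)
	}
	expr := `math.ceil(1.2) == 2.0 && math.bitOr(1u, 2u) == 3u`
	// The version-0 environment has no math.ceil or math.bitOr declarations, so compilation fails.
	if _, iss := v0.Compile(expr); iss.Err() != nil {
		fmt.Println("rejected at version 0")
	}
	ast, iss := v1.Compile(expr)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := v1.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // true
}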
const (
mathNamespace = "math"
leastMacro = "least"
greatestMacro = "greatest"
minFunc = "math.@min"
maxFunc = "math.@max"
// Min-max functions
minFunc = "math.@min"
maxFunc = "math.@max"
// Rounding functions
ceilFunc = "math.ceil"
floorFunc = "math.floor"
roundFunc = "math.round"
truncFunc = "math.trunc"
// Floating point helper functions
isInfFunc = "math.isInf"
isNanFunc = "math.isNaN"
isFiniteFunc = "math.isFinite"
// Signedness functions
absFunc = "math.abs"
signFunc = "math.sign"
// Bitwise functions
bitAndFunc = "math.bitAnd"
bitOrFunc = "math.bitOr"
bitXorFunc = "math.bitXor"
bitNotFunc = "math.bitNot"
bitShiftLeftFunc = "math.bitShiftLeft"
bitShiftRightFunc = "math.bitShiftRight"
)
type mathLib struct{}
var (
errIntOverflow = types.NewErr("integer overflow")
)
// MathOption declares a functional operator for configuring math extensions.
type MathOption func(*mathLib) *mathLib
// MathVersion sets the library version for math extensions.
func MathVersion(version uint32) MathOption {
return func(lib *mathLib) *mathLib {
lib.version = version
return lib
}
}
type mathLib struct {
version uint32
}
// LibraryName implements the SingletonLibrary interface method.
func (mathLib) LibraryName() string {
func (*mathLib) LibraryName() string {
return "cel.lib.ext.math"
}
// CompileOptions implements the Library interface method.
func (mathLib) CompileOptions() []cel.EnvOption {
return []cel.EnvOption{
func (lib *mathLib) CompileOptions() []cel.EnvOption {
opts := []cel.EnvOption{
cel.Macros(
// math.least(num, ...)
cel.ReceiverVarArgMacro(leastMacro, mathLeast),
@@ -179,10 +464,95 @@ func (mathLib) CompileOptions() []cel.EnvOption {
cel.UnaryBinding(maxList)),
),
}
if lib.version >= 1 {
opts = append(opts,
// Rounding function declarations
cel.Function(ceilFunc,
cel.Overload("math_ceil_double", []*cel.Type{cel.DoubleType}, cel.DoubleType,
cel.UnaryBinding(ceil))),
cel.Function(floorFunc,
cel.Overload("math_floor_double", []*cel.Type{cel.DoubleType}, cel.DoubleType,
cel.UnaryBinding(floor))),
cel.Function(roundFunc,
cel.Overload("math_round_double", []*cel.Type{cel.DoubleType}, cel.DoubleType,
cel.UnaryBinding(round))),
cel.Function(truncFunc,
cel.Overload("math_trunc_double", []*cel.Type{cel.DoubleType}, cel.DoubleType,
cel.UnaryBinding(trunc))),
// Floating point helpers
cel.Function(isInfFunc,
cel.Overload("math_isInf_double", []*cel.Type{cel.DoubleType}, cel.BoolType,
cel.UnaryBinding(isInf))),
cel.Function(isNanFunc,
cel.Overload("math_isNaN_double", []*cel.Type{cel.DoubleType}, cel.BoolType,
cel.UnaryBinding(isNaN))),
cel.Function(isFiniteFunc,
cel.Overload("math_isFinite_double", []*cel.Type{cel.DoubleType}, cel.BoolType,
cel.UnaryBinding(isFinite))),
// Signedness functions
cel.Function(absFunc,
cel.Overload("math_abs_double", []*cel.Type{cel.DoubleType}, cel.DoubleType,
cel.UnaryBinding(absDouble)),
cel.Overload("math_abs_int", []*cel.Type{cel.IntType}, cel.IntType,
cel.UnaryBinding(absInt)),
cel.Overload("math_abs_uint", []*cel.Type{cel.UintType}, cel.UintType,
cel.UnaryBinding(identity)),
),
cel.Function(signFunc,
cel.Overload("math_sign_double", []*cel.Type{cel.DoubleType}, cel.DoubleType,
cel.UnaryBinding(sign)),
cel.Overload("math_sign_int", []*cel.Type{cel.IntType}, cel.IntType,
cel.UnaryBinding(sign)),
cel.Overload("math_sign_uint", []*cel.Type{cel.UintType}, cel.UintType,
cel.UnaryBinding(sign)),
),
// Bitwise operator declarations
cel.Function(bitAndFunc,
cel.Overload("math_bitAnd_int_int", []*cel.Type{cel.IntType, cel.IntType}, cel.IntType,
cel.BinaryBinding(bitAndPairInt)),
cel.Overload("math_bitAnd_uint_uint", []*cel.Type{cel.UintType, cel.UintType}, cel.UintType,
cel.BinaryBinding(bitAndPairUint)),
),
cel.Function(bitOrFunc,
cel.Overload("math_bitOr_int_int", []*cel.Type{cel.IntType, cel.IntType}, cel.IntType,
cel.BinaryBinding(bitOrPairInt)),
cel.Overload("math_bitOr_uint_uint", []*cel.Type{cel.UintType, cel.UintType}, cel.UintType,
cel.BinaryBinding(bitOrPairUint)),
),
cel.Function(bitXorFunc,
cel.Overload("math_bitXor_int_int", []*cel.Type{cel.IntType, cel.IntType}, cel.IntType,
cel.BinaryBinding(bitXorPairInt)),
cel.Overload("math_bitXor_uint_uint", []*cel.Type{cel.UintType, cel.UintType}, cel.UintType,
cel.BinaryBinding(bitXorPairUint)),
),
cel.Function(bitNotFunc,
cel.Overload("math_bitNot_int_int", []*cel.Type{cel.IntType}, cel.IntType,
cel.UnaryBinding(bitNotInt)),
cel.Overload("math_bitNot_uint_uint", []*cel.Type{cel.UintType}, cel.UintType,
cel.UnaryBinding(bitNotUint)),
),
cel.Function(bitShiftLeftFunc,
cel.Overload("math_bitShiftLeft_int_int", []*cel.Type{cel.IntType, cel.IntType}, cel.IntType,
cel.BinaryBinding(bitShiftLeftIntInt)),
cel.Overload("math_bitShiftLeft_uint_int", []*cel.Type{cel.UintType, cel.IntType}, cel.UintType,
cel.BinaryBinding(bitShiftLeftUintInt)),
),
cel.Function(bitShiftRightFunc,
cel.Overload("math_bitShiftRight_int_int", []*cel.Type{cel.IntType, cel.IntType}, cel.IntType,
cel.BinaryBinding(bitShiftRightIntInt)),
cel.Overload("math_bitShiftRight_uint_int", []*cel.Type{cel.UintType, cel.IntType}, cel.UintType,
cel.BinaryBinding(bitShiftRightUintInt)),
),
)
}
return opts
}
// ProgramOptions implements the Library interface method.
func (mathLib) ProgramOptions() []cel.ProgramOption {
func (*mathLib) ProgramOptions() []cel.ProgramOption {
return []cel.ProgramOption{}
}
@@ -194,7 +564,7 @@ func mathLeast(meh cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.
case 0:
return nil, meh.NewError(target.ID(), "math.least() requires at least one argument")
case 1:
if isListLiteralWithValidArgs(args[0]) || isValidArgType(args[0]) {
if isListLiteralWithNumericArgs(args[0]) || isNumericArgType(args[0]) {
return meh.NewCall(minFunc, args[0]), nil
}
return nil, meh.NewError(args[0].ID(), "math.least() invalid single argument value")
@@ -221,7 +591,7 @@ func mathGreatest(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (a
case 0:
return nil, mef.NewError(target.ID(), "math.greatest() requires at least one argument")
case 1:
if isListLiteralWithValidArgs(args[0]) || isValidArgType(args[0]) {
if isListLiteralWithNumericArgs(args[0]) || isNumericArgType(args[0]) {
return mef.NewCall(maxFunc, args[0]), nil
}
return nil, mef.NewError(args[0].ID(), "math.greatest() invalid single argument value")
@@ -244,6 +614,165 @@ func identity(val ref.Val) ref.Val {
return val
}
func ceil(val ref.Val) ref.Val {
v := val.(types.Double)
return types.Double(math.Ceil(float64(v)))
}
func floor(val ref.Val) ref.Val {
v := val.(types.Double)
return types.Double(math.Floor(float64(v)))
}
func round(val ref.Val) ref.Val {
v := val.(types.Double)
return types.Double(math.Round(float64(v)))
}
func trunc(val ref.Val) ref.Val {
v := val.(types.Double)
return types.Double(math.Trunc(float64(v)))
}
func isInf(val ref.Val) ref.Val {
v := val.(types.Double)
return types.Bool(math.IsInf(float64(v), 0))
}
func isFinite(val ref.Val) ref.Val {
v := float64(val.(types.Double))
return types.Bool(!math.IsInf(v, 0) && !math.IsNaN(v))
}
func isNaN(val ref.Val) ref.Val {
v := val.(types.Double)
return types.Bool(math.IsNaN(float64(v)))
}
func absDouble(val ref.Val) ref.Val {
v := float64(val.(types.Double))
return types.Double(math.Abs(v))
}
func absInt(val ref.Val) ref.Val {
v := int64(val.(types.Int))
if v == math.MinInt64 {
return errIntOverflow
}
if v >= 0 {
return val
}
return -types.Int(v)
}
func sign(val ref.Val) ref.Val {
switch v := val.(type) {
case types.Double:
if isNaN(v) == types.True {
return v
}
zero := types.Double(0)
if v > zero {
return types.Double(1)
}
if v < zero {
return types.Double(-1)
}
return zero
case types.Int:
return v.Compare(types.IntZero)
case types.Uint:
if v == types.Uint(0) {
return types.Uint(0)
}
return types.Uint(1)
default:
return maybeSuffixError(val, "math.sign")
}
}
func bitAndPairInt(first, second ref.Val) ref.Val {
l := first.(types.Int)
r := second.(types.Int)
return l & r
}
func bitAndPairUint(first, second ref.Val) ref.Val {
l := first.(types.Uint)
r := second.(types.Uint)
return l & r
}
func bitOrPairInt(first, second ref.Val) ref.Val {
l := first.(types.Int)
r := second.(types.Int)
return l | r
}
func bitOrPairUint(first, second ref.Val) ref.Val {
l := first.(types.Uint)
r := second.(types.Uint)
return l | r
}
func bitXorPairInt(first, second ref.Val) ref.Val {
l := first.(types.Int)
r := second.(types.Int)
return l ^ r
}
func bitXorPairUint(first, second ref.Val) ref.Val {
l := first.(types.Uint)
r := second.(types.Uint)
return l ^ r
}
func bitNotInt(value ref.Val) ref.Val {
v := value.(types.Int)
return ^v
}
func bitNotUint(value ref.Val) ref.Val {
v := value.(types.Uint)
return ^v
}
func bitShiftLeftIntInt(value, bits ref.Val) ref.Val {
v := value.(types.Int)
bs := bits.(types.Int)
if bs < types.IntZero {
return types.NewErr("math.bitShiftLeft() negative offset: %d", bs)
}
return v << bs
}
func bitShiftLeftUintInt(value, bits ref.Val) ref.Val {
v := value.(types.Uint)
bs := bits.(types.Int)
if bs < types.IntZero {
return types.NewErr("math.bitShiftLeft() negative offset: %d", bs)
}
return v << bs
}
func bitShiftRightIntInt(value, bits ref.Val) ref.Val {
v := value.(types.Int)
bs := bits.(types.Int)
if bs < types.IntZero {
return types.NewErr("math.bitShiftRight() negative offset: %d", bs)
}
return types.Int(types.Uint(v) >> bs)
}
func bitShiftRightUintInt(value, bits ref.Val) ref.Val {
v := value.(types.Uint)
bs := bits.(types.Int)
if bs < types.IntZero {
return types.NewErr("math.bitShiftRight() negative offset: %d", bs)
}
return v >> bs
}
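The int overload above reinterprets the operand as unsigned before shifting, so the sign bit is not extended. A standalone Go sketch of that arithmetic (illustrative only, not part of the library):

package main

import "fmt"

func main() {
	v := int64(-8)
	// Logical shift, mirroring bitShiftRightIntInt: convert to uint64, shift, convert back.
	logical := int64(uint64(v) >> 1)
	// Go's native >> on a signed int is an arithmetic shift that extends the sign bit.
	arithmetic := v >> 1
	fmt.Println(logical)    // 9223372036854775804
	fmt.Println(arithmetic) // -4
}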
func minPair(first, second ref.Val) ref.Val {
cmp, ok := first.(traits.Comparer)
if !ok {
@@ -321,13 +850,13 @@ func checkInvalidArgs(meh cel.MacroExprFactory, funcName string, args []ast.Expr
}
func checkInvalidArgLiteral(funcName string, arg ast.Expr) error {
if !isValidArgType(arg) {
if !isNumericArgType(arg) {
return fmt.Errorf("%s simple literal arguments must be numeric", funcName)
}
return nil
}
func isValidArgType(arg ast.Expr) bool {
func isNumericArgType(arg ast.Expr) bool {
switch arg.Kind() {
case ast.LiteralKind:
c := ref.Val(arg.AsLiteral())
@@ -344,7 +873,7 @@ func isValidArgType(arg ast.Expr) bool {
}
}
func isListLiteralWithValidArgs(arg ast.Expr) bool {
func isListLiteralWithNumericArgs(arg ast.Expr) bool {
switch arg.Kind() {
case ast.ListKind:
list := arg.AsList()
@@ -352,7 +881,7 @@ func isListLiteralWithValidArgs(arg ast.Expr) bool {
return false
}
for _, e := range list.Elements() {
if !isValidArgType(e) {
if !isNumericArgType(e) {
return false
}
}

View File

@@ -15,6 +15,7 @@
package ext
import (
"errors"
"fmt"
"reflect"
"strings"
@@ -77,12 +78,45 @@ var (
// same advice holds if you are using custom type adapters and type providers. The native type
// provider composes over whichever type adapter and provider is configured in the cel.Env at
// the time that it is invoked.
func NativeTypes(refTypes ...any) cel.EnvOption {
//
// The fields of native structs can also be renamed by setting the `cel` tag on the fields you
// want to override. To enable this feature, pass in the `ParseStructTags(true)`
// option. Here is an example to see it in action:
//
// ```go
// package identity
//
// type Account struct {
// ID int
// OwnerName string `cel:"owner"`
// }
//
// ```
//
// The `OwnerName` field is now accessible in CEL via `owner`, e.g. `identity.Account{owner: 'bob'}`.
// In case there are duplicated field names in the struct, an error will be returned.
func NativeTypes(args ...any) cel.EnvOption {
return func(env *cel.Env) (*cel.Env, error) {
tp, err := newNativeTypeProvider(env.CELTypeAdapter(), env.CELTypeProvider(), refTypes...)
nativeTypes := make([]any, 0, len(args))
tpOptions := nativeTypeOptions{}
for _, v := range args {
switch v := v.(type) {
case NativeTypesOption:
err := v(&tpOptions)
if err != nil {
return nil, err
}
default:
nativeTypes = append(nativeTypes, v)
}
}
tp, err := newNativeTypeProvider(tpOptions, env.CELTypeAdapter(), env.CELTypeProvider(), nativeTypes...)
if err != nil {
return nil, err
}
env, err = cel.CustomTypeAdapter(tp)(env)
if err != nil {
return nil, err
@@ -91,12 +125,79 @@ func NativeTypes(refTypes ...any) cel.EnvOption {
}
}
func newNativeTypeProvider(adapter types.Adapter, provider types.Provider, refTypes ...any) (*nativeTypeProvider, error) {
// NativeTypesOption is a functional interface for configuring handling of native types.
type NativeTypesOption func(*nativeTypeOptions) error
// NativeTypesFieldNameHandler is a handler for mapping a reflect.StructField to a CEL field name.
// This can be used to override the default Go struct field to CEL field name mapping.
type NativeTypesFieldNameHandler = func(field reflect.StructField) string
func fieldNameByTag(structTagToParse string) func(field reflect.StructField) string {
return func(field reflect.StructField) string {
tag, found := field.Tag.Lookup(structTagToParse)
if found {
splits := strings.Split(tag, ",")
if len(splits) > 0 {
// We make the assumption that the leftmost entry in the tag is the name.
// This seems to be true for most tags that have the concept of a name/key, such as:
// https://pkg.go.dev/encoding/xml#Marshal
// https://pkg.go.dev/encoding/json#Marshal
// https://pkg.go.dev/go.mongodb.org/mongo-driver/bson#hdr-Structs
// https://pkg.go.dev/gopkg.in/yaml.v2#Marshal
name := splits[0]
return name
}
}
return field.Name
}
}
type nativeTypeOptions struct {
// fieldNameHandler controls how CEL should perform struct field renames.
// This is most commonly used for switching to parsing based off the struct field tag,
// such as "cel" or "json".
fieldNameHandler NativeTypesFieldNameHandler
}
// ParseStructTags configures if native types field names should be overridable by CEL struct tags.
// This is equivalent to ParseStructTag("cel")
func ParseStructTags(enabled bool) NativeTypesOption {
return func(ntp *nativeTypeOptions) error {
if enabled {
ntp.fieldNameHandler = fieldNameByTag("cel")
} else {
ntp.fieldNameHandler = nil
}
return nil
}
}
// ParseStructTag configures the struct tag to parse. The 0th item in the tag is used as the name of the CEL field.
// For example:
// If the tag to parse is "cel" and the struct field has tag cel:"foo", the CEL struct field will be "foo".
// If the tag to parse is "json" and the struct field has tag json:"foo,omitempty", the CEL struct field will be "foo".
func ParseStructTag(tag string) NativeTypesOption {
return func(ntp *nativeTypeOptions) error {
ntp.fieldNameHandler = fieldNameByTag(tag)
return nil
}
}
// ParseStructField configures how to parse Go struct fields. It can be used to customize struct field parsing.
func ParseStructField(handler NativeTypesFieldNameHandler) NativeTypesOption {
return func(ntp *nativeTypeOptions) error {
ntp.fieldNameHandler = handler
return nil
}
}
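A hedged end-to-end sketch of struct-tag renaming; the Account type is illustrative, cel.DynType keeps the example independent of how the CEL type name is derived from the Go package path, and the import paths are assumptions about the surrounding cel-go modules:

package main

import (
	"fmt"
	"reflect"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/ext"
)

type Account struct {
	ID        int
	OwnerName string `cel:"owner"`
}

func main() {
	// ParseStructTags(true) lets the `cel` tag override the Go field name.
	env, err := cel.NewEnv(
		ext.NativeTypes(reflect.TypeOf(&Account{}), ext.ParseStructTags(true)),
		cel.Variable("account", cel.DynType),
	)
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`account.owner == 'bob'`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(map[string]any{"account": &Account{OwnerName: "bob"}})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // true
}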
func newNativeTypeProvider(tpOptions nativeTypeOptions, adapter types.Adapter, provider types.Provider, refTypes ...any) (*nativeTypeProvider, error) {
nativeTypes := make(map[string]*nativeType, len(refTypes))
for _, refType := range refTypes {
switch rt := refType.(type) {
case reflect.Type:
result, err := newNativeTypes(rt)
result, err := newNativeTypes(tpOptions.fieldNameHandler, rt)
if err != nil {
return nil, err
}
@@ -104,7 +205,7 @@ func newNativeTypeProvider(adapter types.Adapter, provider types.Provider, refTy
nativeTypes[result[idx].TypeName()] = result[idx]
}
case reflect.Value:
result, err := newNativeTypes(rt.Type())
result, err := newNativeTypes(tpOptions.fieldNameHandler, rt.Type())
if err != nil {
return nil, err
}
@@ -119,6 +220,7 @@ func newNativeTypeProvider(adapter types.Adapter, provider types.Provider, refTy
nativeTypes: nativeTypes,
baseAdapter: adapter,
baseProvider: provider,
options: tpOptions,
}, nil
}
@@ -126,6 +228,7 @@ type nativeTypeProvider struct {
nativeTypes map[string]*nativeType
baseAdapter types.Adapter
baseProvider types.Provider
options nativeTypeOptions
}
// EnumValue proxies to the types.Provider configured at the times the NativeTypes
@@ -155,6 +258,14 @@ func (tp *nativeTypeProvider) FindStructType(typeName string) (*types.Type, bool
return tp.baseProvider.FindStructType(typeName)
}
func toFieldName(fieldNameHandler NativeTypesFieldNameHandler, f reflect.StructField) string {
if fieldNameHandler == nil {
return f.Name
}
return fieldNameHandler(f)
}
// FindStructFieldNames looks up the type definition first from the native types, then from
// the backing provider type set. If found, a set of field names corresponding to the type
// will be returned.
@@ -163,7 +274,7 @@ func (tp *nativeTypeProvider) FindStructFieldNames(typeName string) ([]string, b
fieldCount := t.refType.NumField()
fields := make([]string, fieldCount)
for i := 0; i < fieldCount; i++ {
fields[i] = t.refType.Field(i).Name
fields[i] = toFieldName(tp.options.fieldNameHandler, t.refType.Field(i))
}
return fields, true
}
@@ -192,13 +303,13 @@ func (tp *nativeTypeProvider) FindStructFieldType(typeName, fieldName string) (*
Type: celType,
IsSet: func(obj any) bool {
refVal := reflect.Indirect(reflect.ValueOf(obj))
refField := refVal.FieldByName(fieldName)
refField := refVal.FieldByName(refField.Name)
return !refField.IsZero()
},
GetFrom: func(obj any) (any, error) {
refVal := reflect.Indirect(reflect.ValueOf(obj))
refField := refVal.FieldByName(fieldName)
return getFieldValue(tp, refField), nil
refField := refVal.FieldByName(refField.Name)
return getFieldValue(refField), nil
},
}, true
}
@@ -249,6 +360,9 @@ func (tp *nativeTypeProvider) NativeToValue(val any) ref.Val {
case []byte:
return tp.baseAdapter.NativeToValue(val)
default:
if refVal.Type().Elem() == reflect.TypeOf(byte(0)) {
return tp.baseAdapter.NativeToValue(val)
}
return types.NewDynamicList(tp, val)
}
case reflect.Map:
@@ -259,7 +373,7 @@ func (tp *nativeTypeProvider) NativeToValue(val any) ref.Val {
time.Time:
return tp.baseAdapter.NativeToValue(val)
default:
return newNativeObject(tp, val, rawVal)
return tp.newNativeObject(val, rawVal)
}
default:
return tp.baseAdapter.NativeToValue(val)
@@ -319,13 +433,13 @@ func convertToCelType(refType reflect.Type) (*cel.Type, bool) {
return nil, false
}
func newNativeObject(adapter types.Adapter, val any, refValue reflect.Value) ref.Val {
valType, err := newNativeType(refValue.Type())
func (tp *nativeTypeProvider) newNativeObject(val any, refValue reflect.Value) ref.Val {
valType, err := newNativeType(tp.options.fieldNameHandler, refValue.Type())
if err != nil {
return types.NewErr(err.Error())
}
return &nativeObj{
Adapter: adapter,
Adapter: tp,
val: val,
valType: valType,
refValue: refValue,
@@ -372,12 +486,13 @@ func (o *nativeObj) ConvertToNative(typeDesc reflect.Type) (any, error) {
if !fieldValue.IsValid() || fieldValue.IsZero() {
continue
}
fieldName := toFieldName(o.valType.fieldNameHandler, fieldType)
fieldCELVal := o.NativeToValue(fieldValue.Interface())
fieldJSONVal, err := fieldCELVal.ConvertToNative(jsonValueType)
if err != nil {
return nil, err
}
fields[fieldType.Name] = fieldJSONVal.(*structpb.Value)
fields[fieldName] = fieldJSONVal.(*structpb.Value)
}
return &structpb.Struct{Fields: fields}, nil
}
@@ -469,8 +584,8 @@ func (o *nativeObj) Value() any {
return o.val
}
func newNativeTypes(rawType reflect.Type) ([]*nativeType, error) {
nt, err := newNativeType(rawType)
func newNativeTypes(fieldNameHandler NativeTypesFieldNameHandler, rawType reflect.Type) ([]*nativeType, error) {
nt, err := newNativeType(fieldNameHandler, rawType)
if err != nil {
return nil, err
}
@@ -489,7 +604,7 @@ func newNativeTypes(rawType reflect.Type) ([]*nativeType, error) {
return
}
alreadySeen[t.String()] = struct{}{}
nt, ntErr := newNativeType(t)
nt, ntErr := newNativeType(fieldNameHandler, t)
if ntErr != nil {
err = ntErr
return
@@ -505,7 +620,11 @@ func newNativeTypes(rawType reflect.Type) ([]*nativeType, error) {
return result, err
}
func newNativeType(rawType reflect.Type) (*nativeType, error) {
var (
errDuplicatedFieldName = errors.New("field name already exists in struct")
)
func newNativeType(fieldNameHandler NativeTypesFieldNameHandler, rawType reflect.Type) (*nativeType, error) {
refType := rawType
if refType.Kind() == reflect.Pointer {
refType = refType.Elem()
@@ -513,15 +632,34 @@ func newNativeType(rawType reflect.Type) (*nativeType, error) {
if !isValidObjectType(refType) {
return nil, fmt.Errorf("unsupported reflect.Type %v, must be reflect.Struct", rawType)
}
// Since naming collisions can only happen with struct tag parsing, we only check for them if it is enabled.
if fieldNameHandler != nil {
fieldNames := make(map[string]struct{})
for idx := 0; idx < refType.NumField(); idx++ {
field := refType.Field(idx)
fieldName := toFieldName(fieldNameHandler, field)
if _, found := fieldNames[fieldName]; found {
return nil, fmt.Errorf("invalid field name `%s` in struct `%s`: %w", fieldName, refType.Name(), errDuplicatedFieldName)
} else {
fieldNames[fieldName] = struct{}{}
}
}
}
return &nativeType{
typeName: fmt.Sprintf("%s.%s", simplePkgAlias(refType.PkgPath()), refType.Name()),
refType: refType,
typeName: fmt.Sprintf("%s.%s", simplePkgAlias(refType.PkgPath()), refType.Name()),
refType: refType,
fieldNameHandler: fieldNameHandler,
}, nil
}
type nativeType struct {
typeName string
refType reflect.Type
typeName string
refType reflect.Type
fieldNameHandler NativeTypesFieldNameHandler
}
// ConvertToNative implements ref.Val.ConvertToNative.
@@ -569,9 +707,26 @@ func (t *nativeType) Value() any {
return t.typeName
}
// fieldByName returns the corresponding reflect.StructField for the given name, matching either
// the field tag or the field name.
func (t *nativeType) fieldByName(fieldName string) (reflect.StructField, bool) {
if t.fieldNameHandler == nil {
return t.refType.FieldByName(fieldName)
}
for i := 0; i < t.refType.NumField(); i++ {
f := t.refType.Field(i)
if toFieldName(t.fieldNameHandler, f) == fieldName {
return f, true
}
}
return reflect.StructField{}, false
}
// hasField returns whether a field name has a corresponding Golang reflect.StructField
func (t *nativeType) hasField(fieldName string) (reflect.StructField, bool) {
f, found := t.refType.FieldByName(fieldName)
f, found := t.fieldByName(fieldName)
if !found || !f.IsExported() || !isSupportedType(f.Type) {
return reflect.StructField{}, false
}
@@ -579,21 +734,16 @@ func (t *nativeType) hasField(fieldName string) (reflect.StructField, bool) {
}
func adaptFieldValue(adapter types.Adapter, refField reflect.Value) ref.Val {
return adapter.NativeToValue(getFieldValue(adapter, refField))
return adapter.NativeToValue(getFieldValue(refField))
}
func getFieldValue(adapter types.Adapter, refField reflect.Value) any {
func getFieldValue(refField reflect.Value) any {
if refField.IsZero() {
switch refField.Kind() {
case reflect.Array, reflect.Slice:
return types.NewDynamicList(adapter, []ref.Val{})
case reflect.Map:
return types.NewDynamicMap(adapter, map[ref.Val]ref.Val{})
case reflect.Struct:
if refField.Type() == timestampType {
return types.Timestamp{Time: time.Unix(0, 0)}
return time.Unix(0, 0)
}
return reflect.New(refField.Type()).Elem().Interface()
case reflect.Pointer:
return reflect.New(refField.Type().Elem()).Interface()
}

View File

@@ -119,7 +119,8 @@ const (
// 'hello mellow'.indexOf('jello') // returns -1
// 'hello mellow'.indexOf('', 2) // returns 2
// 'hello mellow'.indexOf('ello', 2) // returns 7
// 'hello mellow'.indexOf('ello', 20) // error
// 'hello mellow'.indexOf('ello', 20) // returns -1
// 'hello mellow'.indexOf('ello', -1) // error
//
// # Join
//
@@ -155,6 +156,7 @@ const (
// 'hello mellow'.lastIndexOf('ello') // returns 7
// 'hello mellow'.lastIndexOf('jello') // returns -1
// 'hello mellow'.lastIndexOf('ello', 6) // returns 1
// 'hello mellow'.lastIndexOf('ello', 20) // returns -1
// 'hello mellow'.lastIndexOf('ello', -1) // error
//
// # LowerAscii
@@ -520,7 +522,7 @@ func (lib *stringLib) CompileOptions() []cel.EnvOption {
if lib.version >= 3 {
opts = append(opts,
cel.Function("reverse",
cel.MemberOverload("reverse", []*cel.Type{cel.StringType}, cel.StringType,
cel.MemberOverload("string_reverse", []*cel.Type{cel.StringType}, cel.StringType,
cel.UnaryBinding(func(str ref.Val) ref.Val {
s := str.(types.String)
return stringOrError(reverse(string(s)))
@@ -561,9 +563,13 @@ func indexOfOffset(str, substr string, offset int64) (int64, error) {
off := int(offset)
runes := []rune(str)
subrunes := []rune(substr)
if off < 0 || off >= len(runes) {
if off < 0 {
return -1, fmt.Errorf("index out of range: %d", off)
}
// If the offset exceeds the length, return -1 rather than error.
if off >= len(runes) {
return -1, nil
}
for i := off; i < len(runes)-(len(subrunes)-1); i++ {
found := true
for j := 0; j < len(subrunes); j++ {
@@ -594,9 +600,13 @@ func lastIndexOfOffset(str, substr string, offset int64) (int64, error) {
off := int(offset)
runes := []rune(str)
subrunes := []rune(substr)
if off < 0 || off >= len(runes) {
if off < 0 {
return -1, fmt.Errorf("index out of range: %d", off)
}
// If the offset exceeds the length, return -1 rather than error.
if off >= len(runes) {
return -1, nil
}
if off > len(runes)-len(subrunes) {
off = len(runes) - len(subrunes)
}
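At the expression level, the offset handling above now behaves as follows (a sketch assuming the strings extension is enabled at a version that includes these overloads):

'hello mellow'.indexOf('ello', 20)     // returns -1 (previously an error)
'hello mellow'.lastIndexOf('ello', 20) // returns -1 (previously an error)
'hello mellow'.indexOf('ello', -1)     // still an error: negative offset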

View File

@@ -17,7 +17,6 @@ package interpreter
import (
"errors"
"fmt"
"sync"
"github.com/google/cel-go/common/types/ref"
)
@@ -167,35 +166,3 @@ type partActivation struct {
func (a *partActivation) UnknownAttributePatterns() []*AttributePattern {
return a.unknowns
}
// varActivation represents a single mutable variable binding.
//
// This activation type should only be used within folds as the fold loop controls the object
// life-cycle.
type varActivation struct {
parent Activation
name string
val ref.Val
}
// Parent implements the Activation interface method.
func (v *varActivation) Parent() Activation {
return v.parent
}
// ResolveName implements the Activation interface method.
func (v *varActivation) ResolveName(name string) (any, bool) {
if name == v.name {
return v.val, true
}
return v.parent.ResolveName(name)
}
var (
// pool of var activations to reduce allocations during folds.
varActivationPool = &sync.Pool{
New: func() any {
return &varActivation{}
},
}
)

View File

@@ -178,10 +178,8 @@ func numericValueEquals(value any, celValue ref.Val) bool {
// NewPartialAttributeFactory returns an AttributeFactory implementation capable of performing
// AttributePattern matches with PartialActivation inputs.
func NewPartialAttributeFactory(container *containers.Container,
adapter types.Adapter,
provider types.Provider) AttributeFactory {
fac := NewAttributeFactory(container, adapter, provider)
func NewPartialAttributeFactory(container *containers.Container, adapter types.Adapter, provider types.Provider, opts ...AttrFactoryOption) AttributeFactory {
fac := NewAttributeFactory(container, adapter, provider, opts...)
return &partialAttributeFactory{
AttributeFactory: fac,
container: container,

View File

@@ -126,21 +126,39 @@ type NamespacedAttribute interface {
Qualifiers() []Qualifier
}
// AttrFactoryOption specifies a functional option for configuring an attribute factory.
type AttrFactoryOption func(*attrFactory) *attrFactory
// EnableErrorOnBadPresenceTest error generation when a presence test or optional field selection
// is performed on a primitive type.
func EnableErrorOnBadPresenceTest(value bool) AttrFactoryOption {
return func(fac *attrFactory) *attrFactory {
fac.errorOnBadPresenceTest = value
return fac
}
}
// NewAttributeFactory returns a default AttributeFactory which produces Attribute values
// capable of resolving types by simple names and qualifying the values using the supported
// qualifier types: bool, int, string, and uint.
func NewAttributeFactory(cont *containers.Container, a types.Adapter, p types.Provider) AttributeFactory {
return &attrFactory{
func NewAttributeFactory(cont *containers.Container, a types.Adapter, p types.Provider, opts ...AttrFactoryOption) AttributeFactory {
fac := &attrFactory{
container: cont,
adapter: a,
provider: p,
}
for _, o := range opts {
fac = o(fac)
}
return fac
}
type attrFactory struct {
container *containers.Container
adapter types.Adapter
provider types.Provider
errorOnBadPresenceTest bool
}
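A hedged construction sketch for the new option; the registry and default container come from the surrounding cel-go packages and are assumptions, not part of this diff:

package main

import (
	"github.com/google/cel-go/common/containers"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/interpreter"
)

func main() {
	// A registry serves as both the type adapter and the type provider.
	reg := types.NewEmptyRegistry()
	fac := interpreter.NewAttributeFactory(
		containers.DefaultContainer,
		reg,
		reg,
		// Presence tests on primitive types now raise errors instead of returning false.
		interpreter.EnableErrorOnBadPresenceTest(true),
	)
	// Attributes produced by this factory carry the stricter presence-test behavior.
	attr := fac.AbsoluteAttribute(1, "request")
	_ = attr
}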
// AbsoluteAttribute refers to a variable value and an optional qualifier path.
@@ -149,12 +167,13 @@ type attrFactory struct {
// resolution rules.
func (r *attrFactory) AbsoluteAttribute(id int64, names ...string) NamespacedAttribute {
return &absoluteAttribute{
id: id,
namespaceNames: names,
qualifiers: []Qualifier{},
adapter: r.adapter,
provider: r.provider,
fac: r,
id: id,
namespaceNames: names,
qualifiers: []Qualifier{},
adapter: r.adapter,
provider: r.provider,
fac: r,
errorOnBadPresenceTest: r.errorOnBadPresenceTest,
}
}
@@ -188,11 +207,12 @@ func (r *attrFactory) MaybeAttribute(id int64, name string) Attribute {
// RelativeAttribute refers to an expression and an optional qualifier path.
func (r *attrFactory) RelativeAttribute(id int64, operand Interpretable) Attribute {
return &relativeAttribute{
id: id,
operand: operand,
qualifiers: []Qualifier{},
adapter: r.adapter,
fac: r,
id: id,
operand: operand,
qualifiers: []Qualifier{},
adapter: r.adapter,
fac: r,
errorOnBadPresenceTest: r.errorOnBadPresenceTest,
}
}
@@ -214,7 +234,7 @@ func (r *attrFactory) NewQualifier(objType *types.Type, qualID int64, val any, o
}, nil
}
}
return newQualifier(r.adapter, qualID, val, opt)
return newQualifier(r.adapter, qualID, val, opt, r.errorOnBadPresenceTest)
}
type absoluteAttribute struct {
@@ -226,6 +246,8 @@ type absoluteAttribute struct {
adapter types.Adapter
provider types.Provider
fac AttributeFactory
errorOnBadPresenceTest bool
}
// ID implements the Attribute interface method.
@@ -514,6 +536,8 @@ type relativeAttribute struct {
qualifiers []Qualifier
adapter types.Adapter
fac AttributeFactory
errorOnBadPresenceTest bool
}
// ID is an implementation of the Attribute interface method.
@@ -577,7 +601,7 @@ func (a *relativeAttribute) String() string {
return fmt.Sprintf("id: %v, operand: %v", a.id, a.operand)
}
func newQualifier(adapter types.Adapter, id int64, v any, opt bool) (Qualifier, error) {
func newQualifier(adapter types.Adapter, id int64, v any, opt, errorOnBadPresenceTest bool) (Qualifier, error) {
var qual Qualifier
switch val := v.(type) {
case Attribute:
@@ -592,71 +616,138 @@ func newQualifier(adapter types.Adapter, id int64, v any, opt bool) (Qualifier,
}, nil
case string:
qual = &stringQualifier{
id: id,
value: val,
celValue: types.String(val),
adapter: adapter,
optional: opt,
id: id,
value: val,
celValue: types.String(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case int:
qual = &intQualifier{
id: id, value: int64(val), celValue: types.Int(val), adapter: adapter, optional: opt,
id: id,
value: int64(val),
celValue: types.Int(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case int32:
qual = &intQualifier{
id: id, value: int64(val), celValue: types.Int(val), adapter: adapter, optional: opt,
id: id,
value: int64(val),
celValue: types.Int(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case int64:
qual = &intQualifier{
id: id, value: val, celValue: types.Int(val), adapter: adapter, optional: opt,
id: id,
value: val,
celValue: types.Int(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case uint:
qual = &uintQualifier{
id: id, value: uint64(val), celValue: types.Uint(val), adapter: adapter, optional: opt,
id: id,
value: uint64(val),
celValue: types.Uint(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case uint32:
qual = &uintQualifier{
id: id, value: uint64(val), celValue: types.Uint(val), adapter: adapter, optional: opt,
id: id,
value: uint64(val),
celValue: types.Uint(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case uint64:
qual = &uintQualifier{
id: id, value: val, celValue: types.Uint(val), adapter: adapter, optional: opt,
id: id,
value: val,
celValue: types.Uint(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case bool:
qual = &boolQualifier{
id: id, value: val, celValue: types.Bool(val), adapter: adapter, optional: opt,
id: id,
value: val,
celValue: types.Bool(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case float32:
qual = &doubleQualifier{
id: id,
value: float64(val),
celValue: types.Double(val),
adapter: adapter,
optional: opt,
id: id,
value: float64(val),
celValue: types.Double(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case float64:
qual = &doubleQualifier{
id: id, value: val, celValue: types.Double(val), adapter: adapter, optional: opt,
id: id,
value: val,
celValue: types.Double(val),
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case types.String:
qual = &stringQualifier{
id: id, value: string(val), celValue: val, adapter: adapter, optional: opt,
id: id,
value: string(val),
celValue: val,
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case types.Int:
qual = &intQualifier{
id: id, value: int64(val), celValue: val, adapter: adapter, optional: opt,
id: id,
value: int64(val),
celValue: val,
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case types.Uint:
qual = &uintQualifier{
id: id, value: uint64(val), celValue: val, adapter: adapter, optional: opt,
id: id,
value: uint64(val),
celValue: val,
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case types.Bool:
qual = &boolQualifier{
id: id, value: bool(val), celValue: val, adapter: adapter, optional: opt,
id: id,
value: bool(val),
celValue: val,
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case types.Double:
qual = &doubleQualifier{
id: id, value: float64(val), celValue: val, adapter: adapter, optional: opt,
id: id,
value: float64(val),
celValue: val,
adapter: adapter,
optional: opt,
errorOnBadPresenceTest: errorOnBadPresenceTest,
}
case *types.Unknown:
qual = &unknownQualifier{id: id, value: val}
@@ -687,11 +778,12 @@ func (q *attrQualifier) IsOptional() bool {
}
type stringQualifier struct {
id int64
value string
celValue ref.Val
adapter types.Adapter
optional bool
id int64
value string
celValue ref.Val
adapter types.Adapter
optional bool
errorOnBadPresenceTest bool
}
// ID is an implementation of the Qualifier interface method.
@@ -774,7 +866,7 @@ func (q *stringQualifier) qualifyInternal(vars Activation, obj any, presenceTest
return obj, true, nil
}
default:
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly, q.errorOnBadPresenceTest)
}
if presenceTest {
return nil, false, nil
@@ -788,11 +880,12 @@ func (q *stringQualifier) Value() ref.Val {
}
type intQualifier struct {
id int64
value int64
celValue ref.Val
adapter types.Adapter
optional bool
id int64
value int64
celValue ref.Val
adapter types.Adapter
optional bool
errorOnBadPresenceTest bool
}
// ID is an implementation of the Qualifier interface method.
@@ -898,7 +991,7 @@ func (q *intQualifier) qualifyInternal(vars Activation, obj any, presenceTest, p
return o[i], true, nil
}
default:
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly, q.errorOnBadPresenceTest)
}
if presenceTest {
return nil, false, nil
@@ -915,11 +1008,12 @@ func (q *intQualifier) Value() ref.Val {
}
type uintQualifier struct {
id int64
value uint64
celValue ref.Val
adapter types.Adapter
optional bool
id int64
value uint64
celValue ref.Val
adapter types.Adapter
optional bool
errorOnBadPresenceTest bool
}
// ID is an implementation of the Qualifier interface method.
@@ -966,7 +1060,7 @@ func (q *uintQualifier) qualifyInternal(vars Activation, obj any, presenceTest,
return obj, true, nil
}
default:
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly, q.errorOnBadPresenceTest)
}
if presenceTest {
return nil, false, nil
@@ -980,11 +1074,12 @@ func (q *uintQualifier) Value() ref.Val {
}
type boolQualifier struct {
id int64
value bool
celValue ref.Val
adapter types.Adapter
optional bool
id int64
value bool
celValue ref.Val
adapter types.Adapter
optional bool
errorOnBadPresenceTest bool
}
// ID is an implementation of the Qualifier interface method.
@@ -1017,7 +1112,7 @@ func (q *boolQualifier) qualifyInternal(vars Activation, obj any, presenceTest,
return obj, true, nil
}
default:
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly, q.errorOnBadPresenceTest)
}
if presenceTest {
return nil, false, nil
@@ -1092,11 +1187,12 @@ func (q *fieldQualifier) Value() ref.Val {
// type may not be known ahead of time and may not conform to the standard types supported as valid
// protobuf map key types.
type doubleQualifier struct {
id int64
value float64
celValue ref.Val
adapter types.Adapter
optional bool
id int64
value float64
celValue ref.Val
adapter types.Adapter
optional bool
errorOnBadPresenceTest bool
}
// ID is an implementation of the Qualifier interface method.
@@ -1120,7 +1216,7 @@ func (q *doubleQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnl
}
func (q *doubleQualifier) qualifyInternal(vars Activation, obj any, presenceTest, presenceOnly bool) (any, bool, error) {
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly, q.errorOnBadPresenceTest)
}
// Value implements the ConstantQualifier interface
@@ -1226,7 +1322,7 @@ func attrQualifyIfPresent(fac AttributeFactory, vars Activation, obj any, qualAt
// refQualify attempts to convert the value to a CEL value and then uses reflection methods to try and
// apply the qualifier with the option to presence test field accesses before retrieving field values.
func refQualify(adapter types.Adapter, obj any, idx ref.Val, presenceTest, presenceOnly bool) (ref.Val, bool, error) {
func refQualify(adapter types.Adapter, obj any, idx ref.Val, presenceTest, presenceOnly, errorOnBadPresenceTest bool) (ref.Val, bool, error) {
celVal := adapter.NativeToValue(obj)
switch v := celVal.(type) {
case *types.Unknown:
@@ -1283,7 +1379,7 @@ func refQualify(adapter types.Adapter, obj any, idx ref.Val, presenceTest, prese
}
return val, true, nil
default:
if presenceTest {
if presenceTest && !errorOnBadPresenceTest {
return nil, false, nil
}
return nil, false, missingKey(idx)

View File

@@ -16,6 +16,7 @@ package interpreter
import (
"fmt"
"sync"
"github.com/google/cel-go/common/functions"
"github.com/google/cel-go/common/operators"
@@ -96,7 +97,7 @@ type InterpretableCall interface {
Args() []Interpretable
}
// InterpretableConstructor interface for inspecting Interpretable instructions that initialize a list, map
// InterpretableConstructor interface for inspecting Interpretable instructions that initialize a list, map
// or struct.
type InterpretableConstructor interface {
Interpretable
@@ -720,24 +721,31 @@ func (o *evalObj) Eval(ctx Activation) ref.Val {
return types.LabelErrNode(o.id, o.provider.NewValue(o.typeName, fieldVals))
}
// InitVals implements the InterpretableConstructor interface method.
func (o *evalObj) InitVals() []Interpretable {
return o.vals
}
// Type implements the InterpretableConstructor interface method.
func (o *evalObj) Type() ref.Type {
return types.NewObjectTypeValue(o.typeName)
return types.NewObjectType(o.typeName)
}
type evalFold struct {
id int64
accuVar string
iterVar string
iterRange Interpretable
accu Interpretable
cond Interpretable
step Interpretable
result Interpretable
adapter types.Adapter
id int64
accuVar string
iterVar string
iterVar2 string
iterRange Interpretable
accu Interpretable
cond Interpretable
step Interpretable
result Interpretable
adapter types.Adapter
// note: an exhaustive fold will ensure that all branches are evaluated;
// when using mutable values, these branches will mutate the final result
// rather than make a throw-away computation.
exhaustive bool
interruptable bool
}
@@ -749,64 +757,30 @@ func (fold *evalFold) ID() int64 {
// Eval implements the Interpretable interface method.
func (fold *evalFold) Eval(ctx Activation) ref.Val {
// Initialize the folder interface
f := newFolder(fold, ctx)
defer releaseFolder(f)
foldRange := fold.iterRange.Eval(ctx)
if fold.iterVar2 != "" {
var foldable traits.Foldable
switch r := foldRange.(type) {
case traits.Mapper:
foldable = types.ToFoldableMap(r)
case traits.Lister:
foldable = types.ToFoldableList(r)
default:
return types.NewErrWithNodeID(fold.ID(), "unsupported comprehension range type: %T", foldRange)
}
foldable.Fold(f)
return f.evalResult()
}
if !foldRange.Type().HasTrait(traits.IterableType) {
return types.ValOrErr(foldRange, "got '%T', expected iterable type", foldRange)
}
// Configure the fold activation with the accumulator initial value.
accuCtx := varActivationPool.Get().(*varActivation)
accuCtx.parent = ctx
accuCtx.name = fold.accuVar
accuCtx.val = fold.accu.Eval(ctx)
// If the accumulator starts as an empty list, then the comprehension will build a list
// so create a mutable list to optimize the cost of the inner loop.
l, ok := accuCtx.val.(traits.Lister)
buildingList := false
if !fold.exhaustive && ok && l.Size() == types.IntZero {
buildingList = true
accuCtx.val = types.NewMutableList(fold.adapter)
}
iterCtx := varActivationPool.Get().(*varActivation)
iterCtx.parent = accuCtx
iterCtx.name = fold.iterVar
interrupted := false
it := foldRange.(traits.Iterable).Iterator()
for it.HasNext() == types.True {
// Modify the iter var in the fold activation.
iterCtx.val = it.Next()
// Evaluate the condition, terminate the loop if false.
cond := fold.cond.Eval(iterCtx)
condBool, ok := cond.(types.Bool)
if !fold.exhaustive && ok && condBool != types.True {
break
}
// Evaluate the evaluation step into accu var.
accuCtx.val = fold.step.Eval(iterCtx)
if fold.interruptable {
if stop, found := ctx.ResolveName("#interrupted"); found && stop == true {
interrupted = true
break
}
}
}
varActivationPool.Put(iterCtx)
if interrupted {
varActivationPool.Put(accuCtx)
return types.NewErr("operation interrupted")
}
// Compute the result.
res := fold.result.Eval(accuCtx)
varActivationPool.Put(accuCtx)
// Convert a mutable list to an immutable one, if the comprehension has generated a list as a result.
if !types.IsUnknownOrError(res) && buildingList {
if _, ok := res.(traits.MutableLister); ok {
res = res.(traits.MutableLister).ToImmutableList()
}
}
return res
iterable := foldRange.(traits.Iterable)
return f.foldIterable(iterable)
}
// Optional Interpretable implementations that specialize, subsume, or extend the core evaluation
@@ -1262,3 +1236,172 @@ func invalidOptionalEntryInit(field any, value ref.Val) ref.Val {
func invalidOptionalElementInit(value ref.Val) ref.Val {
return types.NewErr("cannot initialize optional list element from non-optional value %v", value)
}
// newFolder creates or initializes a pooled folder instance.
func newFolder(eval *evalFold, ctx Activation) *folder {
f := folderPool.Get().(*folder)
f.evalFold = eval
f.Activation = ctx
return f
}
// releaseFolder resets and releases a pooled folder instance.
func releaseFolder(f *folder) {
f.reset()
folderPool.Put(f)
}
// folder tracks the state associated with folding a list or map with a comprehension v2 style macro.
//
// The folder embeds an interpreter.Activation and Interpretable evalFold value as well as implements
// the traits.Folder interface methods.
//
// Instances of a folder are intended to be pooled to minimize allocation overhead with this temporary
// bookkeeping object which supports lazy evaluation of the accumulator init expression which is useful
// in preserving evaluation order semantics which might otherwise be disrupted through the use of
// cel.bind or cel.@block.
type folder struct {
*evalFold
Activation
// fold state objects.
accuVal ref.Val
iterVar1Val any
iterVar2Val any
// bookkeeping flags to modify Activation and fold behaviors.
initialized bool
mutableValue bool
interrupted bool
computeResult bool
}
func (f *folder) foldIterable(iterable traits.Iterable) ref.Val {
it := iterable.Iterator()
for it.HasNext() == types.True {
f.iterVar1Val = it.Next()
cond := f.cond.Eval(f)
condBool, ok := cond.(types.Bool)
if f.interrupted || (!f.exhaustive && ok && condBool != types.True) {
return f.evalResult()
}
// Update the accumulation value and check for eval interruption.
f.accuVal = f.step.Eval(f)
f.initialized = true
if f.interruptable && checkInterrupt(f.Activation) {
f.interrupted = true
return f.evalResult()
}
}
return f.evalResult()
}
// FoldEntry will either fold comprehension v1 style macros if iterVar2 is unset, or comprehension v2 style
// macros if both the iterVar and iterVar2 are set to non-empty strings.
func (f *folder) FoldEntry(key, val any) bool {
// Default to referencing both values.
f.iterVar1Val = key
f.iterVar2Val = val
// Terminate evaluation if evaluation is interrupted or the condition is not true and exhaustive
// eval is not enabled.
cond := f.cond.Eval(f)
condBool, ok := cond.(types.Bool)
if f.interrupted || (!f.exhaustive && ok && condBool != types.True) {
return false
}
// Update the accumulation value and check for eval interruption.
f.accuVal = f.step.Eval(f)
f.initialized = true
if f.interruptable && checkInterrupt(f.Activation) {
f.interrupted = true
return false
}
return true
}
// ResolveName overrides the default Activation lookup to perform lazy initialization of the accumulator
// and specialized lookups of iteration values with consideration for whether the final result is being
// computed and the iteration variables should be ignored.
func (f *folder) ResolveName(name string) (any, bool) {
if name == f.accuVar {
if !f.initialized {
f.initialized = true
initVal := f.accu.Eval(f.Activation)
if !f.exhaustive {
if l, isList := initVal.(traits.Lister); isList && l.Size() == types.IntZero {
initVal = types.NewMutableList(f.adapter)
f.mutableValue = true
}
if m, isMap := initVal.(traits.Mapper); isMap && m.Size() == types.IntZero {
initVal = types.NewMutableMap(f.adapter, map[ref.Val]ref.Val{})
f.mutableValue = true
}
}
f.accuVal = initVal
}
return f.accuVal, true
}
if !f.computeResult {
if name == f.iterVar {
f.iterVar1Val = f.adapter.NativeToValue(f.iterVar1Val)
return f.iterVar1Val, true
}
if name == f.iterVar2 {
f.iterVar2Val = f.adapter.NativeToValue(f.iterVar2Val)
return f.iterVar2Val, true
}
}
return f.Activation.ResolveName(name)
}
// evalResult computes the final result of the fold after all entries have been folded and accumulated.
func (f *folder) evalResult() ref.Val {
f.computeResult = true
if f.interrupted {
return types.NewErr("operation interrupted")
}
res := f.result.Eval(f)
// Convert a mutable list or map to an immutable one if the comprehension has generated a list or
// map as a result.
if !types.IsUnknownOrError(res) && f.mutableValue {
if _, ok := res.(traits.MutableLister); ok {
res = res.(traits.MutableLister).ToImmutableList()
}
if _, ok := res.(traits.MutableMapper); ok {
res = res.(traits.MutableMapper).ToImmutableMap()
}
}
return res
}
// reset clears any state associated with folder evaluation.
func (f *folder) reset() {
f.evalFold = nil
f.Activation = nil
f.accuVal = nil
f.iterVar1Val = nil
f.iterVar2Val = nil
f.initialized = false
f.mutableValue = false
f.interrupted = false
f.computeResult = false
}
func checkInterrupt(a Activation) bool {
stop, found := a.ResolveName("#interrupted")
return found && stop == true
}
var (
// pool of var folders to reduce allocations during folds.
folderPool = &sync.Pool{
New: func() any {
return &folder{}
},
}
)

View File

@@ -603,6 +603,7 @@ func (p *planner) planComprehension(expr ast.Expr) (Interpretable, error) {
accuVar: fold.AccuVar(),
accu: accu,
iterVar: fold.IterVar(),
iterVar2: fold.IterVar2(),
iterRange: iterRange,
cond: cond,
step: step,

View File

@@ -1,7 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
package(
default_visibility = ["//parser:__subpackages__"],
default_visibility = ["//:__subpackages__"],
licenses = ["notice"], # Apache 2.0
)

View File

@@ -115,7 +115,7 @@ func (p *parserHelper) newObjectField(fieldID int64, field string, value ast.Exp
func (p *parserHelper) newComprehension(ctx any,
iterRange ast.Expr,
iterVar string,
iterVar,
accuVar string,
accuInit ast.Expr,
condition ast.Expr,
@@ -125,6 +125,18 @@ func (p *parserHelper) newComprehension(ctx any,
p.newID(ctx), iterRange, iterVar, accuVar, accuInit, condition, step, result)
}
func (p *parserHelper) newComprehensionTwoVar(ctx any,
iterRange ast.Expr,
iterVar, iterVar2,
accuVar string,
accuInit ast.Expr,
condition ast.Expr,
step ast.Expr,
result ast.Expr) ast.Expr {
return p.exprFactory.NewComprehensionTwoVar(
p.newID(ctx), iterRange, iterVar, iterVar2, accuVar, accuInit, condition, step, result)
}
func (p *parserHelper) newID(ctx any) int64 {
if id, isID := ctx.(int64); isID {
return id
@@ -140,15 +152,12 @@ func (p *parserHelper) id(ctx any) int64 {
var offset ast.OffsetRange
switch c := ctx.(type) {
case antlr.ParserRuleContext:
start, stop := c.GetStart(), c.GetStop()
if stop == nil {
stop = start
}
start := c.GetStart()
offset.Start = p.sourceInfo.ComputeOffset(int32(start.GetLine()), int32(start.GetColumn()))
offset.Stop = p.sourceInfo.ComputeOffset(int32(stop.GetLine()), int32(stop.GetColumn()))
offset.Stop = offset.Start + int32(len(c.GetText()))
case antlr.Token:
offset.Start = p.sourceInfo.ComputeOffset(int32(c.GetLine()), int32(c.GetColumn()))
offset.Stop = offset.Start
offset.Stop = offset.Start + int32(len(c.GetText()))
case common.Location:
offset.Start = p.sourceInfo.ComputeOffset(int32(c.Line()), int32(c.Column()))
offset.Stop = offset.Start
@@ -164,10 +173,21 @@ func (p *parserHelper) id(ctx any) int64 {
return id
}
func (p *parserHelper) deleteID(id int64) {
p.sourceInfo.ClearOffsetRange(id)
if id == p.nextID-1 {
p.nextID--
}
}
func (p *parserHelper) getLocation(id int64) common.Location {
return p.sourceInfo.GetStartLocation(id)
}
func (p *parserHelper) getLocationByOffset(offset int32) common.Location {
return p.getSourceInfo().GetLocationByOffset(offset)
}
// buildMacroCallArg iterates the expression and returns a new expression
// where all macros have been replaced by their IDs in MacroCalls
func (p *parserHelper) buildMacroCallArg(expr ast.Expr) ast.Expr {
@@ -375,8 +395,10 @@ func (e *exprHelper) Copy(expr ast.Expr) ast.Expr {
cond := e.Copy(compre.LoopCondition())
step := e.Copy(compre.LoopStep())
result := e.Copy(compre.Result())
return e.exprFactory.NewComprehension(copyID,
iterRange, compre.IterVar(), compre.AccuVar(), accuInit, cond, step, result)
// All comprehensions can be represented by the two-variable comprehension since the
// differentiation between one and two-variable is whether the iterVar2 value is non-empty.
return e.exprFactory.NewComprehensionTwoVar(copyID,
iterRange, compre.IterVar(), compre.IterVar2(), compre.AccuVar(), accuInit, cond, step, result)
}
return e.exprFactory.NewUnspecifiedExpr(copyID)
}
@@ -424,6 +446,20 @@ func (e *exprHelper) NewComprehension(
e.nextMacroID(), iterRange, iterVar, accuVar, accuInit, condition, step, result)
}
// NewComprehensionTwoVar implements the ExprHelper interface method.
func (e *exprHelper) NewComprehensionTwoVar(
iterRange ast.Expr,
iterVar,
iterVar2,
accuVar string,
accuInit,
condition,
step,
result ast.Expr) ast.Expr {
return e.exprFactory.NewComprehensionTwoVar(
e.nextMacroID(), iterRange, iterVar, iterVar2, accuVar, accuInit, condition, step, result)
}
// NewIdent implements the ExprHelper interface method.
func (e *exprHelper) NewIdent(name string) ast.Expr {
return e.exprFactory.NewIdent(e.nextMacroID(), name)

View File

@@ -170,11 +170,12 @@ type ExprHelper interface {
// NewStructField creates a new struct field initializer from the field name and value.
NewStructField(field string, init ast.Expr, optional bool) ast.EntryExpr
// NewComprehension creates a new comprehension instruction.
// NewComprehension creates a new one-variable comprehension instruction.
//
// - iterRange represents the expression that resolves to a list or map where the elements or
// keys (respectively) will be iterated over.
// - iterVar is the iteration variable name.
// - iterVar is the variable name for the list element value, or the map key, depending on the
// range type.
// - accuVar is the accumulation variable name, typically parser.AccumulatorName.
// - accuInit is the initial expression whose value will be set for the accuVar prior to
// folding.
@@ -186,11 +187,36 @@ type ExprHelper interface {
// environment in the step and condition expressions. Presently, the name __result__ is commonly
// used by built-in macros but this may change in the future.
NewComprehension(iterRange ast.Expr,
iterVar string,
iterVar,
accuVar string,
accuInit ast.Expr,
condition ast.Expr,
step ast.Expr,
accuInit,
condition,
step,
result ast.Expr) ast.Expr
// NewComprehensionTwoVar creates a new two-variable comprehension instruction.
//
// - iterRange represents the expression that resolves to a list or map where the elements or
// keys (respectively) will be iterated over.
// - iterVar is the iteration variable assigned to the list index or the map key.
// - iterVar2 is the iteration variable assigned to the list element value or the map key value.
// - accuVar is the accumulation variable name, typically parser.AccumulatorName.
// - accuInit is the initial expression whose value will be set for the accuVar prior to
// folding.
// - condition is the expression to test to determine whether to continue folding.
// - step is the expression to evaluate at the conclusion of a single fold iteration.
// - result is the computation to evaluate at the conclusion of the fold.
//
// The accuVar should not shadow variable names that you would like to reference within the
// environment in the step and condition expressions. Presently, the name __result__ is commonly
// used by built-in macros but this may change in the future.
NewComprehensionTwoVar(iterRange ast.Expr,
iterVar,
iterVar2,
accuVar string,
accuInit,
condition,
step,
result ast.Expr) ast.Expr
// NewIdent creates an identifier Expr value.
@@ -382,13 +408,11 @@ func makeQuantifier(kind quantifierKind, eh ExprHelper, target ast.Expr, args []
step = eh.NewCall(operators.LogicalOr, eh.NewAccuIdent(), args[1])
result = eh.NewAccuIdent()
case quantifierExistsOne:
zeroExpr := eh.NewLiteral(types.Int(0))
oneExpr := eh.NewLiteral(types.Int(1))
init = zeroExpr
init = eh.NewLiteral(types.Int(0))
condition = eh.NewLiteral(types.True)
step = eh.NewCall(operators.Conditional, args[1],
eh.NewCall(operators.Add, eh.NewAccuIdent(), oneExpr), eh.NewAccuIdent())
result = eh.NewCall(operators.Equals, eh.NewAccuIdent(), oneExpr)
eh.NewCall(operators.Add, eh.NewAccuIdent(), eh.NewLiteral(types.Int(1))), eh.NewAccuIdent())
result = eh.NewCall(operators.Equals, eh.NewAccuIdent(), eh.NewLiteral(types.Int(1)))
default:
return nil, eh.NewError(args[0].ID(), fmt.Sprintf("unrecognized quantifier '%v'", kind))
}
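
As an aside for readers following the new two-variable comprehension plumbing above: a custom macro can emit one through the same `ExprHelper` calls that `makeQuantifier` uses. The sketch below is hypothetical — the macro name (`allPairs`), its argument layout, and the use of `Kind()`/`AsIdent()` to pull the variable names out of the first two arguments are illustrative assumptions, not part of the vendored change; only `ExprHelper`, `parser.AccumulatorName`, the `operators` constants, and `types.True` come from the packages shown above.

```go
package example

import (
	"github.com/google/cel-go/common"
	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/operators"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/parser"
)

// expandAllPairs sketches how a hypothetical `m.allPairs(k, v, pred)` macro
// could expand into a two-variable comprehension that folds pred over every
// (key, value) entry of m with a logical AND.
func expandAllPairs(eh parser.ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
	if len(args) != 3 || args[0].Kind() != ast.IdentKind || args[1].Kind() != ast.IdentKind {
		return nil, eh.NewError(target.ID(), "allPairs requires two iteration variable names and a predicate")
	}
	iterVar := args[0].AsIdent()  // bound to the list index or map key
	iterVar2 := args[1].AsIdent() // bound to the element or map value
	pred := args[2]

	init := eh.NewLiteral(types.True)                                 // accumulator starts true
	cond := eh.NewLiteral(types.True)                                 // no short-circuit in this sketch
	step := eh.NewCall(operators.LogicalAnd, eh.NewAccuIdent(), pred) // __result__ && pred
	result := eh.NewAccuIdent()

	return eh.NewComprehensionTwoVar(
		target, iterVar, iterVar2, parser.AccumulatorName,
		init, cond, step, result), nil
}
```

In practice such an expander would be registered with the parser alongside the built-in macros; the condition here is a constant `true` (as in the `existsOne` case above), so the fold never short-circuits.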

View File

@@ -856,7 +856,8 @@ func (p *parser) reportError(ctx any, format string, args ...any) ast.Expr {
// ANTLR Parse listener implementations
func (p *parser) SyntaxError(recognizer antlr.Recognizer, offendingSymbol any, line, column int, msg string, e antlr.RecognitionException) {
l := p.helper.source.NewLocation(line, column)
offset := p.helper.sourceInfo.ComputeOffset(int32(line), int32(column))
l := p.helper.getLocationByOffset(offset)
// Hack to keep existing error messages consistent with previous versions of CEL when a reserved word
// is used as an identifier. This behavior needs to be overhauled to provide consistent, normalized error
// messages out of ANTLR to prevent future breaking changes related to error message content.
@@ -916,10 +917,12 @@ func (p *parser) expandMacro(exprID int64, function string, target ast.Expr, arg
expr, err := macro.Expander()(eh, target, args)
// An error indicates that the macro was matched, but the arguments were not well-formed.
if err != nil {
if err.Location != nil {
return p.reportError(err.Location, err.Message), true
loc := err.Location
if loc == nil {
loc = p.helper.getLocation(exprID)
}
return p.reportError(p.helper.getLocation(exprID), err.Message), true
p.helper.deleteID(exprID)
return p.reportError(loc, err.Message), true
}
// A nil value from the macro indicates that the macro implementation decided that
// an expansion should not be performed.
@@ -929,6 +932,7 @@ func (p *parser) expandMacro(exprID int64, function string, target ast.Expr, arg
if p.populateMacroCalls {
p.helper.addMacroCall(expr.ID(), function, target, args...)
}
p.helper.deleteID(exprID)
return expr, true
}

View File

@@ -1,12 +0,0 @@
version = 1
test_patterns = [
"*_test.go"
]
[[analyzers]]
name = "go"
enabled = true
[analyzers.meta]
import_path = "github.com/imdario/mergo"

View File

@@ -1,33 +0,0 @@
#### joe made this: http://goel.io/joe
#### go ####
# Binaries for programs and plugins
*.exe
*.dll
*.so
*.dylib
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
.glide/
#### vim ####
# Swap
[._]*.s[a-v][a-z]
[._]*.sw[a-p]
[._]s[a-v][a-z]
[._]sw[a-p]
# Session
Session.vim
# Temporary
.netrwhist
*~
# Auto-generated tag files
tags

View File

@@ -1,12 +0,0 @@
language: go
arch:
- amd64
- ppc64le
install:
- go get -t
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
script:
- go test -race -v ./...
after_script:
- $HOME/gopath/bin/goveralls -service=travis-ci -repotoken $COVERALLS_TOKEN

View File

@@ -1,46 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at i@dario.im. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

View File

@@ -1,112 +0,0 @@
<!-- omit in toc -->
# Contributing to mergo
First off, thanks for taking the time to contribute! ❤️
All types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉
> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:
> - Star the project
> - Tweet about it
> - Refer this project in your project's readme
> - Mention the project at local meetups and tell your friends/colleagues
<!-- omit in toc -->
## Table of Contents
- [Code of Conduct](#code-of-conduct)
- [I Have a Question](#i-have-a-question)
- [I Want To Contribute](#i-want-to-contribute)
- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
## Code of Conduct
This project and everyone participating in it is governed by the
[mergo Code of Conduct](https://github.com/imdario/mergoblob/master/CODE_OF_CONDUCT.md).
By participating, you are expected to uphold this code. Please report unacceptable behavior
to <>.
## I Have a Question
> If you want to ask a question, we assume that you have read the available [Documentation](https://pkg.go.dev/github.com/imdario/mergo).
Before you ask a question, it is best to search for existing [Issues](https://github.com/imdario/mergo/issues) that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue. It is also advisable to search the internet for answers first.
If you then still feel the need to ask a question and need clarification, we recommend the following:
- Open an [Issue](https://github.com/imdario/mergo/issues/new).
- Provide as much context as you can about what you're running into.
- Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant.
We will then take care of the issue as soon as possible.
## I Want To Contribute
> ### Legal Notice <!-- omit in toc -->
> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.
### Reporting Bugs
<!-- omit in toc -->
#### Before Submitting a Bug Report
A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.
- Make sure that you are using the latest version.
- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](). If you are looking for support, you might want to check [this section](#i-have-a-question)).
- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/imdario/mergoissues?q=label%3Abug).
- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
- Collect information about the bug:
- Stack trace (Traceback)
- OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
- Version of the interpreter, compiler, SDK, runtime environment, package manager, depending on what seems relevant.
- Possibly your input and the output
- Can you reliably reproduce the issue? And can you also reproduce it with older versions?
<!-- omit in toc -->
#### How Do I Submit a Good Bug Report?
> You must never report security related issues, vulnerabilities or bugs including sensitive information to the issue tracker, or elsewhere in public. Instead sensitive bugs must be sent by email to .
<!-- You may add a PGP key to allow the messages to be sent encrypted as well. -->
We use GitHub issues to track bugs and errors. If you run into an issue with the project:
- Open an [Issue](https://github.com/imdario/mergo/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
- Explain the behavior you would expect and the actual behavior.
- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
- Provide the information you collected in the previous section.
Once it's filed:
- The project team will label the issue accordingly.
- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced.
- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented by someone.
### Suggesting Enhancements
This section guides you through submitting an enhancement suggestion for mergo, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.
<!-- omit in toc -->
#### Before Submitting an Enhancement
- Make sure that you are using the latest version.
- Read the [documentation]() carefully and find out if the functionality is already covered, maybe by an individual configuration.
- Perform a [search](https://github.com/imdario/mergo/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
<!-- omit in toc -->
#### How Do I Submit a Good Enhancement Suggestion?
Enhancement suggestions are tracked as [GitHub issues](https://github.com/imdario/mergo/issues).
- Use a **clear and descriptive title** for the issue to identify the suggestion.
- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.
- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.
- You may want to **include screenshots and animated GIFs** which help you demonstrate the steps or point out the part which the suggestion is related to. You can use [this tool](https://www.cockos.com/licecap/) to record GIFs on macOS and Windows, and [this tool](https://github.com/colinkeenan/silentcast) or [this tool](https://github.com/GNOME/byzanz) on Linux. <!-- this should only be included if the project has a GUI -->
- **Explain why this enhancement would be useful** to most mergo users. You may also want to point out the other projects that solved it better and which could serve as inspiration.
<!-- omit in toc -->
## Attribution
This guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!

View File

@@ -1,28 +0,0 @@
Copyright (c) 2013 Dario Castañé. All rights reserved.
Copyright (c) 2012 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -1,242 +0,0 @@
# Mergo
[![GitHub release][5]][6]
[![GoCard][7]][8]
[![Test status][1]][2]
[![OpenSSF Scorecard][21]][22]
[![OpenSSF Best Practices][19]][20]
[![Coverage status][9]][10]
[![Sourcegraph][11]][12]
[![FOSSA status][13]][14]
[![GoDoc][3]][4]
[![Become my sponsor][15]][16]
[![Tidelift][17]][18]
[1]: https://github.com/imdario/mergo/workflows/tests/badge.svg?branch=master
[2]: https://github.com/imdario/mergo/actions/workflows/tests.yml
[3]: https://godoc.org/github.com/imdario/mergo?status.svg
[4]: https://godoc.org/github.com/imdario/mergo
[5]: https://img.shields.io/github/release/imdario/mergo.svg
[6]: https://github.com/imdario/mergo/releases
[7]: https://goreportcard.com/badge/imdario/mergo
[8]: https://goreportcard.com/report/github.com/imdario/mergo
[9]: https://coveralls.io/repos/github/imdario/mergo/badge.svg?branch=master
[10]: https://coveralls.io/github/imdario/mergo?branch=master
[11]: https://sourcegraph.com/github.com/imdario/mergo/-/badge.svg
[12]: https://sourcegraph.com/github.com/imdario/mergo?badge
[13]: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=shield
[14]: https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_shield
[15]: https://img.shields.io/github/sponsors/imdario
[16]: https://github.com/sponsors/imdario
[17]: https://tidelift.com/badges/package/go/github.com%2Fimdario%2Fmergo
[18]: https://tidelift.com/subscription/pkg/go-github.com-imdario-mergo
[19]: https://bestpractices.coreinfrastructure.org/projects/7177/badge
[20]: https://bestpractices.coreinfrastructure.org/projects/7177
[21]: https://api.securityscorecards.dev/projects/github.com/imdario/mergo/badge
[22]: https://api.securityscorecards.dev/projects/github.com/imdario/mergo
A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements.
Mergo merges same-type structs and maps by setting default values in zero-value fields. Mergo won't merge unexported (private) fields. It will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection).
Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the Province of Ancona in the Italian region of Marche.
## Status
It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, Microsoft, etc](https://github.com/imdario/mergo#mergo-in-the-wild).
### Important note
Please keep in mind that a problematic PR broke [0.3.9](//github.com/imdario/mergo/releases/tag/0.3.9). I reverted it in [0.3.10](//github.com/imdario/mergo/releases/tag/0.3.10), and I consider it stable but not bug-free. Also, this version adds support for go modules.
Keep in mind that in [0.3.2](//github.com/imdario/mergo/releases/tag/0.3.2), Mergo changed `Merge()`and `Map()` signatures to support [transformers](#transformers). I added an optional/variadic argument so that it won't break the existing code.
If you were using Mergo before April 6th, 2015, please check your project works as intended after updating your local copy with ```go get -u github.com/imdario/mergo```. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause in existing projects after the change (release 0.2.0).
### Donations
If Mergo is useful to you, consider buying me a coffee, a beer, or making a monthly donation to allow me to keep building great free software. :heart_eyes:
<a href='https://ko-fi.com/B0B58839' target='_blank'><img height='36' style='border:0px;height:36px;' src='https://az743702.vo.msecnd.net/cdn/kofi1.png?v=0' border='0' alt='Buy Me a Coffee at ko-fi.com' /></a>
<a href="https://liberapay.com/dario/donate"><img alt="Donate using Liberapay" src="https://liberapay.com/assets/widgets/donate.svg"></a>
<a href='https://github.com/sponsors/imdario' target='_blank'><img alt="Become my sponsor" src="https://img.shields.io/github/sponsors/imdario?style=for-the-badge" /></a>
### Mergo in the wild
- [moby/moby](https://github.com/moby/moby)
- [kubernetes/kubernetes](https://github.com/kubernetes/kubernetes)
- [vmware/dispatch](https://github.com/vmware/dispatch)
- [Shopify/themekit](https://github.com/Shopify/themekit)
- [imdario/zas](https://github.com/imdario/zas)
- [matcornic/hermes](https://github.com/matcornic/hermes)
- [OpenBazaar/openbazaar-go](https://github.com/OpenBazaar/openbazaar-go)
- [kataras/iris](https://github.com/kataras/iris)
- [michaelsauter/crane](https://github.com/michaelsauter/crane)
- [go-task/task](https://github.com/go-task/task)
- [sensu/uchiwa](https://github.com/sensu/uchiwa)
- [ory/hydra](https://github.com/ory/hydra)
- [sisatech/vcli](https://github.com/sisatech/vcli)
- [dairycart/dairycart](https://github.com/dairycart/dairycart)
- [projectcalico/felix](https://github.com/projectcalico/felix)
- [resin-os/balena](https://github.com/resin-os/balena)
- [go-kivik/kivik](https://github.com/go-kivik/kivik)
- [Telefonica/govice](https://github.com/Telefonica/govice)
- [supergiant/supergiant](supergiant/supergiant)
- [SergeyTsalkov/brooce](https://github.com/SergeyTsalkov/brooce)
- [soniah/dnsmadeeasy](https://github.com/soniah/dnsmadeeasy)
- [ohsu-comp-bio/funnel](https://github.com/ohsu-comp-bio/funnel)
- [EagerIO/Stout](https://github.com/EagerIO/Stout)
- [lynndylanhurley/defsynth-api](https://github.com/lynndylanhurley/defsynth-api)
- [russross/canvasassignments](https://github.com/russross/canvasassignments)
- [rdegges/cryptly-api](https://github.com/rdegges/cryptly-api)
- [casualjim/exeggutor](https://github.com/casualjim/exeggutor)
- [divshot/gitling](https://github.com/divshot/gitling)
- [RWJMurphy/gorl](https://github.com/RWJMurphy/gorl)
- [andrerocker/deploy42](https://github.com/andrerocker/deploy42)
- [elwinar/rambler](https://github.com/elwinar/rambler)
- [tmaiaroto/gopartman](https://github.com/tmaiaroto/gopartman)
- [jfbus/impressionist](https://github.com/jfbus/impressionist)
- [Jmeyering/zealot](https://github.com/Jmeyering/zealot)
- [godep-migrator/rigger-host](https://github.com/godep-migrator/rigger-host)
- [Dronevery/MultiwaySwitch-Go](https://github.com/Dronevery/MultiwaySwitch-Go)
- [thoas/picfit](https://github.com/thoas/picfit)
- [mantasmatelis/whooplist-server](https://github.com/mantasmatelis/whooplist-server)
- [jnuthong/item_search](https://github.com/jnuthong/item_search)
- [bukalapak/snowboard](https://github.com/bukalapak/snowboard)
- [containerssh/containerssh](https://github.com/containerssh/containerssh)
- [goreleaser/goreleaser](https://github.com/goreleaser/goreleaser)
- [tjpnz/structbot](https://github.com/tjpnz/structbot)
## Install
go get github.com/imdario/mergo
// use in your .go code
import (
"github.com/imdario/mergo"
)
## Usage
You can only merge same-type structs with exported fields initialized as zero value of their type and same-types maps. Mergo won't merge unexported (private) fields but will do recursively any exported one. It won't merge empty structs value as [they are zero values](https://golang.org/ref/spec#The_zero_value) too. Also, maps will be merged recursively except for structs inside maps (because they are not addressable using Go reflection).
```go
if err := mergo.Merge(&dst, src); err != nil {
// ...
}
```
Also, you can merge overwriting values using the transformer `WithOverride`.
```go
if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil {
// ...
}
```
Additionally, you can map a `map[string]interface{}` to a struct (and otherwise, from struct to map), following the same restrictions as in `Merge()`. Keys are capitalized to find each corresponding exported field.
```go
if err := mergo.Map(&dst, srcMap); err != nil {
// ...
}
```
Warning: if you map a struct to map, it won't do it recursively. Don't expect Mergo to map struct members of your struct as `map[string]interface{}`. They will be just assigned as values.
Here is a nice example:
```go
package main
import (
"fmt"
"github.com/imdario/mergo"
)
type Foo struct {
A string
B int64
}
func main() {
src := Foo{
A: "one",
B: 2,
}
dest := Foo{
A: "two",
}
mergo.Merge(&dest, src)
fmt.Println(dest)
// Will print
// {two 2}
}
```
Note: if tests are failing due to a missing package, please execute:
go get gopkg.in/yaml.v3
### Transformers
Transformers allow to merge specific types differently than in the default behavior. In other words, now you can customize how some types are merged. For example, `time.Time` is a struct; it doesn't have zero value but IsZero can return true because it has fields with zero value. How can we merge a non-zero `time.Time`?
```go
package main
import (
"fmt"
"github.com/imdario/mergo"
"reflect"
"time"
)
type timeTransformer struct {
}
func (t timeTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
if typ == reflect.TypeOf(time.Time{}) {
return func(dst, src reflect.Value) error {
if dst.CanSet() {
isZero := dst.MethodByName("IsZero")
result := isZero.Call([]reflect.Value{})
if result[0].Bool() {
dst.Set(src)
}
}
return nil
}
}
return nil
}
type Snapshot struct {
Time time.Time
// ...
}
func main() {
src := Snapshot{time.Now()}
dest := Snapshot{}
mergo.Merge(&dest, src, mergo.WithTransformers(timeTransformer{}))
fmt.Println(dest)
// Will print
// { 2018-01-12 01:15:00 +0000 UTC m=+0.000000001 }
}
```
## Contact me
If I can help you, you have an idea or you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): [@im_dario](https://twitter.com/im_dario)
## About
Written by [Dario Castañé](http://dario.im).
## License
[BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, as [Go language](http://golang.org/LICENSE).
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_large)

View File

@@ -1,14 +0,0 @@
# Security Policy
## Supported Versions
| Version | Supported |
| ------- | ------------------ |
| 0.3.x | :white_check_mark: |
| < 0.3 | :x: |
## Security contact information
To report a security vulnerability, please use the
[Tidelift security contact](https://tidelift.com/security).
Tidelift will coordinate the fix and disclosure.

View File

@@ -1,143 +0,0 @@
// Copyright 2013 Dario Castañé. All rights reserved.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements.
Mergo merges same-type structs and maps by setting default values in zero-value fields. Mergo won't merge unexported (private) fields. It will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection).
Status
It is ready for production use. It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc.
Important note
Please keep in mind that a problematic PR broke 0.3.9. We reverted it in 0.3.10. We consider 0.3.10 as stable but not bug-free. Also, this version adds support for go modules.
Keep in mind that in 0.3.2, Mergo changed Merge() and Map() signatures to support transformers. We added an optional/variadic argument so that it won't break the existing code.
If you were using Mergo before April 6th, 2015, please check your project works as intended after updating your local copy with go get -u github.com/imdario/mergo. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause in existing projects after the change (release 0.2.0).
Install
Do your usual installation procedure:
go get github.com/imdario/mergo
// use in your .go code
import (
"github.com/imdario/mergo"
)
Usage
You can only merge same-type structs with exported fields initialized as zero value of their type and same-types maps. Mergo won't merge unexported (private) fields but will do recursively any exported one. It won't merge empty structs value as they are zero values too. Also, maps will be merged recursively except for structs inside maps (because they are not addressable using Go reflection).
if err := mergo.Merge(&dst, src); err != nil {
// ...
}
Also, you can merge overwriting values using the transformer WithOverride.
if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil {
// ...
}
Additionally, you can map a map[string]interface{} to a struct (and otherwise, from struct to map), following the same restrictions as in Merge(). Keys are capitalized to find each corresponding exported field.
if err := mergo.Map(&dst, srcMap); err != nil {
// ...
}
Warning: if you map a struct to map, it won't do it recursively. Don't expect Mergo to map struct members of your struct as map[string]interface{}. They will be just assigned as values.
Here is a nice example:
package main
import (
"fmt"
"github.com/imdario/mergo"
)
type Foo struct {
A string
B int64
}
func main() {
src := Foo{
A: "one",
B: 2,
}
dest := Foo{
A: "two",
}
mergo.Merge(&dest, src)
fmt.Println(dest)
// Will print
// {two 2}
}
Transformers
Transformers allow to merge specific types differently than in the default behavior. In other words, now you can customize how some types are merged. For example, time.Time is a struct; it doesn't have zero value but IsZero can return true because it has fields with zero value. How can we merge a non-zero time.Time?
package main
import (
"fmt"
"github.com/imdario/mergo"
"reflect"
"time"
)
type timeTransformer struct {
}
func (t timeTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
if typ == reflect.TypeOf(time.Time{}) {
return func(dst, src reflect.Value) error {
if dst.CanSet() {
isZero := dst.MethodByName("IsZero")
result := isZero.Call([]reflect.Value{})
if result[0].Bool() {
dst.Set(src)
}
}
return nil
}
}
return nil
}
type Snapshot struct {
Time time.Time
// ...
}
func main() {
src := Snapshot{time.Now()}
dest := Snapshot{}
mergo.Merge(&dest, src, mergo.WithTransformers(timeTransformer{}))
fmt.Println(dest)
// Will print
// { 2018-01-12 01:15:00 +0000 UTC m=+0.000000001 }
}
Contact me
If I can help you, you have an idea or you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): https://twitter.com/im_dario
About
Written by Dario Castañé: https://da.rio.hn
License
BSD 3-Clause license, as Go language.
*/
package mergo

View File

@@ -1,178 +0,0 @@
// Copyright 2014 Dario Castañé. All rights reserved.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Based on src/pkg/reflect/deepequal.go from official
// golang's stdlib.
package mergo
import (
"fmt"
"reflect"
"unicode"
"unicode/utf8"
)
func changeInitialCase(s string, mapper func(rune) rune) string {
if s == "" {
return s
}
r, n := utf8.DecodeRuneInString(s)
return string(mapper(r)) + s[n:]
}
func isExported(field reflect.StructField) bool {
r, _ := utf8.DecodeRuneInString(field.Name)
return r >= 'A' && r <= 'Z'
}
// Traverses recursively both values, assigning src's fields values to dst.
// The map argument tracks comparisons that have already been seen, which allows
// short circuiting on recursive types.
func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) {
overwrite := config.Overwrite
if dst.CanAddr() {
addr := dst.UnsafeAddr()
h := 17 * addr
seen := visited[h]
typ := dst.Type()
for p := seen; p != nil; p = p.next {
if p.ptr == addr && p.typ == typ {
return nil
}
}
// Remember, remember...
visited[h] = &visit{typ, seen, addr}
}
zeroValue := reflect.Value{}
switch dst.Kind() {
case reflect.Map:
dstMap := dst.Interface().(map[string]interface{})
for i, n := 0, src.NumField(); i < n; i++ {
srcType := src.Type()
field := srcType.Field(i)
if !isExported(field) {
continue
}
fieldName := field.Name
fieldName = changeInitialCase(fieldName, unicode.ToLower)
if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v), !config.ShouldNotDereference) || overwrite) {
dstMap[fieldName] = src.Field(i).Interface()
}
}
case reflect.Ptr:
if dst.IsNil() {
v := reflect.New(dst.Type().Elem())
dst.Set(v)
}
dst = dst.Elem()
fallthrough
case reflect.Struct:
srcMap := src.Interface().(map[string]interface{})
for key := range srcMap {
config.overwriteWithEmptyValue = true
srcValue := srcMap[key]
fieldName := changeInitialCase(key, unicode.ToUpper)
dstElement := dst.FieldByName(fieldName)
if dstElement == zeroValue {
// We discard it because the field doesn't exist.
continue
}
srcElement := reflect.ValueOf(srcValue)
dstKind := dstElement.Kind()
srcKind := srcElement.Kind()
if srcKind == reflect.Ptr && dstKind != reflect.Ptr {
srcElement = srcElement.Elem()
srcKind = reflect.TypeOf(srcElement.Interface()).Kind()
} else if dstKind == reflect.Ptr {
// Can this work? I guess it can't.
if srcKind != reflect.Ptr && srcElement.CanAddr() {
srcPtr := srcElement.Addr()
srcElement = reflect.ValueOf(srcPtr)
srcKind = reflect.Ptr
}
}
if !srcElement.IsValid() {
continue
}
if srcKind == dstKind {
if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
return
}
} else if dstKind == reflect.Interface && dstElement.Kind() == reflect.Interface {
if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
return
}
} else if srcKind == reflect.Map {
if err = deepMap(dstElement, srcElement, visited, depth+1, config); err != nil {
return
}
} else {
return fmt.Errorf("type mismatch on %s field: found %v, expected %v", fieldName, srcKind, dstKind)
}
}
}
return
}
// Map sets fields' values in dst from src.
// src can be a map with string keys or a struct. dst must be the opposite:
// if src is a map, dst must be a valid pointer to struct. If src is a struct,
// dst must be map[string]interface{}.
// It won't merge unexported (private) fields and will do recursively
// any exported field.
// If dst is a map, keys will be src fields' names in lower camel case.
// Missing key in src that doesn't match a field in dst will be skipped. This
// doesn't apply if dst is a map.
// This is separated method from Merge because it is cleaner and it keeps sane
// semantics: merging equal types, mapping different (restricted) types.
func Map(dst, src interface{}, opts ...func(*Config)) error {
return _map(dst, src, opts...)
}
// MapWithOverwrite will do the same as Map except that non-empty dst attributes will be overridden by
// non-empty src attribute values.
// Deprecated: Use Map(…) with WithOverride
func MapWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
return _map(dst, src, append(opts, WithOverride)...)
}
func _map(dst, src interface{}, opts ...func(*Config)) error {
if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr {
return ErrNonPointerArgument
}
var (
vDst, vSrc reflect.Value
err error
)
config := &Config{}
for _, opt := range opts {
opt(config)
}
if vDst, vSrc, err = resolveValues(dst, src); err != nil {
return err
}
// To be friction-less, we redirect equal-type arguments
// to deepMerge. Only because arguments can be anything.
if vSrc.Kind() == vDst.Kind() {
return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config)
}
switch vSrc.Kind() {
case reflect.Struct:
if vDst.Kind() != reflect.Map {
return ErrExpectedMapAsDestination
}
case reflect.Map:
if vDst.Kind() != reflect.Struct {
return ErrExpectedStructAsDestination
}
default:
return ErrNotSupported
}
return deepMap(vDst, vSrc, make(map[uintptr]*visit), 0, config)
}
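
To make the `Map`/`_map` flow above concrete, here is a minimal, hypothetical usage sketch (the `account` struct and its field values are invented for illustration): keys of the source map are capitalized to locate the matching exported fields, as described in the doc comment on `Map`.

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

type account struct {
	Name  string
	Quota int64
}

func main() {
	// Lower-case keys are matched against exported fields by capitalizing them.
	src := map[string]interface{}{"name": "bob", "quota": int64(10)}

	var dst account
	if err := mergo.Map(&dst, src); err != nil {
		fmt.Println("map failed:", err)
		return
	}
	fmt.Println(dst) // expected to print: {bob 10}
}
```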

View File

@@ -1,409 +0,0 @@
// Copyright 2013 Dario Castañé. All rights reserved.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Based on src/pkg/reflect/deepequal.go from official
// golang's stdlib.
package mergo
import (
"fmt"
"reflect"
)
func hasMergeableFields(dst reflect.Value) (exported bool) {
for i, n := 0, dst.NumField(); i < n; i++ {
field := dst.Type().Field(i)
if field.Anonymous && dst.Field(i).Kind() == reflect.Struct {
exported = exported || hasMergeableFields(dst.Field(i))
} else if isExportedComponent(&field) {
exported = exported || len(field.PkgPath) == 0
}
}
return
}
func isExportedComponent(field *reflect.StructField) bool {
pkgPath := field.PkgPath
if len(pkgPath) > 0 {
return false
}
c := field.Name[0]
if 'a' <= c && c <= 'z' || c == '_' {
return false
}
return true
}
type Config struct {
Transformers Transformers
Overwrite bool
ShouldNotDereference bool
AppendSlice bool
TypeCheck bool
overwriteWithEmptyValue bool
overwriteSliceWithEmptyValue bool
sliceDeepCopy bool
debug bool
}
type Transformers interface {
Transformer(reflect.Type) func(dst, src reflect.Value) error
}
// Traverses recursively both values, assigning src's fields values to dst.
// The map argument tracks comparisons that have already been seen, which allows
// short circuiting on recursive types.
func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) {
overwrite := config.Overwrite
typeCheck := config.TypeCheck
overwriteWithEmptySrc := config.overwriteWithEmptyValue
overwriteSliceWithEmptySrc := config.overwriteSliceWithEmptyValue
sliceDeepCopy := config.sliceDeepCopy
if !src.IsValid() {
return
}
if dst.CanAddr() {
addr := dst.UnsafeAddr()
h := 17 * addr
seen := visited[h]
typ := dst.Type()
for p := seen; p != nil; p = p.next {
if p.ptr == addr && p.typ == typ {
return nil
}
}
// Remember, remember...
visited[h] = &visit{typ, seen, addr}
}
if config.Transformers != nil && !isReflectNil(dst) && dst.IsValid() {
if fn := config.Transformers.Transformer(dst.Type()); fn != nil {
err = fn(dst, src)
return
}
}
switch dst.Kind() {
case reflect.Struct:
if hasMergeableFields(dst) {
for i, n := 0, dst.NumField(); i < n; i++ {
if err = deepMerge(dst.Field(i), src.Field(i), visited, depth+1, config); err != nil {
return
}
}
} else {
if dst.CanSet() && (isReflectNil(dst) || overwrite) && (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc) {
dst.Set(src)
}
}
case reflect.Map:
if dst.IsNil() && !src.IsNil() {
if dst.CanSet() {
dst.Set(reflect.MakeMap(dst.Type()))
} else {
dst = src
return
}
}
if src.Kind() != reflect.Map {
if overwrite && dst.CanSet() {
dst.Set(src)
}
return
}
for _, key := range src.MapKeys() {
srcElement := src.MapIndex(key)
if !srcElement.IsValid() {
continue
}
dstElement := dst.MapIndex(key)
switch srcElement.Kind() {
case reflect.Chan, reflect.Func, reflect.Map, reflect.Interface, reflect.Slice:
if srcElement.IsNil() {
if overwrite {
dst.SetMapIndex(key, srcElement)
}
continue
}
fallthrough
default:
if !srcElement.CanInterface() {
continue
}
switch reflect.TypeOf(srcElement.Interface()).Kind() {
case reflect.Struct:
fallthrough
case reflect.Ptr:
fallthrough
case reflect.Map:
srcMapElm := srcElement
dstMapElm := dstElement
if srcMapElm.CanInterface() {
srcMapElm = reflect.ValueOf(srcMapElm.Interface())
if dstMapElm.IsValid() {
dstMapElm = reflect.ValueOf(dstMapElm.Interface())
}
}
if err = deepMerge(dstMapElm, srcMapElm, visited, depth+1, config); err != nil {
return
}
case reflect.Slice:
srcSlice := reflect.ValueOf(srcElement.Interface())
var dstSlice reflect.Value
if !dstElement.IsValid() || dstElement.IsNil() {
dstSlice = reflect.MakeSlice(srcSlice.Type(), 0, srcSlice.Len())
} else {
dstSlice = reflect.ValueOf(dstElement.Interface())
}
if (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) && !config.AppendSlice && !sliceDeepCopy {
if typeCheck && srcSlice.Type() != dstSlice.Type() {
return fmt.Errorf("cannot override two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type())
}
dstSlice = srcSlice
} else if config.AppendSlice {
if srcSlice.Type() != dstSlice.Type() {
return fmt.Errorf("cannot append two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type())
}
dstSlice = reflect.AppendSlice(dstSlice, srcSlice)
} else if sliceDeepCopy {
i := 0
for ; i < srcSlice.Len() && i < dstSlice.Len(); i++ {
srcElement := srcSlice.Index(i)
dstElement := dstSlice.Index(i)
if srcElement.CanInterface() {
srcElement = reflect.ValueOf(srcElement.Interface())
}
if dstElement.CanInterface() {
dstElement = reflect.ValueOf(dstElement.Interface())
}
if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
return
}
}
}
dst.SetMapIndex(key, dstSlice)
}
}
if dstElement.IsValid() && !isEmptyValue(dstElement, !config.ShouldNotDereference) {
if reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice {
continue
}
if reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map && reflect.TypeOf(dstElement.Interface()).Kind() == reflect.Map {
continue
}
}
if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement, !config.ShouldNotDereference)) {
if dst.IsNil() {
dst.Set(reflect.MakeMap(dst.Type()))
}
dst.SetMapIndex(key, srcElement)
}
}
// Ensure that all keys in dst are deleted if they are not in src.
if overwriteWithEmptySrc {
for _, key := range dst.MapKeys() {
srcElement := src.MapIndex(key)
if !srcElement.IsValid() {
dst.SetMapIndex(key, reflect.Value{})
}
}
}
case reflect.Slice:
if !dst.CanSet() {
break
}
if (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) && !config.AppendSlice && !sliceDeepCopy {
dst.Set(src)
} else if config.AppendSlice {
if src.Type() != dst.Type() {
return fmt.Errorf("cannot append two slice with different type (%s, %s)", src.Type(), dst.Type())
}
dst.Set(reflect.AppendSlice(dst, src))
} else if sliceDeepCopy {
for i := 0; i < src.Len() && i < dst.Len(); i++ {
srcElement := src.Index(i)
dstElement := dst.Index(i)
if srcElement.CanInterface() {
srcElement = reflect.ValueOf(srcElement.Interface())
}
if dstElement.CanInterface() {
dstElement = reflect.ValueOf(dstElement.Interface())
}
if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
return
}
}
}
case reflect.Ptr:
fallthrough
case reflect.Interface:
if isReflectNil(src) {
if overwriteWithEmptySrc && dst.CanSet() && src.Type().AssignableTo(dst.Type()) {
dst.Set(src)
}
break
}
if src.Kind() != reflect.Interface {
if dst.IsNil() || (src.Kind() != reflect.Ptr && overwrite) {
if dst.CanSet() && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) {
dst.Set(src)
}
} else if src.Kind() == reflect.Ptr {
if !config.ShouldNotDereference {
if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
return
}
} else {
if overwriteWithEmptySrc || (overwrite && !src.IsNil()) || dst.IsNil() {
dst.Set(src)
}
}
} else if dst.Elem().Type() == src.Type() {
if err = deepMerge(dst.Elem(), src, visited, depth+1, config); err != nil {
return
}
} else {
return ErrDifferentArgumentsTypes
}
break
}
if dst.IsNil() || overwrite {
if dst.CanSet() && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) {
dst.Set(src)
}
break
}
if dst.Elem().Kind() == src.Elem().Kind() {
if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
return
}
break
}
default:
mustSet := (isEmptyValue(dst, !config.ShouldNotDereference) || overwrite) && (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc)
if mustSet {
if dst.CanSet() {
dst.Set(src)
} else {
dst = src
}
}
}
return
}
// Merge will fill any empty value-type attributes on the dst struct using the corresponding
// src attributes if they themselves are not empty. dst and src must be valid same-type structs
// and dst must be a pointer to struct.
// It won't merge unexported (private) fields and will do recursively any exported field.
func Merge(dst, src interface{}, opts ...func(*Config)) error {
return merge(dst, src, opts...)
}
// MergeWithOverwrite will do the same as Merge except that non-empty dst attributes will be overridden by
// non-empty src attribute values.
// Deprecated: use Merge(…) with WithOverride
func MergeWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
return merge(dst, src, append(opts, WithOverride)...)
}
// WithTransformers adds transformers to merge, allowing to customize the merging of some types.
func WithTransformers(transformers Transformers) func(*Config) {
return func(config *Config) {
config.Transformers = transformers
}
}
// WithOverride will make merge override non-empty dst attributes with non-empty src attributes values.
func WithOverride(config *Config) {
config.Overwrite = true
}
// WithOverwriteWithEmptyValue will make merge override non empty dst attributes with empty src attributes values.
func WithOverwriteWithEmptyValue(config *Config) {
config.Overwrite = true
config.overwriteWithEmptyValue = true
}
// WithOverrideEmptySlice will make merge override empty dst slice with empty src slice.
func WithOverrideEmptySlice(config *Config) {
config.overwriteSliceWithEmptyValue = true
}
// WithoutDereference prevents dereferencing pointers when evaluating whether they are empty
// (i.e. a non-nil pointer is never considered empty).
func WithoutDereference(config *Config) {
config.ShouldNotDereference = true
}
// WithAppendSlice will make merge append slices instead of overwriting it.
func WithAppendSlice(config *Config) {
config.AppendSlice = true
}
// WithTypeCheck will make merge check types while overwriting it (must be used with WithOverride).
func WithTypeCheck(config *Config) {
config.TypeCheck = true
}
// WithSliceDeepCopy will merge slice element one by one with Overwrite flag.
func WithSliceDeepCopy(config *Config) {
config.sliceDeepCopy = true
config.Overwrite = true
}
func merge(dst, src interface{}, opts ...func(*Config)) error {
if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr {
return ErrNonPointerArgument
}
var (
vDst, vSrc reflect.Value
err error
)
config := &Config{}
for _, opt := range opts {
opt(config)
}
if vDst, vSrc, err = resolveValues(dst, src); err != nil {
return err
}
if vDst.Type() != vSrc.Type() {
return ErrDifferentArgumentsTypes
}
return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config)
}
// isReflectNil reports whether the provided reflect value is nil.
func isReflectNil(v reflect.Value) bool {
k := v.Kind()
switch k {
case reflect.Interface, reflect.Slice, reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr:
// Both interface and slice are nil if first word is 0.
// Both are always bigger than a word; assume flagIndir.
return v.IsNil()
default:
return false
}
}
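
The functional options above all mutate the same `Config`, so they compose freely as variadic arguments to `Merge`. A small, hypothetical sketch (struct and values invented) combining `WithOverride` and `WithAppendSlice`:

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

type serverConfig struct {
	Hosts []string
	Port  int
}

func main() {
	dst := serverConfig{Hosts: []string{"a.internal"}}
	src := serverConfig{Hosts: []string{"b.internal"}, Port: 8080}

	// WithOverride lets non-empty src fields replace dst values;
	// WithAppendSlice switches slice handling from overwrite to append.
	if err := mergo.Merge(&dst, src, mergo.WithOverride, mergo.WithAppendSlice); err != nil {
		fmt.Println("merge failed:", err)
		return
	}
	fmt.Println(dst) // expected to print: {[a.internal b.internal] 8080}
}
```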

View File

@@ -1,81 +0,0 @@
// Copyright 2013 Dario Castañé. All rights reserved.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Based on src/pkg/reflect/deepequal.go from official
// golang's stdlib.
package mergo
import (
"errors"
"reflect"
)
// Errors reported by Mergo when it finds invalid arguments.
var (
ErrNilArguments = errors.New("src and dst must not be nil")
ErrDifferentArgumentsTypes = errors.New("src and dst must be of same type")
ErrNotSupported = errors.New("only structs, maps, and slices are supported")
ErrExpectedMapAsDestination = errors.New("dst was expected to be a map")
ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct")
ErrNonPointerArgument = errors.New("dst must be a pointer")
)
// During deepMerge, must keep track of checks that are
// in progress. The comparison algorithm assumes that all
// checks in progress are true when it reencounters them.
// Visited are stored in a map indexed by 17 * a1 + a2;
type visit struct {
typ reflect.Type
next *visit
ptr uintptr
}
// From src/pkg/encoding/json/encode.go.
func isEmptyValue(v reflect.Value, shouldDereference bool) bool {
switch v.Kind() {
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
return v.Len() == 0
case reflect.Bool:
return !v.Bool()
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return v.Int() == 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return v.Uint() == 0
case reflect.Float32, reflect.Float64:
return v.Float() == 0
case reflect.Interface, reflect.Ptr:
if v.IsNil() {
return true
}
if shouldDereference {
return isEmptyValue(v.Elem(), shouldDereference)
}
return false
case reflect.Func:
return v.IsNil()
case reflect.Invalid:
return true
}
return false
}
func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) {
if dst == nil || src == nil {
err = ErrNilArguments
return
}
vDst = reflect.ValueOf(dst).Elem()
if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map && vDst.Kind() != reflect.Slice {
err = ErrNotSupported
return
}
vSrc = reflect.ValueOf(src)
// We check if vSrc is a pointer to dereference it.
if vSrc.Kind() == reflect.Ptr {
vSrc = vSrc.Elem()
}
return
}

View File

@@ -1,5 +1,5 @@
[![CircleCI](https://circleci.com/gh/stoewer/go-strcase/tree/master.svg?style=svg)](https://circleci.com/gh/stoewer/go-strcase/tree/master)
[![codecov](https://codecov.io/gh/stoewer/go-strcase/branch/master/graph/badge.svg)](https://codecov.io/gh/stoewer/go-strcase)
[![GH Actions](https://github.com/stoewer/go-strcase/actions/workflows/lint-test.yml/badge.svg?branch=master)](https://github.com/stoewer/go-strcase/actions)
[![codecov](https://codecov.io/github/stoewer/go-strcase/branch/master/graph/badge.svg?token=c0UokYnop5)](https://codecov.io/github/stoewer/go-strcase)
[![GoDoc](https://godoc.org/github.com/stoewer/go-strcase?status.svg)](https://pkg.go.dev/github.com/stoewer/go-strcase)
---

View File

@@ -27,6 +27,9 @@ func camelCase(s string, upper bool) string {
buffer = append(buffer, toUpper(curr))
} else if isLower(prev) {
buffer = append(buffer, curr)
} else if isUpper(prev) && isUpper(curr) && isLower(next) {
// Assume a case like "R" for "XRequestId"
buffer = append(buffer, curr)
} else {
buffer = append(buffer, toLower(curr))
}
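
The added branch keeps an uppercase rune that starts a new word immediately after an acronym run (the "R" in "XRequestId") instead of lowercasing it. Below is a minimal, hypothetical usage sketch of the package's exported conversions; the input strings are invented, and only the simple snake/camel outputs are asserted.

```go
package main

import (
	"fmt"

	"github.com/stoewer/go-strcase"
)

func main() {
	fmt.Println(strcase.SnakeCase("FirstName"))       // first_name
	fmt.Println(strcase.UpperCamelCase("first_name")) // FirstName
	fmt.Println(strcase.LowerCamelCase("first_name")) // firstName

	// Inputs with acronym runs such as "XRequestId" hit the new branch above;
	// the uppercase rune that begins the next word ("R") is preserved.
	fmt.Println(strcase.LowerCamelCase("XRequestId"))
}
```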