From 0ec160c0cb04c47230886a37b17d98a9f8bc1516 Mon Sep 17 00:00:00 2001
From: zhuojie
Date: Mon, 15 Apr 2019 22:59:33 -0700
Subject: [PATCH] Add eval cache source

---
 Gopkg.lock | 18 +-
 docs/api_docs/bundle.yaml | 17 ++
 pkg/config/config.go | 13 ++
 pkg/config/env.go | 42 ++--
 pkg/entity/flag.go | 2 +-
 pkg/handler/eval_cache.go | 62 +++---
 pkg/handler/eval_cache_fetcher.go | 140 ++++++++++++
 pkg/handler/eval_cache_fetcher_test.go | 100 +++++++++
 pkg/handler/eval_test.go | 12 +-
 pkg/handler/export.go | 6 +
 pkg/handler/export_test.go | 15 ++
 pkg/handler/fixture.go | 3 +-
 pkg/handler/handler.go | 11 +-
 pkg/handler/testdata/sample_eval_cache.json | 200 +++++++++++++++++
 swagger/export_eval_cache_json.yaml | 16 ++
 swagger/index.yaml | 2 +
 swagger_gen/restapi/embedded_spec.go | 52 +++++
 .../export/get_export_eval_cache_json.go | 58 +++++
 .../get_export_eval_cache_json_parameters.go | 45 ++++
 .../get_export_eval_cache_json_responses.go | 115 ++++++++++
 .../get_export_eval_cache_json_urlbuilder.go | 87 ++++++++
 swagger_gen/restapi/operations/flagr_api.go | 14 ++
 vendor/github.com/bouk/monkey/LICENSE.md | 3 -
 vendor/github.com/bouk/monkey/README.md | 112 ----------
 vendor/github.com/bouk/monkey/circle.yml | 3 -
 vendor/github.com/bouk/monkey/monkey.go | 133 ------------
 vendor/github.com/bouk/monkey/monkey_386.go | 13 --
 vendor/github.com/bouk/monkey/monkey_amd64.go | 17 --
 vendor/github.com/bouk/monkey/replace.go | 31 ---
 vendor/github.com/bouk/monkey/replace_unix.go | 26 ---
 .../github.com/bouk/monkey/replace_windows.go | 44 ----
 .../zhouzhuojie/withtimeout/.gitignore | 1 +
 .../zhouzhuojie/withtimeout/LICENSE | 202 ++++++++++++++++++
 .../zhouzhuojie/withtimeout/Makefile | 11 +
 .../zhouzhuojie/withtimeout/README.md | 4 +
 .../github.com/zhouzhuojie/withtimeout/go.mod | 5 +
 .../github.com/zhouzhuojie/withtimeout/go.sum | 6 +
 .../zhouzhuojie/withtimeout/withtimeout.go | 39 ++++
 38 files changed, 1230 insertions(+), 450 deletions(-)
 create mode 100644 pkg/handler/eval_cache_fetcher.go
 create mode 100644 pkg/handler/eval_cache_fetcher_test.go
 create mode 100644 pkg/handler/testdata/sample_eval_cache.json
 create mode 100644 swagger/export_eval_cache_json.yaml
 create mode 100644 swagger_gen/restapi/operations/export/get_export_eval_cache_json.go
 create mode 100644 swagger_gen/restapi/operations/export/get_export_eval_cache_json_parameters.go
 create mode 100644 swagger_gen/restapi/operations/export/get_export_eval_cache_json_responses.go
 create mode 100644 swagger_gen/restapi/operations/export/get_export_eval_cache_json_urlbuilder.go
 delete mode 100644 vendor/github.com/bouk/monkey/LICENSE.md
 delete mode 100644 vendor/github.com/bouk/monkey/README.md
 delete mode 100644 vendor/github.com/bouk/monkey/circle.yml
 delete mode 100644 vendor/github.com/bouk/monkey/monkey.go
 delete mode 100644 vendor/github.com/bouk/monkey/monkey_386.go
 delete mode 100644 vendor/github.com/bouk/monkey/monkey_amd64.go
 delete mode 100644 vendor/github.com/bouk/monkey/replace.go
 delete mode 100644 vendor/github.com/bouk/monkey/replace_unix.go
 delete mode 100644 vendor/github.com/bouk/monkey/replace_windows.go
 create mode 100644 vendor/github.com/zhouzhuojie/withtimeout/.gitignore
 create mode 100644 vendor/github.com/zhouzhuojie/withtimeout/LICENSE
 create mode 100644 vendor/github.com/zhouzhuojie/withtimeout/Makefile
 create mode 100644 vendor/github.com/zhouzhuojie/withtimeout/README.md
 create mode 100644 vendor/github.com/zhouzhuojie/withtimeout/go.mod
 create mode 100644 vendor/github.com/zhouzhuojie/withtimeout/go.sum
 create mode
100644 vendor/github.com/zhouzhuojie/withtimeout/withtimeout.go diff --git a/Gopkg.lock b/Gopkg.lock index 6fa5aea2..e9ac4e8f 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -130,14 +130,6 @@ pruneopts = "UT" revision = "3a771d992973f24aa725d07868b467d1ddfceafb" -[[projects]] - digest = "1:d8abdc866ebbe05fa3bce50863e23afd4c73c804b90c908caa864e86df1db8a1" - name = "github.com/bouk/monkey" - packages = ["."] - pruneopts = "UT" - revision = "5df1f207ff77e025801505ae4d903133a0b4353f" - version = "v1.0.0" - [[projects]] branch = "master" digest = "1:a2b3fb724251a4237e10b520bfbf1a39d9acde5ae31a28c43b3920289d119836" @@ -700,6 +692,14 @@ revision = "7de314ba1d596c8ffbb61340bb97bdda799c665e" version = "0.2.2" +[[projects]] + digest = "1:968309907f6174eb5ec1f9df1ceb0b4f492ab2bf333f8213740ddea2a21c4b61" + name = "github.com/zhouzhuojie/withtimeout" + packages = ["."] + pruneopts = "UT" + revision = "12b39eb2edd5059118f5461e97a255c7b1d56d4d" + version = "1.0.0" + [[projects]] digest = "1:5baa1a55624558e55a544de8b81542193852b0d607a9e21d9296e82d3e698918" name = "go.opencensus.io" @@ -946,7 +946,6 @@ "github.com/aws/aws-sdk-go/aws", "github.com/aws/aws-sdk-go/aws/session", "github.com/aws/aws-sdk-go/service/kinesis", - "github.com/bouk/monkey", "github.com/brandur/simplebox", "github.com/bsm/ratelimit", "github.com/caarlos0/env", @@ -985,6 +984,7 @@ "github.com/urfave/negroni", "github.com/yadvendar/negroni-newrelic-go-agent", "github.com/zhouzhuojie/conditions", + "github.com/zhouzhuojie/withtimeout", "golang.org/x/net/netutil", "google.golang.org/api/option", "google.golang.org/grpc", diff --git a/docs/api_docs/bundle.yaml b/docs/api_docs/bundle.yaml index 0a92659c..7bbfa42b 100644 --- a/docs/api_docs/bundle.yaml +++ b/docs/api_docs/bundle.yaml @@ -783,6 +783,23 @@ paths: description: generic error response schema: $ref: '#/definitions/error' + /export/eval_cache/json: + get: + tags: + - export + operationId: getExportEvalCacheJSON + description: Export JSON format of the eval cache dump + produces: + - application/json + responses: + '200': + description: OK + schema: + type: object + default: + description: generic error response + schema: + $ref: '#/definitions/error' definitions: flag: type: object diff --git a/pkg/config/config.go b/pkg/config/config.go index b36980fe..3eb2eafc 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -14,6 +14,12 @@ import ( "github.com/sirupsen/logrus" ) +// EvalOnlyModeDBDrivers is a list of DBDrivers that we should only run in EvalOnlyMode. 
+var EvalOnlyModeDBDrivers = map[string]struct{}{ + "json_file": {}, + "json_http": {}, +} + // Global is the global dependency we can use, such as the new relic app instance var Global = struct { NewrelicApp newrelic.Application @@ -24,6 +30,7 @@ var Global = struct { func init() { env.Parse(&Config) + setupEvalOnlyMode() setupSentry() setupLogrus() setupStatsd() @@ -31,6 +38,12 @@ func init() { setupPrometheus() } +func setupEvalOnlyMode() { + if _, ok := EvalOnlyModeDBDrivers[Config.DBDriver]; ok { + Config.EvalOnlyMode = true + } +} + func setupLogrus() { l, err := logrus.ParseLevel(Config.LogrusLevel) if err != nil { diff --git a/pkg/config/env.go b/pkg/config/env.go index 13dc5630..214293ca 100644 --- a/pkg/config/env.go +++ b/pkg/config/env.go @@ -19,30 +19,44 @@ var Config = struct { // MiddlewareGzipEnabled - to enable gzip middleware MiddlewareGzipEnabled bool `env:"FLAGR_MIDDLEWARE_GZIP_ENABLED" envDefault:"true"` - // EvalLoggingEnabled - to enable the logging for eval results - EvalLoggingEnabled bool `env:"FLAGR_EVAL_LOGGING_ENABLED" envDefault:"true"` - // RateLimiterPerFlagPerSecondConsoleLogging - to rate limit the logging rate // per flag per second RateLimiterPerFlagPerSecondConsoleLogging int `env:"FLAGR_RATELIMITER_PERFLAG_PERSECOND_CONSOLE_LOGGING" envDefault:"100"` - // EvalCacheRefreshTimeout - timeout of getting the flags data from DB into the in-memory evaluation cache - EvalCacheRefreshTimeout time.Duration `env:"FLAGR_EVALCACHE_REFRESHTIMEOUT" envDefault:"59s"` - // EvalCacheRefreshInterval - time interval of getting the flags data from DB into the in-memory evaluation cache - EvalCacheRefreshInterval time.Duration `env:"FLAGR_EVALCACHE_REFRESHINTERVAL" envDefault:"3s"` // EvalEnableDebug - controls if we want to return evaluation debugging information back to the api requests // Note that this is a global switch: // if it's disabled, no evaluation debug info will be returned. // if it's enabled, it respects evaluation request's enableDebug field EvalDebugEnabled bool `env:"FLAGR_EVAL_DEBUG_ENABLED" envDefault:"true"` + // EvalLoggingEnabled - to enable the logging for eval results + EvalLoggingEnabled bool `env:"FLAGR_EVAL_LOGGING_ENABLED" envDefault:"true"` + // EvalCacheRefreshTimeout - timeout of getting the flags data from DB into the in-memory evaluation cache + EvalCacheRefreshTimeout time.Duration `env:"FLAGR_EVALCACHE_REFRESHTIMEOUT" envDefault:"59s"` + // EvalCacheRefreshInterval - time interval of getting the flags data from DB into the in-memory evaluation cache + EvalCacheRefreshInterval time.Duration `env:"FLAGR_EVALCACHE_REFRESHINTERVAL" envDefault:"3s"` + // EvalOnlyMode - will only expose the evaluation related endpoints. + // This field will be derived from DBDriver + EvalOnlyMode bool `env:"FLAGR_EVAL_ONLY_MODE" envDefault:"false"` + + /** + DBDriver and DBConnectionStr define how we can write and read flags data. + For databases, flagr supports sqlite3, mysql and postgres. + For read-only evaluation, flagr supports file and http. 
- // DBDriver - Flagr supports sqlite3, mysql, postgres - DBDriver string `env:"FLAGR_DB_DBDRIVER" envDefault:"sqlite3"` - // DBConnectionStr - examples - // sqlite3: "/tmp/file.db" - // sqlite3: ":memory:" - // mysql: "root:@tcp(127.0.0.1:18100)/flagr?parseTime=true" - // postgres: "host=myhost user=root dbname=flagr password=mypassword" + Examples: + + FLAGR_DB_DBDRIVER FLAGR_DB_DBCONNECTIONSTR + ================= =============================================================== + "sqlite3" "/tmp/file.db" + "sqlite3" ":memory:" + "mysql" "root:@tcp(127.0.0.1:18100)/flagr?parseTime=true" + "postgres" "host=myhost user=root dbname=flagr password=mypassword" + + "json_file" "/tmp/flags.json" # (it automatically sets EvalOnlyMode=true) + "json_http" "https://example.com/flags.json" # (it automatically sets EvalOnlyMode=true) + + */ + DBDriver string `env:"FLAGR_DB_DBDRIVER" envDefault:"sqlite3"` DBConnectionStr string `env:"FLAGR_DB_DBCONNECTIONSTR" envDefault:"flagr.sqlite"` // DBConnectionDebug controls whether to show the database connection debugging logs // warning: it may log the credentials to the stdout diff --git a/pkg/entity/flag.go b/pkg/entity/flag.go index 1822eca6..05a82afd 100644 --- a/pkg/entity/flag.go +++ b/pkg/entity/flag.go @@ -18,7 +18,7 @@ type Flag struct { Enabled bool Segments []Segment Variants []Variant - SnapshotID uint `json:"-"` + SnapshotID uint Notes string `sql:"type:text"` DataRecordsEnabled bool diff --git a/pkg/handler/eval_cache.go b/pkg/handler/eval_cache.go index bcec3a7c..21333f0e 100644 --- a/pkg/handler/eval_cache.go +++ b/pkg/handler/eval_cache.go @@ -7,7 +7,9 @@ import ( "github.com/checkr/flagr/pkg/config" "github.com/checkr/flagr/pkg/entity" "github.com/checkr/flagr/pkg/util" + "github.com/sirupsen/logrus" + "github.com/zhouzhuojie/withtimeout" ) var ( @@ -15,10 +17,13 @@ var ( singletonEvalCacheOnce sync.Once ) +type mapCache map[string]*entity.Flag + // EvalCache is the in-memory cache just for evaluation type EvalCache struct { - mapCache map[string]*entity.Flag mapCacheLock sync.RWMutex + idCache mapCache + keyCache mapCache refreshTimeout time.Duration refreshInterval time.Duration @@ -28,7 +33,8 @@ type EvalCache struct { var GetEvalCache = func() *EvalCache { singletonEvalCacheOnce.Do(func() { ec := &EvalCache{ - mapCache: make(map[string]*entity.Flag), + idCache: make(map[string]*entity.Flag), + keyCache: make(map[string]*entity.Flag), refreshTimeout: config.Config.EvalCacheRefreshTimeout, refreshInterval: config.Config.EvalCacheRefreshInterval, } @@ -56,17 +62,14 @@ func (ec *EvalCache) Start() { // GetByFlagKeyOrID gets the flag by Key or ID func (ec *EvalCache) GetByFlagKeyOrID(keyOrID interface{}) *entity.Flag { ec.mapCacheLock.RLock() - f := ec.mapCache[util.SafeString(keyOrID)] - ec.mapCacheLock.RUnlock() - return f -} + defer ec.mapCacheLock.RUnlock() -var fetchAllFlags = func() ([]entity.Flag, error) { - // Use eager loading to avoid N+1 problem - // doc: http://jinzhu.me/gorm/crud.html#preloading-eager-loading - fs := []entity.Flag{} - err := entity.PreloadSegmentsVariants(getDB()).Find(&fs).Error - return fs, err + s := util.SafeString(keyOrID) + f, ok := ec.idCache[s] + if !ok { + f = ec.keyCache[s] + } + return f } func (ec *EvalCache) reloadMapCache() error { @@ -74,30 +77,19 @@ func (ec *EvalCache) reloadMapCache() error { defer config.Global.NewrelicApp.StartTransaction("eval_cache_reload", nil, nil).End() } - fs, err := fetchAllFlags() - if err != nil { - return err - } - m := make(map[string]*entity.Flag) - for i := range 
fs { - ptr := &fs[i] - if ptr.ID != 0 { - m[util.SafeString(ptr.ID)] = ptr - } - if ptr.Key != "" { - m[ptr.Key] = ptr - } - } - - for _, f := range m { - err := f.PrepareEvaluation() + _, _, err := withtimeout.Do(ec.refreshTimeout, func() (interface{}, error) { + idCache, keyCache, err := ec.fetchAllFlags() if err != nil { - return err + return nil, err } - } - ec.mapCacheLock.Lock() - ec.mapCache = m - ec.mapCacheLock.Unlock() - return nil + ec.mapCacheLock.Lock() + defer ec.mapCacheLock.Unlock() + + ec.idCache = idCache + ec.keyCache = keyCache + return nil, err + }) + + return err } diff --git a/pkg/handler/eval_cache_fetcher.go b/pkg/handler/eval_cache_fetcher.go new file mode 100644 index 00000000..e30f963c --- /dev/null +++ b/pkg/handler/eval_cache_fetcher.go @@ -0,0 +1,140 @@ +package handler + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + + "github.com/checkr/flagr/pkg/config" + "github.com/checkr/flagr/pkg/entity" + "github.com/checkr/flagr/pkg/util" + "github.com/jinzhu/gorm" +) + +// EvalCacheJSON is the JSON serialization format of EvalCache's flags +type EvalCacheJSON struct { + Flags []entity.Flag +} + +func (ec *EvalCache) export() EvalCacheJSON { + fs := make([]entity.Flag, 0, len(ec.idCache)) + + ec.mapCacheLock.RLock() + defer ec.mapCacheLock.RUnlock() + + for _, f := range ec.idCache { + ff := *f + fs = append(fs, ff) + } + return EvalCacheJSON{Flags: fs} +} + +func (ec *EvalCache) fetchAllFlags() (idCache mapCache, keyCache mapCache, err error) { + fs, err := fetchAllFlags() + if err != nil { + return nil, nil, err + } + + idCache = make(map[string]*entity.Flag) + keyCache = make(map[string]*entity.Flag) + + for i := range fs { + f := &fs[i] + if err := f.PrepareEvaluation(); err != nil { + return nil, nil, err + } + + if f.ID != 0 { + idCache[util.SafeString(f.ID)] = f + } + if f.Key != "" { + keyCache[f.Key] = f + } + } + return idCache, keyCache, nil +} + +type evalCacheFetcher interface { + fetch() ([]entity.Flag, error) +} + +func newFetcher() (evalCacheFetcher, error) { + if !config.Config.EvalOnlyMode { + return &dbFetcher{db: getDB()}, nil + } + + switch config.Config.DBDriver { + case "json_file": + return &jsonFileFetcher{filePath: config.Config.DBConnectionStr}, nil + case "json_http": + return &jsonHTTPFetcher{url: config.Config.DBConnectionStr}, nil + default: + return nil, fmt.Errorf( + "failed to create evaluation cache fetcher. 
DBDriver:%s is not supported", + config.Config.DBDriver, + ) + } +} + +var fetchAllFlags = func() ([]entity.Flag, error) { + fetcher, err := newFetcher() + if err != nil { + return nil, err + } + return fetcher.fetch() +} + +type jsonFileFetcher struct { + filePath string +} + +func (ff *jsonFileFetcher) fetch() ([]entity.Flag, error) { + b, err := ioutil.ReadFile(ff.filePath) + if err != nil { + return nil, err + } + ecj := &EvalCacheJSON{} + err = json.Unmarshal(b, ecj) + if err != nil { + return nil, err + } + return ecj.Flags, nil +} + +type jsonHTTPFetcher struct { + url string +} + +func (hf *jsonHTTPFetcher) fetch() ([]entity.Flag, error) { + client := http.Client{Timeout: config.Config.EvalCacheRefreshTimeout} + res, err := client.Get(hf.url) + if err != nil { + return nil, err + } + defer res.Body.Close() + + b, err := ioutil.ReadAll(res.Body) + if err != nil { + return nil, err + } + + ecj := &EvalCacheJSON{} + err = json.Unmarshal(b, ecj) + if err != nil { + return nil, err + } + return ecj.Flags, nil +} + +type dbFetcher struct { + db *gorm.DB +} + +func (df *dbFetcher) fetch() ([]entity.Flag, error) { + // Use eager loading to avoid N+1 problem + // doc: http://jinzhu.me/gorm/crud.html#preloading-eager-loading + fs := []entity.Flag{} + err := entity.PreloadSegmentsVariants(df.db).Find(&fs).Error + return fs, err +} diff --git a/pkg/handler/eval_cache_fetcher_test.go b/pkg/handler/eval_cache_fetcher_test.go new file mode 100644 index 00000000..731f2653 --- /dev/null +++ b/pkg/handler/eval_cache_fetcher_test.go @@ -0,0 +1,100 @@ +package handler + +import ( + "io" + "io/ioutil" + "net/http" + "net/http/httptest" + "testing" + + "github.com/checkr/flagr/pkg/config" + "github.com/stretchr/testify/assert" +) + +func TestJSONFileFetcher(t *testing.T) { + t.Run("happy code path", func(t *testing.T) { + jff := &jsonFileFetcher{filePath: "./testdata/sample_eval_cache.json"} + fs, err := jff.fetch() + assert.NoError(t, err) + assert.NotZero(t, len(fs)) + }) + + t.Run("non-exists file path", func(t *testing.T) { + jff := &jsonFileFetcher{filePath: "./testdata/non-exists.json"} + fs, err := jff.fetch() + assert.Error(t, err) + assert.Zero(t, fs) + }) +} + +func TestJSONHTTPFetcher(t *testing.T) { + t.Run("happy code path", func(t *testing.T) { + h := func(w http.ResponseWriter, r *http.Request) { + b, _ := ioutil.ReadFile("./testdata/sample_eval_cache.json") + io.WriteString(w, string(b)) + } + + server := httptest.NewServer(http.HandlerFunc(h)) + defer server.Close() + + jhf := &jsonHTTPFetcher{url: server.URL} + fs, err := jhf.fetch() + assert.NoError(t, err) + assert.NotZero(t, len(fs)) + }) + + t.Run("non-exists file path", func(t *testing.T) { + jhf := &jsonHTTPFetcher{url: "http://invalid-url"} + fs, err := jhf.fetch() + assert.Error(t, err) + assert.Zero(t, len(fs)) + }) +} + +func setDBDriverConfig(driver string, evalOnlyMode bool) (reset func()) { + old := config.Config + config.Config.DBDriver = driver + config.Config.EvalOnlyMode = evalOnlyMode + + return func() { + config.Config = old + } +} + +func TestNewFetcher(t *testing.T) { + t.Run("regular db", func(t *testing.T) { + reset := setDBDriverConfig("sqlite3", false) + defer reset() + + fetcher, err := newFetcher() + assert.NoError(t, err) + assert.NotNil(t, fetcher) + }) + + t.Run("json file", func(t *testing.T) { + reset := setDBDriverConfig("json_file", true) + defer reset() + + fetcher, err := newFetcher() + assert.NoError(t, err) + assert.NotNil(t, fetcher) + }) + + t.Run("json http", func(t *testing.T) { + reset := 
setDBDriverConfig("json_http", true) + defer reset() + + fetcher, err := newFetcher() + assert.NoError(t, err) + assert.NotNil(t, fetcher) + }) + + t.Run("invalid driver", func(t *testing.T) { + reset := setDBDriverConfig("invalid_driver", true) + defer reset() + + fetcher, err := newFetcher() + assert.Error(t, err) + assert.Nil(t, fetcher) + }) +} diff --git a/pkg/handler/eval_test.go b/pkg/handler/eval_test.go index fe3a16ba..78f91335 100644 --- a/pkg/handler/eval_test.go +++ b/pkg/handler/eval_test.go @@ -163,7 +163,7 @@ func TestEvalFlag(t *testing.T) { }, } f.PrepareEvaluation() - cache := &EvalCache{mapCache: map[string]*entity.Flag{"100": &f}} + cache := &EvalCache{idCache: map[string]*entity.Flag{"100": &f}} defer gostub.StubFunc(&GetEvalCache, cache).Reset() result := evalFlag(models.EvalContext{ EnableDebug: true, @@ -183,7 +183,7 @@ func TestEvalFlag(t *testing.T) { f.Segments[0].RolloutPercent = uint(0) f.PrepareEvaluation() - cache := &EvalCache{mapCache: map[string]*entity.Flag{"100": &f}} + cache := &EvalCache{idCache: map[string]*entity.Flag{"100": &f}} defer gostub.StubFunc(&GetEvalCache, cache).Reset() result := evalFlag(models.EvalContext{ EnableDebug: true, @@ -215,7 +215,7 @@ func TestEvalFlag(t *testing.T) { }, } f.PrepareEvaluation() - cache := &EvalCache{mapCache: map[string]*entity.Flag{"100": &f}} + cache := &EvalCache{idCache: map[string]*entity.Flag{"100": &f}} defer gostub.StubFunc(&GetEvalCache, cache).Reset() result := evalFlag(models.EvalContext{ EnableDebug: true, @@ -231,7 +231,7 @@ func TestEvalFlag(t *testing.T) { t.Run("test enabled=false", func(t *testing.T) { f := entity.GenFixtureFlag() f.Enabled = false - cache := &EvalCache{mapCache: map[string]*entity.Flag{"100": &f}} + cache := &EvalCache{idCache: map[string]*entity.Flag{"100": &f}} defer gostub.StubFunc(&GetEvalCache, cache).Reset() result := evalFlag(models.EvalContext{ EnableDebug: true, @@ -248,7 +248,7 @@ func TestEvalFlag(t *testing.T) { t.Run("empty entityType case", func(t *testing.T) { f := entity.GenFixtureFlag() f.EntityType = "" - cache := &EvalCache{mapCache: map[string]*entity.Flag{"100": &f}} + cache := &EvalCache{idCache: map[string]*entity.Flag{"100": &f}} defer gostub.StubFunc(&GetEvalCache, cache).Reset() result := evalFlag(models.EvalContext{ EnableDebug: true, @@ -264,7 +264,7 @@ func TestEvalFlag(t *testing.T) { t.Run("override case", func(t *testing.T) { f := entity.GenFixtureFlag() f.EntityType = "some_entity_type" - cache := &EvalCache{mapCache: map[string]*entity.Flag{"100": &f}} + cache := &EvalCache{idCache: map[string]*entity.Flag{"100": &f}} defer gostub.StubFunc(&GetEvalCache, cache).Reset() result := evalFlag(models.EvalContext{ EnableDebug: true, diff --git a/pkg/handler/export.go b/pkg/handler/export.go index d51bf1bc..334f2ef8 100644 --- a/pkg/handler/export.go +++ b/pkg/handler/export.go @@ -93,3 +93,9 @@ var exportFlagEntityTypes = func(tmpDB *gorm.DB) error { logrus.WithField("count", len(ts)).Debugf("export flag entity types") return nil } + +var exportEvalCacheJSONHandler = func(export.GetExportEvalCacheJSONParams) middleware.Responder { + return export.NewGetExportEvalCacheJSONOK().WithPayload( + GetEvalCache().export(), + ) +} diff --git a/pkg/handler/export_test.go b/pkg/handler/export_test.go index fc805100..7dc30983 100644 --- a/pkg/handler/export_test.go +++ b/pkg/handler/export_test.go @@ -92,3 +92,18 @@ func TestExportSQLiteHandler(t *testing.T) { assert.IsType(t, res.(*export.GetExportSqliteDefault), res) }) } + +func 
TestExportEvalCacheJSONHandler(t *testing.T) { + fixtureFlag := entity.GenFixtureFlag() + db := entity.PopulateTestDB(fixtureFlag) + defer db.Close() + defer gostub.StubFunc(&getDB, db).Reset() + + ec := GetEvalCache() + ec.reloadMapCache() + + t.Run("happy code path", func(t *testing.T) { + res := exportEvalCacheJSONHandler(export.GetExportEvalCacheJSONParams{}) + assert.IsType(t, res.(*export.GetExportEvalCacheJSONOK), res) + }) +} diff --git a/pkg/handler/fixture.go b/pkg/handler/fixture.go index 25e31c49..0a3798ce 100644 --- a/pkg/handler/fixture.go +++ b/pkg/handler/fixture.go @@ -9,6 +9,7 @@ import ( func GenFixtureEvalCache() *EvalCache { f := entity.GenFixtureFlag() return &EvalCache{ - mapCache: map[string]*entity.Flag{util.SafeString(f.ID): &f, f.Key: &f}, + idCache: map[string]*entity.Flag{util.SafeString(f.ID): &f}, + keyCache: map[string]*entity.Flag{f.Key: &f}, } } diff --git a/pkg/handler/handler.go b/pkg/handler/handler.go index 50610084..f6d1ff31 100644 --- a/pkg/handler/handler.go +++ b/pkg/handler/handler.go @@ -19,9 +19,15 @@ var getDB = entity.GetDB // Setup initialize all the handler functions func Setup(api *operations.FlagrAPI) { - setupCRUD(api) - setupEvaluation(api) + if config.Config.EvalOnlyMode { + setupHealth(api) + setupEvaluation(api) + return + } + setupHealth(api) + setupEvaluation(api) + setupCRUD(api) setupExport(api) } @@ -83,4 +89,5 @@ func setupHealth(api *operations.FlagrAPI) { func setupExport(api *operations.FlagrAPI) { api.ExportGetExportSqliteHandler = export.GetExportSqliteHandlerFunc(exportSQLiteHandler) + api.ExportGetExportEvalCacheJSONHandler = export.GetExportEvalCacheJSONHandlerFunc(exportEvalCacheJSONHandler) } diff --git a/pkg/handler/testdata/sample_eval_cache.json b/pkg/handler/testdata/sample_eval_cache.json new file mode 100644 index 00000000..777da4ee --- /dev/null +++ b/pkg/handler/testdata/sample_eval_cache.json @@ -0,0 +1,200 @@ +{ + "Flags": [ + { + "ID": 1, + "CreatedAt": "2018-10-05T23:10:25.234392-07:00", + "UpdatedAt": "2019-04-15T23:02:08.034382-07:00", + "DeletedAt": null, + "Key": "kmmcd1nsd6", + "Description": "demo_example123456", + "CreatedBy": "", + "UpdatedBy": "", + "Enabled": true, + "Segments": [ + { + "ID": 6, + "CreatedAt": "2019-03-19T13:55:16.281094-07:00", + "UpdatedAt": "2019-04-15T23:02:08.03487-07:00", + "DeletedAt": null, + "FlagID": 1, + "Description": "2", + "Rank": 0, + "RolloutPercent": 100, + "Constraints": [ + { + "ID": 3, + "CreatedAt": "2019-04-12T17:45:11.955157-07:00", + "UpdatedAt": "2019-04-15T23:02:08.035078-07:00", + "DeletedAt": null, + "SegmentID": 6, + "Property": "env", + "Operator": "EQ", + "Value": "\"local\"" + } + ], + "Distributions": [ + { + "ID": 7, + "CreatedAt": "2019-03-19T13:55:21.884681-07:00", + "UpdatedAt": "2019-04-15T23:02:08.035612-07:00", + "DeletedAt": null, + "SegmentID": 6, + "VariantID": 2, + "VariantKey": "blue123", + "Percent": 100 + } + ] + }, + { + "ID": 1, + "CreatedAt": "2018-10-05T23:11:14.908074-07:00", + "UpdatedAt": "2019-04-15T23:02:08.03662-07:00", + "DeletedAt": null, + "FlagID": 1, + "Description": "Users in CA", + "Rank": 1, + "RolloutPercent": 100, + "Constraints": [], + "Distributions": [ + { + "ID": 1, + "CreatedAt": "2018-10-05T23:11:38.852941-07:00", + "UpdatedAt": "2019-04-15T23:02:08.036756-07:00", + "DeletedAt": null, + "SegmentID": 1, + "VariantID": 2, + "VariantKey": "blue123", + "Percent": 50 + }, + { + "ID": 2, + "CreatedAt": "2018-10-05T23:11:38.853333-07:00", + "UpdatedAt": "2019-04-15T23:02:08.036888-07:00", + "DeletedAt": null, + 
"SegmentID": 1, + "VariantID": 3, + "VariantKey": "red", + "Percent": 50 + } + ] + } + ], + "Variants": [ + { + "ID": 1, + "CreatedAt": "2018-10-05T23:10:44.066035-07:00", + "UpdatedAt": "2019-04-15T23:02:08.036989-07:00", + "DeletedAt": null, + "FlagID": 1, + "Key": "control222", + "Attachment": {} + }, + { + "ID": 2, + "CreatedAt": "2018-10-05T23:10:46.775635-07:00", + "UpdatedAt": "2019-04-15T23:02:08.037348-07:00", + "DeletedAt": null, + "FlagID": 1, + "Key": "blue123", + "Attachment": { + "1": "12345" + } + }, + { + "ID": 3, + "CreatedAt": "2018-10-05T23:10:49.496043-07:00", + "UpdatedAt": "2019-04-15T23:02:08.037487-07:00", + "DeletedAt": null, + "FlagID": 1, + "Key": "red", + "Attachment": {} + } + ], + "SnapshotID": 69, + "Notes": "", + "DataRecordsEnabled": true, + "EntityType": "a123" + }, + { + "ID": 2, + "CreatedAt": "2019-02-12T17:06:45.28157-08:00", + "UpdatedAt": "2019-03-25T18:22:17.846004-07:00", + "DeletedAt": null, + "Key": "knkrkxnvfh8nk8aw4", + "Description": "demo_2", + "CreatedBy": "", + "UpdatedBy": "", + "Enabled": true, + "Segments": [ + { + "ID": 5, + "CreatedAt": "2019-02-12T17:13:02.022024-08:00", + "UpdatedAt": "2019-03-25T18:22:17.846264-07:00", + "DeletedAt": null, + "FlagID": 2, + "Description": "segment1", + "Rank": 999, + "RolloutPercent": 100, + "Constraints": [ + { + "ID": 2, + "CreatedAt": "2019-02-12T17:13:12.789073-08:00", + "UpdatedAt": "2019-03-25T18:22:17.846461-07:00", + "DeletedAt": null, + "SegmentID": 5, + "Property": "state", + "Operator": "EQ", + "Value": "\"CA\"" + } + ], + "Distributions": [ + { + "ID": 5, + "CreatedAt": "2019-02-12T17:13:24.846633-08:00", + "UpdatedAt": "2019-03-25T18:22:17.846655-07:00", + "DeletedAt": null, + "SegmentID": 5, + "VariantID": 4, + "VariantKey": "control", + "Percent": 50 + }, + { + "ID": 6, + "CreatedAt": "2019-02-12T17:13:24.849219-08:00", + "UpdatedAt": "2019-03-25T18:22:17.847076-07:00", + "DeletedAt": null, + "SegmentID": 5, + "VariantID": 5, + "VariantKey": "treatment", + "Percent": 50 + } + ] + } + ], + "Variants": [ + { + "ID": 4, + "CreatedAt": "2019-02-12T17:12:47.918306-08:00", + "UpdatedAt": "2019-03-25T18:22:17.847214-07:00", + "DeletedAt": null, + "FlagID": 2, + "Key": "control", + "Attachment": {} + }, + { + "ID": 5, + "CreatedAt": "2019-02-12T17:12:52.506251-08:00", + "UpdatedAt": "2019-03-25T18:22:17.847426-07:00", + "DeletedAt": null, + "FlagID": 2, + "Key": "treatment", + "Attachment": {} + } + ], + "SnapshotID": 60, + "Notes": "123", + "DataRecordsEnabled": false, + "EntityType": "" + } + ] +} diff --git a/swagger/export_eval_cache_json.yaml b/swagger/export_eval_cache_json.yaml new file mode 100644 index 00000000..120cbf48 --- /dev/null +++ b/swagger/export_eval_cache_json.yaml @@ -0,0 +1,16 @@ +get: + tags: + - export + operationId: getExportEvalCacheJSON + description: Export JSON format of the eval cache dump + produces: + - application/json + responses: + 200: + description: OK + schema: + type: object + default: + description: generic error response + schema: + $ref: "#/definitions/error" diff --git a/swagger/index.yaml b/swagger/index.yaml index 312f815d..decd3a31 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -80,6 +80,8 @@ paths: $ref: ./health.yaml /export/sqlite: $ref: ./export_sqlite.yaml + /export/eval_cache/json: + $ref: ./export_eval_cache_json.yaml definitions: diff --git a/swagger_gen/restapi/embedded_spec.go b/swagger_gen/restapi/embedded_spec.go index 30d5814f..5c1285b1 100644 --- a/swagger_gen/restapi/embedded_spec.go +++ 
b/swagger_gen/restapi/embedded_spec.go @@ -101,6 +101,32 @@ func init() { } } }, + "/export/eval_cache/json": { + "get": { + "description": "Export JSON format of the eval cache dump", + "produces": [ + "application/json" + ], + "tags": [ + "export" + ], + "operationId": "getExportEvalCacheJSON", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object" + } + }, + "default": { + "description": "generic error response", + "schema": { + "$ref": "#/definitions/error" + } + } + } + } + }, "/export/sqlite": { "get": { "description": "Export sqlite3 format of the db dump, which is converted from the main database.", @@ -1842,6 +1868,32 @@ func init() { } } }, + "/export/eval_cache/json": { + "get": { + "description": "Export JSON format of the eval cache dump", + "produces": [ + "application/json" + ], + "tags": [ + "export" + ], + "operationId": "getExportEvalCacheJSON", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object" + } + }, + "default": { + "description": "generic error response", + "schema": { + "$ref": "#/definitions/error" + } + } + } + } + }, "/export/sqlite": { "get": { "description": "Export sqlite3 format of the db dump, which is converted from the main database.", diff --git a/swagger_gen/restapi/operations/export/get_export_eval_cache_json.go b/swagger_gen/restapi/operations/export/get_export_eval_cache_json.go new file mode 100644 index 00000000..73ae7b8c --- /dev/null +++ b/swagger_gen/restapi/operations/export/get_export_eval_cache_json.go @@ -0,0 +1,58 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package export + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the generate command + +import ( + "net/http" + + middleware "github.com/go-openapi/runtime/middleware" +) + +// GetExportEvalCacheJSONHandlerFunc turns a function with the right signature into a get export eval cache JSON handler +type GetExportEvalCacheJSONHandlerFunc func(GetExportEvalCacheJSONParams) middleware.Responder + +// Handle executing the request and returning a response +func (fn GetExportEvalCacheJSONHandlerFunc) Handle(params GetExportEvalCacheJSONParams) middleware.Responder { + return fn(params) +} + +// GetExportEvalCacheJSONHandler interface for that can handle valid get export eval cache JSON params +type GetExportEvalCacheJSONHandler interface { + Handle(GetExportEvalCacheJSONParams) middleware.Responder +} + +// NewGetExportEvalCacheJSON creates a new http.Handler for the get export eval cache JSON operation +func NewGetExportEvalCacheJSON(ctx *middleware.Context, handler GetExportEvalCacheJSONHandler) *GetExportEvalCacheJSON { + return &GetExportEvalCacheJSON{Context: ctx, Handler: handler} +} + +/*GetExportEvalCacheJSON swagger:route GET /export/eval_cache/json export getExportEvalCacheJson + +Export JSON format of the eval cache dump + +*/ +type GetExportEvalCacheJSON struct { + Context *middleware.Context + Handler GetExportEvalCacheJSONHandler +} + +func (o *GetExportEvalCacheJSON) ServeHTTP(rw http.ResponseWriter, r *http.Request) { + route, rCtx, _ := o.Context.RouteInfo(r) + if rCtx != nil { + r = rCtx + } + var Params = NewGetExportEvalCacheJSONParams() + + if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params + o.Context.Respond(rw, r, route.Produces, route, err) + return + } + + res := o.Handler.Handle(Params) // actually handle the request + + o.Context.Respond(rw, r, route.Produces, route, res) + +} diff --git 
a/swagger_gen/restapi/operations/export/get_export_eval_cache_json_parameters.go b/swagger_gen/restapi/operations/export/get_export_eval_cache_json_parameters.go new file mode 100644 index 00000000..2cf988af --- /dev/null +++ b/swagger_gen/restapi/operations/export/get_export_eval_cache_json_parameters.go @@ -0,0 +1,45 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package export + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "net/http" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime/middleware" +) + +// NewGetExportEvalCacheJSONParams creates a new GetExportEvalCacheJSONParams object +// no default values defined in spec. +func NewGetExportEvalCacheJSONParams() GetExportEvalCacheJSONParams { + + return GetExportEvalCacheJSONParams{} +} + +// GetExportEvalCacheJSONParams contains all the bound params for the get export eval cache JSON operation +// typically these are obtained from a http.Request +// +// swagger:parameters getExportEvalCacheJSON +type GetExportEvalCacheJSONParams struct { + + // HTTP Request Object + HTTPRequest *http.Request `json:"-"` +} + +// BindRequest both binds and validates a request, it assumes that complex things implement a Validatable(strfmt.Registry) error interface +// for simple values it will use straight method calls. +// +// To ensure default values, the struct must have been initialized with NewGetExportEvalCacheJSONParams() beforehand. +func (o *GetExportEvalCacheJSONParams) BindRequest(r *http.Request, route *middleware.MatchedRoute) error { + var res []error + + o.HTTPRequest = r + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/swagger_gen/restapi/operations/export/get_export_eval_cache_json_responses.go b/swagger_gen/restapi/operations/export/get_export_eval_cache_json_responses.go new file mode 100644 index 00000000..a11d43e6 --- /dev/null +++ b/swagger_gen/restapi/operations/export/get_export_eval_cache_json_responses.go @@ -0,0 +1,115 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package export + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "net/http" + + "github.com/go-openapi/runtime" + + models "github.com/checkr/flagr/swagger_gen/models" +) + +// GetExportEvalCacheJSONOKCode is the HTTP code returned for type GetExportEvalCacheJSONOK +const GetExportEvalCacheJSONOKCode int = 200 + +/*GetExportEvalCacheJSONOK OK + +swagger:response getExportEvalCacheJsonOK +*/ +type GetExportEvalCacheJSONOK struct { + + /* + In: Body + */ + Payload interface{} `json:"body,omitempty"` +} + +// NewGetExportEvalCacheJSONOK creates GetExportEvalCacheJSONOK with default headers values +func NewGetExportEvalCacheJSONOK() *GetExportEvalCacheJSONOK { + + return &GetExportEvalCacheJSONOK{} +} + +// WithPayload adds the payload to the get export eval cache Json o k response +func (o *GetExportEvalCacheJSONOK) WithPayload(payload interface{}) *GetExportEvalCacheJSONOK { + o.Payload = payload + return o +} + +// SetPayload sets the payload to the get export eval cache Json o k response +func (o *GetExportEvalCacheJSONOK) SetPayload(payload interface{}) { + o.Payload = payload +} + +// WriteResponse to the client +func (o *GetExportEvalCacheJSONOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + rw.WriteHeader(200) + payload := o.Payload + if err := producer.Produce(rw, payload); err != nil { + panic(err) // let the recovery middleware deal with this + } + +} + +/*GetExportEvalCacheJSONDefault generic error response + +swagger:response getExportEvalCacheJsonDefault +*/ +type GetExportEvalCacheJSONDefault struct { + _statusCode int + + /* + In: Body + */ + Payload *models.Error `json:"body,omitempty"` +} + +// NewGetExportEvalCacheJSONDefault creates GetExportEvalCacheJSONDefault with default headers values +func NewGetExportEvalCacheJSONDefault(code int) *GetExportEvalCacheJSONDefault { + if code <= 0 { + code = 500 + } + + return &GetExportEvalCacheJSONDefault{ + _statusCode: code, + } +} + +// WithStatusCode adds the status to the get export eval cache JSON default response +func (o *GetExportEvalCacheJSONDefault) WithStatusCode(code int) *GetExportEvalCacheJSONDefault { + o._statusCode = code + return o +} + +// SetStatusCode sets the status to the get export eval cache JSON default response +func (o *GetExportEvalCacheJSONDefault) SetStatusCode(code int) { + o._statusCode = code +} + +// WithPayload adds the payload to the get export eval cache JSON default response +func (o *GetExportEvalCacheJSONDefault) WithPayload(payload *models.Error) *GetExportEvalCacheJSONDefault { + o.Payload = payload + return o +} + +// SetPayload sets the payload to the get export eval cache JSON default response +func (o *GetExportEvalCacheJSONDefault) SetPayload(payload *models.Error) { + o.Payload = payload +} + +// WriteResponse to the client +func (o *GetExportEvalCacheJSONDefault) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + rw.WriteHeader(o._statusCode) + if o.Payload != nil { + payload := o.Payload + if err := producer.Produce(rw, payload); err != nil { + panic(err) // let the recovery middleware deal with this + } + } +} diff --git a/swagger_gen/restapi/operations/export/get_export_eval_cache_json_urlbuilder.go b/swagger_gen/restapi/operations/export/get_export_eval_cache_json_urlbuilder.go new file mode 100644 index 00000000..badd45a8 --- /dev/null +++ b/swagger_gen/restapi/operations/export/get_export_eval_cache_json_urlbuilder.go @@ -0,0 +1,87 @@ +// Code generated by go-swagger; DO NOT EDIT. 
+ +package export + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the generate command + +import ( + "errors" + "net/url" + golangswaggerpaths "path" +) + +// GetExportEvalCacheJSONURL generates an URL for the get export eval cache JSON operation +type GetExportEvalCacheJSONURL struct { + _basePath string +} + +// WithBasePath sets the base path for this url builder, only required when it's different from the +// base path specified in the swagger spec. +// When the value of the base path is an empty string +func (o *GetExportEvalCacheJSONURL) WithBasePath(bp string) *GetExportEvalCacheJSONURL { + o.SetBasePath(bp) + return o +} + +// SetBasePath sets the base path for this url builder, only required when it's different from the +// base path specified in the swagger spec. +// When the value of the base path is an empty string +func (o *GetExportEvalCacheJSONURL) SetBasePath(bp string) { + o._basePath = bp +} + +// Build a url path and query string +func (o *GetExportEvalCacheJSONURL) Build() (*url.URL, error) { + var result url.URL + + var _path = "/export/eval_cache/json" + + _basePath := o._basePath + if _basePath == "" { + _basePath = "/api/v1" + } + result.Path = golangswaggerpaths.Join(_basePath, _path) + + return &result, nil +} + +// Must is a helper function to panic when the url builder returns an error +func (o *GetExportEvalCacheJSONURL) Must(u *url.URL, err error) *url.URL { + if err != nil { + panic(err) + } + if u == nil { + panic("url can't be nil") + } + return u +} + +// String returns the string representation of the path with query string +func (o *GetExportEvalCacheJSONURL) String() string { + return o.Must(o.Build()).String() +} + +// BuildFull builds a full url with scheme, host, path and query string +func (o *GetExportEvalCacheJSONURL) BuildFull(scheme, host string) (*url.URL, error) { + if scheme == "" { + return nil, errors.New("scheme is required for a full url on GetExportEvalCacheJSONURL") + } + if host == "" { + return nil, errors.New("host is required for a full url on GetExportEvalCacheJSONURL") + } + + base, err := o.Build() + if err != nil { + return nil, err + } + + base.Scheme = scheme + base.Host = host + return base, nil +} + +// StringFull returns the string representation of a complete url +func (o *GetExportEvalCacheJSONURL) StringFull(scheme, host string) string { + return o.Must(o.BuildFull(scheme, host)).String() +} diff --git a/swagger_gen/restapi/operations/flagr_api.go b/swagger_gen/restapi/operations/flagr_api.go index 5418ac50..1d1ff59c 100644 --- a/swagger_gen/restapi/operations/flagr_api.go +++ b/swagger_gen/restapi/operations/flagr_api.go @@ -86,6 +86,9 @@ func NewFlagrAPI(spec *loads.Document) *FlagrAPI { VariantFindVariantsHandler: variant.FindVariantsHandlerFunc(func(params variant.FindVariantsParams) middleware.Responder { return middleware.NotImplemented("operation VariantFindVariants has not yet been implemented") }), + ExportGetExportEvalCacheJSONHandler: export.GetExportEvalCacheJSONHandlerFunc(func(params export.GetExportEvalCacheJSONParams) middleware.Responder { + return middleware.NotImplemented("operation ExportGetExportEvalCacheJSON has not yet been implemented") + }), ExportGetExportSqliteHandler: export.GetExportSqliteHandlerFunc(func(params export.GetExportSqliteParams) middleware.Responder { return middleware.NotImplemented("operation ExportGetExportSqlite has not yet been implemented") }), @@ -188,6 +191,8 @@ type FlagrAPI struct { SegmentFindSegmentsHandler 
segment.FindSegmentsHandler // VariantFindVariantsHandler sets the operation handler for the find variants operation VariantFindVariantsHandler variant.FindVariantsHandler + // ExportGetExportEvalCacheJSONHandler sets the operation handler for the get export eval cache JSON operation + ExportGetExportEvalCacheJSONHandler export.GetExportEvalCacheJSONHandler // ExportGetExportSqliteHandler sets the operation handler for the get export sqlite operation ExportGetExportSqliteHandler export.GetExportSqliteHandler // FlagGetFlagHandler sets the operation handler for the get flag operation @@ -335,6 +340,10 @@ func (o *FlagrAPI) Validate() error { unregistered = append(unregistered, "variant.FindVariantsHandler") } + if o.ExportGetExportEvalCacheJSONHandler == nil { + unregistered = append(unregistered, "export.GetExportEvalCacheJSONHandler") + } + if o.ExportGetExportSqliteHandler == nil { unregistered = append(unregistered, "export.GetExportSqliteHandler") } @@ -557,6 +566,11 @@ func (o *FlagrAPI) initHandlerCache() { } o.handlers["GET"]["/flags/{flagID}/variants"] = variant.NewFindVariants(o.context, o.VariantFindVariantsHandler) + if o.handlers["GET"] == nil { + o.handlers["GET"] = make(map[string]http.Handler) + } + o.handlers["GET"]["/export/eval_cache/json"] = export.NewGetExportEvalCacheJSON(o.context, o.ExportGetExportEvalCacheJSONHandler) + if o.handlers["GET"] == nil { o.handlers["GET"] = make(map[string]http.Handler) } diff --git a/vendor/github.com/bouk/monkey/LICENSE.md b/vendor/github.com/bouk/monkey/LICENSE.md deleted file mode 100644 index c591ea09..00000000 --- a/vendor/github.com/bouk/monkey/LICENSE.md +++ /dev/null @@ -1,3 +0,0 @@ -Copyright Bouke van der Bijl - -I do not give anyone permissions to use this tool for any purpose. Don't use it. diff --git a/vendor/github.com/bouk/monkey/README.md b/vendor/github.com/bouk/monkey/README.md deleted file mode 100644 index bc25a5aa..00000000 --- a/vendor/github.com/bouk/monkey/README.md +++ /dev/null @@ -1,112 +0,0 @@ -# Go monkeypatching :monkey_face: :monkey: - -Actual arbitrary monkeypatching for Go. Yes really. - -Read this blogpost for an explanation on how it works: http://bouk.co/blog/monkey-patching-in-go/ - -## I thought that monkeypatching in Go is impossible? - -It's not possible through regular language constructs, but we can always bend computers to our will! Monkey implements monkeypatching by rewriting the running executable at runtime and inserting a jump to the function you want called instead. **This is as unsafe as it sounds and I don't recommend anyone do it outside of a testing environment.** - -Make sure you read the notes at the bottom of the README if you intend to use this library. - -## Using monkey - -Monkey's API is very simple and straightfoward. Call `monkey.Patch(, )` to replace a function. For example: - -```go -package main - -import ( - "fmt" - "os" - "strings" - - "github.com/bouk/monkey" -) - -func main() { - monkey.Patch(fmt.Println, func(a ...interface{}) (n int, err error) { - s := make([]interface{}, len(a)) - for i, v := range a { - s[i] = strings.Replace(fmt.Sprint(v), "hell", "*bleep*", -1) - } - return fmt.Fprintln(os.Stdout, s...) - }) - fmt.Println("what the hell?") // what the *bleep*? -} -``` - -You can then call `monkey.Unpatch()` to unpatch the method again. The replacement function can be any function value, whether it's anonymous, bound or otherwise. - -If you want to patch an instance method you need to use `monkey.PatchInstanceMethod(, , )`. 
You get the type by using `reflect.TypeOf`, and your replacement function simply takes the instance as the first argument. To disable all network connections, you can do as follows for example: - -```go -package main - -import ( - "fmt" - "net" - "net/http" - "reflect" - - "github.com/bouk/monkey" -) - -func main() { - var d *net.Dialer // Has to be a pointer to because `Dial` has a pointer receiver - monkey.PatchInstanceMethod(reflect.TypeOf(d), "Dial", func(_ *net.Dialer, _, _ string) (net.Conn, error) { - return nil, fmt.Errorf("no dialing allowed") - }) - _, err := http.Get("http://google.com") - fmt.Println(err) // Get http://google.com: no dialing allowed -} - -``` - -Note that patching the method for just one instance is currently not possible, `PatchInstanceMethod` will patch it for all instances. Don't bother trying `monkey.Patch(instance.Method, replacement)`, it won't work. `monkey.UnpatchInstanceMethod(, )` will undo `PatchInstanceMethod`. - -If you want to remove all currently applied monkeypatches simply call `monkey.UnpatchAll`. This could be useful in a test teardown function. - -If you want to call the original function from within the replacement you need to use a `monkey.PatchGuard`. A patchguard allows you to easily remove and restore the patch so you can call the original function. For example: - -```go -package main - -import ( - "fmt" - "net/http" - "reflect" - "strings" - - "github.com/bouk/monkey" -) - -func main() { - var guard *monkey.PatchGuard - guard = monkey.PatchInstanceMethod(reflect.TypeOf(http.DefaultClient), "Get", func(c *http.Client, url string) (*http.Response, error) { - guard.Unpatch() - defer guard.Restore() - - if !strings.HasPrefix(url, "https://") { - return nil, fmt.Errorf("only https requests allowed") - } - - return c.Get(url) - }) - - _, err := http.Get("http://google.com") - fmt.Println(err) // only https requests allowed - resp, err := http.Get("https://google.com") - fmt.Println(resp.Status, err) // 200 OK -} -``` - -## Notes - -1. Monkey sometimes fails to patch a function if inlining is enabled. Try running your tests with inlining disabled, for example: `go test -gcflags=-l`. The same command line argument can also be used for build. -2. Monkey won't work on some security-oriented operating system that don't allow memory pages to be both write and execute at the same time. With the current approach there's not really a reliable fix for this. -3. Monkey is not threadsafe. Or any kind of safe. -4. I've tested monkey on OSX 10.10.2 and Ubuntu 14.04. It should work on any unix-based x86 or x86-64 system. 
- -© Bouke van der Bijl diff --git a/vendor/github.com/bouk/monkey/circle.yml b/vendor/github.com/bouk/monkey/circle.yml deleted file mode 100644 index 0647bfea..00000000 --- a/vendor/github.com/bouk/monkey/circle.yml +++ /dev/null @@ -1,3 +0,0 @@ -test: - override: - - script/test diff --git a/vendor/github.com/bouk/monkey/monkey.go b/vendor/github.com/bouk/monkey/monkey.go deleted file mode 100644 index 209f3807..00000000 --- a/vendor/github.com/bouk/monkey/monkey.go +++ /dev/null @@ -1,133 +0,0 @@ -package monkey - -import ( - "fmt" - "reflect" - "sync" - "unsafe" -) - -// patch is an applied patch -// needed to undo a patch -type patch struct { - originalBytes []byte - replacement *reflect.Value -} - -var ( - lock = sync.Mutex{} - - patches = make(map[reflect.Value]patch) -) - -type value struct { - _ uintptr - ptr unsafe.Pointer -} - -func getPtr(v reflect.Value) unsafe.Pointer { - return (*value)(unsafe.Pointer(&v)).ptr -} - -type PatchGuard struct { - target reflect.Value - replacement reflect.Value -} - -func (g *PatchGuard) Unpatch() { - unpatchValue(g.target) -} - -func (g *PatchGuard) Restore() { - patchValue(g.target, g.replacement) -} - -// Patch replaces a function with another -func Patch(target, replacement interface{}) *PatchGuard { - t := reflect.ValueOf(target) - r := reflect.ValueOf(replacement) - patchValue(t, r) - - return &PatchGuard{t, r} -} - -// PatchInstanceMethod replaces an instance method methodName for the type target with replacement -// Replacement should expect the receiver (of type target) as the first argument -func PatchInstanceMethod(target reflect.Type, methodName string, replacement interface{}) *PatchGuard { - m, ok := target.MethodByName(methodName) - if !ok { - panic(fmt.Sprintf("unknown method %s", methodName)) - } - r := reflect.ValueOf(replacement) - patchValue(m.Func, r) - - return &PatchGuard{m.Func, r} -} - -func patchValue(target, replacement reflect.Value) { - lock.Lock() - defer lock.Unlock() - - if target.Kind() != reflect.Func { - panic("target has to be a Func") - } - - if replacement.Kind() != reflect.Func { - panic("replacement has to be a Func") - } - - if target.Type() != replacement.Type() { - panic(fmt.Sprintf("target and replacement have to have the same type %s != %s", target.Type(), replacement.Type())) - } - - if patch, ok := patches[target]; ok { - unpatch(target, patch) - } - - bytes := replaceFunction(*(*uintptr)(getPtr(target)), uintptr(getPtr(replacement))) - patches[target] = patch{bytes, &replacement} -} - -// Unpatch removes any monkey patches on target -// returns whether target was patched in the first place -func Unpatch(target interface{}) bool { - return unpatchValue(reflect.ValueOf(target)) -} - -// UnpatchInstanceMethod removes the patch on methodName of the target -// returns whether it was patched in the first place -func UnpatchInstanceMethod(target reflect.Type, methodName string) bool { - m, ok := target.MethodByName(methodName) - if !ok { - panic(fmt.Sprintf("unknown method %s", methodName)) - } - return unpatchValue(m.Func) -} - -// UnpatchAll removes all applied monkeypatches -func UnpatchAll() { - lock.Lock() - defer lock.Unlock() - for target, p := range patches { - unpatch(target, p) - delete(patches, target) - } -} - -// Unpatch removes a monkeypatch from the specified function -// returns whether the function was patched in the first place -func unpatchValue(target reflect.Value) bool { - lock.Lock() - defer lock.Unlock() - patch, ok := patches[target] - if !ok { - return false - } - 
unpatch(target, patch) - delete(patches, target) - return true -} - -func unpatch(target reflect.Value, p patch) { - copyToLocation(*(*uintptr)(getPtr(target)), p.originalBytes) -} diff --git a/vendor/github.com/bouk/monkey/monkey_386.go b/vendor/github.com/bouk/monkey/monkey_386.go deleted file mode 100644 index 16ed7930..00000000 --- a/vendor/github.com/bouk/monkey/monkey_386.go +++ /dev/null @@ -1,13 +0,0 @@ -package monkey - -// Assembles a jump to a function value -func jmpToFunctionValue(to uintptr) []byte { - return []byte{ - 0xBA, - byte(to), - byte(to >> 8), - byte(to >> 16), - byte(to >> 24), // mov edx,to - 0xFF, 0x22, // jmp DWORD PTR [edx] - } -} diff --git a/vendor/github.com/bouk/monkey/monkey_amd64.go b/vendor/github.com/bouk/monkey/monkey_amd64.go deleted file mode 100644 index c21da5d5..00000000 --- a/vendor/github.com/bouk/monkey/monkey_amd64.go +++ /dev/null @@ -1,17 +0,0 @@ -package monkey - -// Assembles a jump to a function value -func jmpToFunctionValue(to uintptr) []byte { - return []byte{ - 0x48, 0xBA, - byte(to), - byte(to >> 8), - byte(to >> 16), - byte(to >> 24), - byte(to >> 32), - byte(to >> 40), - byte(to >> 48), - byte(to >> 56), // movabs rdx,to - 0xFF, 0x22, // jmp QWORD PTR [rdx] - } -} diff --git a/vendor/github.com/bouk/monkey/replace.go b/vendor/github.com/bouk/monkey/replace.go deleted file mode 100644 index 1e347429..00000000 --- a/vendor/github.com/bouk/monkey/replace.go +++ /dev/null @@ -1,31 +0,0 @@ -package monkey - -import ( - "reflect" - "syscall" - "unsafe" -) - -func rawMemoryAccess(p uintptr, length int) []byte { - return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{ - Data: p, - Len: length, - Cap: length, - })) -} - -func pageStart(ptr uintptr) uintptr { - return ptr & ^(uintptr(syscall.Getpagesize() - 1)) -} - -// from is a pointer to the actual function -// to is a pointer to a go funcvalue -func replaceFunction(from, to uintptr) (original []byte) { - jumpData := jmpToFunctionValue(to) - f := rawMemoryAccess(from, len(jumpData)) - original = make([]byte, len(f)) - copy(original, f) - - copyToLocation(from, jumpData) - return -} diff --git a/vendor/github.com/bouk/monkey/replace_unix.go b/vendor/github.com/bouk/monkey/replace_unix.go deleted file mode 100644 index 34b100f3..00000000 --- a/vendor/github.com/bouk/monkey/replace_unix.go +++ /dev/null @@ -1,26 +0,0 @@ -//+build !windows - -package monkey - -import ( - "syscall" -) - -// this function is super unsafe -// aww yeah -// It copies a slice to a raw memory location, disabling all memory protection before doing so. 
-func copyToLocation(location uintptr, data []byte) { - f := rawMemoryAccess(location, len(data)) - - page := rawMemoryAccess(pageStart(location), syscall.Getpagesize()) - err := syscall.Mprotect(page, syscall.PROT_READ|syscall.PROT_WRITE|syscall.PROT_EXEC) - if err != nil { - panic(err) - } - copy(f, data[:]) - - err = syscall.Mprotect(page, syscall.PROT_READ|syscall.PROT_EXEC) - if err != nil { - panic(err) - } -} diff --git a/vendor/github.com/bouk/monkey/replace_windows.go b/vendor/github.com/bouk/monkey/replace_windows.go deleted file mode 100644 index b09091b3..00000000 --- a/vendor/github.com/bouk/monkey/replace_windows.go +++ /dev/null @@ -1,44 +0,0 @@ -package monkey - -import ( - "syscall" - "unsafe" -) - -const PAGE_EXECUTE_READWRITE = 0x40 - -var procVirtualProtect = syscall.NewLazyDLL("kernel32.dll").NewProc("VirtualProtect") - -func virtualProtect(lpAddress uintptr, dwSize int, flNewProtect uint32, lpflOldProtect unsafe.Pointer) error { - ret, _, _ := procVirtualProtect.Call( - lpAddress, - uintptr(dwSize), - uintptr(flNewProtect), - uintptr(lpflOldProtect)) - if ret == 0 { - return syscall.GetLastError() - } - return nil -} - -// this function is super unsafe -// aww yeah -// It copies a slice to a raw memory location, disabling all memory protection before doing so. -func copyToLocation(location uintptr, data []byte) { - f := rawMemoryAccess(location, len(data)) - - var oldPerms uint32 - err := virtualProtect(location, len(data), PAGE_EXECUTE_READWRITE, unsafe.Pointer(&oldPerms)) - if err != nil { - panic(err) - } - copy(f, data[:]) - - // VirtualProtect requires you to pass in a pointer which it can write the - // current memory protection permissions to, even if you don't want them. - var tmp uint32 - err = virtualProtect(location, len(data), oldPerms, unsafe.Pointer(&tmp)) - if err != nil { - panic(err) - } -} diff --git a/vendor/github.com/zhouzhuojie/withtimeout/.gitignore b/vendor/github.com/zhouzhuojie/withtimeout/.gitignore new file mode 100644 index 00000000..22d0d82f --- /dev/null +++ b/vendor/github.com/zhouzhuojie/withtimeout/.gitignore @@ -0,0 +1 @@ +vendor diff --git a/vendor/github.com/zhouzhuojie/withtimeout/LICENSE b/vendor/github.com/zhouzhuojie/withtimeout/LICENSE new file mode 100644 index 00000000..3ee01626 --- /dev/null +++ b/vendor/github.com/zhouzhuojie/withtimeout/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2014 Brave New Software Project, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/zhouzhuojie/withtimeout/Makefile b/vendor/github.com/zhouzhuojie/withtimeout/Makefile new file mode 100644 index 00000000..7e635d63 --- /dev/null +++ b/vendor/github.com/zhouzhuojie/withtimeout/Makefile @@ -0,0 +1,11 @@ +tidy: + @GO111MODULE=on go mod tidy + +vendor: tidy + @GO111MODULE=on go mod vendor + +build: + @GO111MODULE=on go build + +test: + @GO111MODULE=on go test -race -covermode=atomic . diff --git a/vendor/github.com/zhouzhuojie/withtimeout/README.md b/vendor/github.com/zhouzhuojie/withtimeout/README.md new file mode 100644 index 00000000..27fc017d --- /dev/null +++ b/vendor/github.com/zhouzhuojie/withtimeout/README.md @@ -0,0 +1,4 @@ +withtimeout [![GoDoc](https://godoc.org/github.com/zhouzhuojie/withtimeout?status.png)](http://godoc.org/github.com/zhouzhuojie/withtimeout) +========== + +`withtimeout` provides a Golang package that allows adding a timeout to any operation. It's based on `github.com/getlantern/withtimeout` with plain old goroutines, thus no extra dependencies. diff --git a/vendor/github.com/zhouzhuojie/withtimeout/go.mod b/vendor/github.com/zhouzhuojie/withtimeout/go.mod new file mode 100644 index 00000000..3beeae00 --- /dev/null +++ b/vendor/github.com/zhouzhuojie/withtimeout/go.mod @@ -0,0 +1,5 @@ +module github.com/zhouzhuojie/withtimeout + +go 1.12 + +require github.com/stretchr/testify v1.3.0 diff --git a/vendor/github.com/zhouzhuojie/withtimeout/go.sum b/vendor/github.com/zhouzhuojie/withtimeout/go.sum new file mode 100644 index 00000000..0ebf81c6 --- /dev/null +++ b/vendor/github.com/zhouzhuojie/withtimeout/go.sum @@ -0,0 +1,6 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= diff --git a/vendor/github.com/zhouzhuojie/withtimeout/withtimeout.go b/vendor/github.com/zhouzhuojie/withtimeout/withtimeout.go new file mode 100644 index 00000000..78faf19a --- /dev/null +++ b/vendor/github.com/zhouzhuojie/withtimeout/withtimeout.go @@ -0,0 +1,39 @@ +// Package withtimeout provides functionality for performing operations with +// a timeout. +package withtimeout + +import ( + "time" +) + +const ( + timeoutErrorString = "withtimeout: Operation timed out" +) + +type timeoutError struct{} + +func (timeoutError) Error() string { return timeoutErrorString } + +// Do executes the given fn and returns either the result of executing it or an +// error if fn did not complete within timeout. If execution timed out, timedOut +// will be true. +func Do(timeout time.Duration, fn func() (interface{}, error)) (result interface{}, timedOut bool, err error) { + resultCh := make(chan *resultWithError, 1) + + go func() { + result, err := fn() + resultCh <- &resultWithError{result, err} + }() + + select { + case <-time.After(timeout): + return nil, true, timeoutError{} + case rwe := <-resultCh: + return rwe.result, false, rwe.err + } +} + +type resultWithError struct { + result interface{} + err error +}
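Usage note (not part of the patch): a minimal sketch of how the newly vendored `withtimeout.Do` helper is called, based only on the signature shown above. The 100ms budget and the fake slow fetch are illustrative assumptions, not values taken from this change.

package main

import (
	"fmt"
	"time"

	"github.com/zhouzhuojie/withtimeout"
)

func main() {
	// Run a deliberately slow operation under a 100ms budget.
	result, timedOut, err := withtimeout.Do(100*time.Millisecond, func() (interface{}, error) {
		time.Sleep(time.Second) // simulate a slow fetch
		return "flags", nil
	})
	switch {
	case timedOut:
		fmt.Println("timed out:", err) // err is the package's timeoutError
	case err != nil:
		fmt.Println("failed:", err)
	default:
		fmt.Println("got:", result)
	}
}

Because resultCh in withtimeout.go is buffered with capacity 1, the worker goroutine can still deliver its result after the deadline fires without blocking on an unread channel, so a timed-out call does not leak the goroutine's send.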