diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index c6e83f56..bfa74272 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -8,8 +8,8 @@ permissions:
 jobs:
   golangci:
     permissions:
-      contents: read # for actions/checkout to fetch code
-      pull-requests: read # for golangci/golangci-lint-action to fetch pull requests
+      contents: read # for actions/checkout to fetch code
+      pull-requests: read # for golangci/golangci-lint-action to fetch pull requests
     name: lint
     runs-on: ubuntu-latest
     steps:
@@ -30,4 +30,4 @@ jobs:
           cd hack/migrate
           go mod tidy
           changed_files=$(git status -s)
-          [[ -z "$changed_files" ]] || (printf "Change is detected in: \n$changed_files\n Did you run 'go mod tidy' before sending the PR?" && exit 1)
+          [[ -z "$changed_files" ]] || ( git diff && printf "Change is detected in: \n$changed_files\n Did you run 'go mod tidy' before sending the PR?" && exit 1)
diff --git a/context/properties.go b/context/properties.go
index 2d5ee4f9..bd705968 100644
--- a/context/properties.go
+++ b/context/properties.go
@@ -9,12 +9,15 @@ import (
     "strings"
     "time"
 
+    "github.com/flanksource/commons/console"
     "github.com/flanksource/commons/logger"
     "github.com/flanksource/duty/models"
+    cmap "github.com/orcaman/concurrent-map/v2"
     "github.com/patrickmn/go-cache"
 )
 
 var Local map[string]string
+var supportedProperties = cmap.New[string]()
 
 var propertyCache = cache.New(time.Minute*15, time.Minute*15)
 
@@ -22,43 +25,92 @@ func (k Context) ClearCache() {
     propertyCache = cache.New(time.Minute*15, time.Minute*15)
 }
 
+func nilSafe(v interface{}) string {
+    if v == nil {
+        return ""
+    }
+    return fmt.Sprintf("%v", v)
+}
+func newProp(key, def string, val interface{}) {
+    if loaded := supportedProperties.SetIfAbsent(key, fmt.Sprintf("%s", val)); loaded {
+        if val == nil {
+            logger.Tracef("property: %s=%v", key, console.Grayf(nilSafe(def)))
+        } else {
+            logger.Debugf("property: %s=%v (default %v)", key, console.Greenf("%s", val), nilSafe(def))
+        }
+    }
+}
+
+func (p Properties) SupportedProperties() map[string]string {
+    m := make(map[string]string)
+    for t := range supportedProperties.IterBuffered() {
+        m[t.Key] = nilSafe(t.Val)
+    }
+    return m
+}
+
 type Properties map[string]string
 
-func (p Properties) On(key string) bool {
-    return p[key] == "true" || p[key] == "off"
+// On returns true if the property is true|enabled|on; if none of the given keys is set, it returns def.
+func (p Properties) On(def bool, keys ...string) bool {
+    for _, key := range keys {
+        k, ok := p[key]
+        if ok {
+            v := k == "true" || k == "enabled" || k == "on"
+            newProp(key, fmt.Sprintf("%v", def), v)
+            return v
+        }
+        newProp(key, fmt.Sprintf("%v", def), nil)
+    }
+    return def
 }
 
 func (p Properties) Duration(key string, def time.Duration) time.Duration {
     if d, ok := p[key]; !ok {
+        newProp(key, fmt.Sprintf("%v", def), nil)
         return def
     } else if dur, err := time.ParseDuration(d); err != nil {
         logger.Warnf("property[%s] invalid duration %s", key, d)
         return def
     } else {
+        newProp(key, fmt.Sprintf("%v", def), dur)
         return dur
     }
 }
 
 func (p Properties) Int(key string, def int) int {
     if d, ok := p[key]; !ok {
+        newProp(key, fmt.Sprintf("%v", def), nil)
         return def
     } else if i, err := strconv.Atoi(d); err != nil {
        logger.Warnf("property[%s] invalid int %s", key, d)
         return def
     } else {
+        newProp(key, fmt.Sprintf("%v", def), i)
         return i
     }
 }
 
 func (p Properties) String(key string, def string) string {
     if d, ok := p[key]; ok {
+        newProp(key, fmt.Sprintf("%v", def), d)
         return d
     }
+    newProp(key, fmt.Sprintf("%v", def), nil)
     return def
+
 }
 
-func (p Properties) Off(key string) bool {
-    return p[key] == "false" || p[key] == "disabled"
+// Off returns true if the property is false|disabled|off; if the property is not set, it returns def.
+func (p Properties) Off(key string, def bool) bool {
+    k, ok := p[key]
+    if !ok {
+        newProp(key, fmt.Sprintf("%v", def), nil)
+        return def
+    }
+    v := k == "false" || k == "disabled" || k == "off"
+    newProp(key, fmt.Sprintf("%v", def), v)
+    return v
 }
 
 // Properties returns a cached map of properties
diff --git a/go.mod b/go.mod
index e04e50d8..b7bbaab7 100644
--- a/go.mod
+++ b/go.mod
@@ -32,6 +32,7 @@ require (
     github.com/ohler55/ojg v1.20.3
     github.com/onsi/ginkgo/v2 v2.17.2
     github.com/onsi/gomega v1.33.0
+    github.com/orcaman/concurrent-map/v2 v2.0.1
     github.com/patrickmn/go-cache v2.1.0+incompatible
     github.com/prometheus/client_golang v1.14.0
     github.com/robfig/cron/v3 v3.0.1
diff --git a/go.sum b/go.sum
index beaf6489..8d8e71ae 100644
--- a/go.sum
+++ b/go.sum
@@ -1266,6 +1266,8 @@ github.com/onsi/gomega v1.33.0/go.mod h1:+925n5YtiFsLzzafLUHzVMBpvvRAzrydIBiSIxj
 github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
 github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
 github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
+github.com/orcaman/concurrent-map/v2 v2.0.1 h1:jOJ5Pg2w1oeB6PeDurIYf6k9PQ+aTITr/6lP/L/zp6c=
+github.com/orcaman/concurrent-map/v2 v2.0.1/go.mod h1:9Eq3TG2oBe5FirmYWQfYO5iH1q0Jv47PLaNK++uCdOM=
 github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
 github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
 github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
diff --git a/job/job.go b/job/job.go
index 0f47bef3..cf4104b9 100644
--- a/job/job.go
+++ b/job/job.go
@@ -324,12 +324,24 @@ func (j *Job) Run() {
     }
 }
 
-func getProperty(j *Job, properties map[string]string, property string) (string, bool) {
-    if val, ok := properties[j.Name+"."+property]; ok {
-        return val, ok
+func (j *Job) getPropertyNames(key string) []string {
+    if j.ID == "" {
+        return []string{
+            fmt.Sprintf("jobs.%s.%s", j.Name, key),
+            fmt.Sprintf("jobs.%s", key)}
     }
-    if val, ok := properties[fmt.Sprintf("%s[%s].%s", j.Name, j.ID, property)]; ok {
-        return val, ok
+    return []string{
+        fmt.Sprintf("jobs.%s.%s.%s", j.Name, j.ID, key),
+        fmt.Sprintf("jobs.%s.%s", j.Name, key),
+        fmt.Sprintf("jobs.%s", key)}
+}
+
+func (j *Job) GetProperty(property string) (string, bool) {
+    if val := j.Context.Properties().String(j.Name+"."+property, ""); val != "" {
+        return val, true
+    }
+    if val := j.Context.Properties().String(fmt.Sprintf("%s[%s].%s", j.Name, j.ID, property), ""); val != "" {
+        return val, true
     }
     return "", false
 }
@@ -346,12 +358,11 @@ func (j *Job) init() error {
 
     j.lastHistoryCleanup = time.Now()
 
-    properties := j.Context.Properties()
-    if schedule, ok := getProperty(j, properties, "schedule"); ok {
+    if schedule, ok := j.GetProperty("schedule"); ok {
         j.Schedule = schedule
     }
 
-    if timeout, ok := getProperty(j, properties, "timeout"); ok {
+    if timeout, ok := j.GetProperty("timeout"); ok {
         duration, err := time.ParseDuration(timeout)
         if err != nil {
             j.Context.Warnf("invalid timeout %s", timeout)
@@ -359,21 +370,9 @@ func (j *Job) init() error {
         j.Timeout = duration
     }
 
-    if history, ok := getProperty(j, properties, "history"); ok {
-        j.JobHistory = !(history != "false")
-    }
-
-    if trace := properties["jobs.trace"]; trace == "true" {
-        j.Trace = true
-    } else if trace, ok := getProperty(j, properties, "trace"); ok {
-        j.Trace = trace == "true"
-    }
-
-    if debug := properties["jobs.debug"]; debug == "true" {
-        j.Debug = true
-    } else if debug, ok := getProperty(j, properties, "debug"); ok {
-        j.Debug = debug == "true"
-    }
+    j.JobHistory = j.Properties().On(true, j.getPropertyNames("history")...)
+    j.Trace = j.Properties().On(false, j.getPropertyNames("trace")...)
+    j.Debug = j.Properties().On(false, j.getPropertyNames("debug")...)
 
     // Set default retention if it is unset
     if j.Retention.Empty() {
@@ -408,7 +407,7 @@ func (j *Job) init() error {
 
     j.Context = j.Context.WithObject(obj)
 
-    if dbLevel, ok := getProperty(j, properties, "db-log-level"); ok {
+    if dbLevel, ok := j.GetProperty("db-log-level"); ok {
         j.Context = j.Context.WithDBLogLevel(dbLevel)
     }
 
@@ -452,7 +451,7 @@ func (j *Job) GetResourcedName() string {
 
 func (j *Job) AddToScheduler(cronRunner *cron.Cron) error {
     cronRunner.Start()
     schedule := j.Schedule
-    if override, ok := getProperty(j, j.Context.Properties(), "schedule"); ok {
+    if override, ok := j.GetProperty("schedule"); ok {
         schedule = override
     }
diff --git a/tests/setup/common.go b/tests/setup/common.go
index 6be58871..19089b2c 100644
--- a/tests/setup/common.go
+++ b/tests/setup/common.go
@@ -62,6 +62,8 @@ func MustDB() *sql.DB {
 }
 
 var WithoutDummyData = "without_dummy_data"
+var WithExistingDatabase = "with_existing_database"
+var recreateDatabase = os.Getenv("DUTY_DB_CREATE") != "false"
 
 func BeforeSuiteFn(args ...interface{}) context.Context {
     logger.UseZap()
@@ -72,6 +74,9 @@ func BeforeSuiteFn(args ...interface{}) context.Context {
         if arg == WithoutDummyData {
             importDummyData = false
         }
+        if arg == WithExistingDatabase {
+            recreateDatabase = false
+        }
     }
 
     logger.Infof("Initializing test db debug=%v db.trace=%v", trace, dbTrace)
@@ -93,7 +98,9 @@ func BeforeSuiteFn(args ...interface{}) context.Context {
 
     PgUrl = fmt.Sprintf("postgres://postgres:postgres@localhost:%d/%s?sslmode=disable", port, dbName)
     url := os.Getenv("DUTY_DB_URL")
-    if url != "" {
+    if url != "" && !recreateDatabase {
+        PgUrl = url
+    } else if url != "" && recreateDatabase {
         postgresDBUrl = url
         dbName = fmt.Sprintf("duty_gingko%d", port)
         PgUrl = strings.Replace(url, "/postgres", "/"+dbName, 1)
@@ -101,7 +108,7 @@ func BeforeSuiteFn(args ...interface{}) context.Context {
         if err := execPostgres(postgresDBUrl, "CREATE DATABASE "+dbName); err != nil {
             panic(fmt.Sprintf("Cannot create %s: %v", dbName, err))
         }
-    } else {
+    } else if url == "" {
         config, _ := GetEmbeddedPGConfig(dbName, port)
         postgresServer = embeddedPG.NewDatabase(config)
         if err = postgresServer.Start(); err != nil {
@@ -165,7 +172,7 @@ func AfterSuiteFn() {
         if err := postgresServer.Stop(); err != nil {
             ginkgo.Fail(err.Error())
         }
-    } else {
+    } else if recreateDatabase {
         if err := execPostgres(postgresDBUrl, fmt.Sprintf("DROP DATABASE %s (FORCE)", dbName)); err != nil {
             ginkgo.Fail(fmt.Sprintf("Cannot drop %s: %v", dbName, err))
         }
diff --git a/views/018_playbooks.sql b/views/018_playbooks.sql
index 0583d245..cff030ca 100644
--- a/views/018_playbooks.sql
+++ b/views/018_playbooks.sql
@@ -1,89 +1,88 @@
 -- Notify playbook action created or status updated
 CREATE OR REPLACE FUNCTION notify_playbook_action_update()
 RETURNS TRIGGER AS $$
-BEGIN
-  IF TG_OP = 'INSERT' THEN
-    NOTIFY playbook_action_updates;
-  ELSEIF TG_OP = 'UPDATE' THEN
-    IF OLD.status != NEW.status AND NEW.status = 'scheduled' THEN
+  BEGIN
+    IF TG_OP = 'INSERT' THEN
       NOTIFY playbook_action_updates;
+    ELSEIF TG_OP = 'UPDATE' THEN
+      IF OLD.status != NEW.status AND NEW.status = 'scheduled' THEN
+        NOTIFY playbook_action_updates;
+      END IF;
     END IF;
-  END IF;
-
-  RETURN NULL;
-END
-$$ LANGUAGE plpgsql;
+
+    RETURN NULL;
+  END
+  $$ LANGUAGE plpgsql;
 
 CREATE OR REPLACE TRIGGER playbook_action_updates
-AFTER INSERT OR UPDATE ON playbook_run_actions
-FOR EACH ROW
-EXECUTE PROCEDURE notify_playbook_action_update();
+  AFTER INSERT OR UPDATE ON playbook_run_actions
+  FOR EACH ROW
+  EXECUTE PROCEDURE notify_playbook_action_update();
 
 -- Notify playbook run created or status updated
 CREATE OR REPLACE FUNCTION notify_playbook_run_update()
 RETURNS TRIGGER AS $$
-BEGIN
-  IF TG_OP = 'INSERT' THEN
-    NOTIFY playbook_run_updates;
-  ELSEIF TG_OP = 'UPDATE' THEN
-    IF OLD.status != NEW.status AND NEW.status = 'scheduled' THEN
+  BEGIN
+    IF TG_OP = 'INSERT' THEN
       NOTIFY playbook_run_updates;
+    ELSEIF TG_OP = 'UPDATE' THEN
+      IF OLD.status != NEW.status AND NEW.status = 'scheduled' THEN
+        NOTIFY playbook_run_updates;
+      END IF;
     END IF;
-  END IF;
-
-  RETURN NULL;
-END
-$$ LANGUAGE plpgsql;
+
+    RETURN NULL;
+  END
+  $$ LANGUAGE plpgsql;
 
 CREATE OR REPLACE TRIGGER playbook_run_updates
-AFTER INSERT OR UPDATE ON playbook_runs
-FOR EACH ROW
-EXECUTE PROCEDURE notify_playbook_run_update();
+  AFTER INSERT OR UPDATE ON playbook_runs
+  FOR EACH ROW
+  EXECUTE PROCEDURE notify_playbook_run_update();
 
 -- Notify playbook updates
-CREATE OR REPLACE FUNCTION notify_playbook_update()
-RETURNS TRIGGER AS $$
-DECLARE payload TEXT;
-BEGIN
-  payload = NEW.id::TEXT;
-  PERFORM pg_notify('playbook_updated', payload);
-  RETURN NULL;
-END;
-$$ LANGUAGE plpgsql;
+CREATE OR REPLACE FUNCTION notify_playbook_update()
+  RETURNS TRIGGER AS $$
+  DECLARE payload TEXT;
+  BEGIN
+    payload = NEW.id::TEXT;
+    PERFORM pg_notify('playbook_updated', payload);
+    RETURN NULL;
+  END;
+  $$ LANGUAGE plpgsql;
 
 CREATE OR REPLACE TRIGGER playbook_updated_trigger
-AFTER UPDATE ON playbooks
-FOR EACH ROW
-EXECUTE PROCEDURE notify_playbook_update();
+  AFTER UPDATE ON playbooks
+  FOR EACH ROW
+  EXECUTE PROCEDURE notify_playbook_update();
 
 -- List of all the playbooks that can be run by an agent
 DROP VIEW IF EXISTS playbooks_for_agent;
-CREATE OR REPLACE VIEW
-playbooks_for_agent AS
-WITH interim AS (
+CREATE OR REPLACE VIEW playbooks_for_agent AS
+  WITH interim AS (
+    SELECT
+      id,
+      jsonb_array_elements_text(spec -> 'runsOn') AS agent_name
+    FROM
+      playbooks
+    WHERE
+      spec ->> 'runsOn' IS NOT NULL
+  )
   SELECT
-    id,
-    jsonb_array_elements_text(spec -> 'runsOn') AS agent_name
+    interim.agent_name,
+    agents.person_id,
+    agents.id as agent_id,
+    jsonb_agg(interim.id) AS playbook_ids
   FROM
-    playbooks
-  WHERE
-    spec ->> 'runsOn' IS NOT NULL
-)
-SELECT
-  interim.agent_name,
-  agents.person_id,
-  agents.id as agent_id,
-  jsonb_agg(interim.id) AS playbook_ids
-FROM
-  interim
-  INNER JOIN agents ON interim.agent_name :: TEXT = agents.name
-GROUP BY agent_name, agents.person_id, agent_id;
+    interim
+    INNER JOIN agents ON interim.agent_name :: TEXT = agents.name
+  GROUP BY agent_name, agents.person_id, agent_id;
 
 DROP VIEW IF EXISTS playbook_names;
 CREATE OR REPLACE VIEW playbook_names AS
 SELECT
   id,
   name,
-  description,
+  spec ->> 'description' AS description,
   spec ->> 'category' AS category,
   spec ->> 'icon' AS icon
 FROM
@@ -91,4 +90,4 @@ CREATE OR REPLACE VIEW playbook_names AS
 WHERE
   deleted_at IS NULL
 ORDER BY
-  name;
\ No newline at end of file
+  name;