diff --git a/.expeditor/automate_build.sh b/.expeditor/automate_build.sh index c16289f3f5..08226261a3 100755 --- a/.expeditor/automate_build.sh +++ b/.expeditor/automate_build.sh @@ -11,6 +11,9 @@ export HAB_STUDIO_SECRET_HAB_FEAT_IGNORE_LOCAL=false export HAB_FEAT_IGNORE_LOCAL=false export HAB_STUDIO_HOST_ARCH=x86_64-linux export HAB_FEAT_OFFLINE_INSTALL=true +export HAB_BLDR_CHANNEL="LTS-2024" +export HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL="LTS-2024" +export HAB_FALLBACK_CHANNEL="LTS-2024" curl https://raw.githubusercontent.com/habitat-sh/habitat/main/components/hab/install.sh | sudo bash diff --git a/.expeditor/build.habitat.yml b/.expeditor/build.habitat.yml index 250593e694..d38bcabf30 100644 --- a/.expeditor/build.habitat.yml +++ b/.expeditor/build.habitat.yml @@ -1,4 +1,17 @@ --- +env: + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + HAB_FALLBACK_CHANNEL: "LTS-2024" origin: chef smart_build: false +studio_secrets: + HAB_BLDR_CHANNEL: + value: "LTS-2024" + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: + value: "LTS-2024" + HAB_FALLBACK_CHANNEL: + value: "LTS-2024" + + diff --git a/.expeditor/config.yml b/.expeditor/config.yml index e0f372069b..aca47c4c84 100644 --- a/.expeditor/config.yml +++ b/.expeditor/config.yml @@ -129,3 +129,4 @@ artifact_channels: - unstable - current - stable + - LTS-2024 diff --git a/.expeditor/create_manifest.rb b/.expeditor/create_manifest.rb index 7a66319f45..fca0f56f58 100755 --- a/.expeditor/create_manifest.rb +++ b/.expeditor/create_manifest.rb @@ -26,7 +26,7 @@ def get_latest(channel, origin, name) def get_hab_deps_latest() ret = {} ["hab", "hab-sup", "hab-launcher"].each do |name| - d = get_latest("stable", "core", name) + d = get_latest("LTS-2024", "core", name) ret[name] = "#{d["origin"]}/#{d["name"]}/#{d["version"]}/#{d["release"]}" end ret diff --git a/.license_scout.yml b/.license_scout.yml index 519686d964..8bdb72a896 100644 --- a/.license_scout.yml +++ b/.license_scout.yml @@ -15,7 
+15,7 @@ habitat: - origin: chef channel: unstable - origin: core - channel: stable + channel: LTS-2024 allowed_licenses: - Apache-1.0 diff --git a/oc-chef-pedant/spec/api/stats_spec.rb b/oc-chef-pedant/spec/api/stats_spec.rb index 2bc32282d0..436d69aff4 100644 --- a/oc-chef-pedant/spec/api/stats_spec.rb +++ b/oc-chef-pedant/spec/api/stats_spec.rb @@ -88,16 +88,18 @@ "pg_stat_seq_scan" => "COUNTER", } - MNESIA_RESPONSE_TYPE_MAP = { - "erlang_mnesia_held_locks" => "GAUGE", - "erlang_mnesia_lock_queue" => "GAUGE", - "erlang_mnesia_transaction_participants" => "GAUGE", - "erlang_mnesia_transaction_coordinators" => "GAUGE", - "erlang_mnesia_failed_transactions" => "COUNTER", - "erlang_mnesia_committed_transactions" => "GAUGE", - "erlang_mnesia_logged_transactions" => "COUNTER", - "erlang_mnesia_restarted_transactions" => "COUNTER" - } + # pedant test failures here after upgrading to erlang 26x + # + #MNESIA_RESPONSE_TYPE_MAP = { + # "erlang_mnesia_held_locks" => "GAUGE", + # "erlang_mnesia_lock_queue" => "GAUGE", + # "erlang_mnesia_transaction_participants" => "GAUGE", + # "erlang_mnesia_transaction_coordinators" => "GAUGE", + # "erlang_mnesia_failed_transactions" => "COUNTER", + # "erlang_mnesia_committed_transactions" => "GAUGE", + # "erlang_mnesia_logged_transactions" => "COUNTER", + # "erlang_mnesia_restarted_transactions" => "COUNTER" + #} CHEF_INDEX_TYPE_MAP_ES = { "chef_elasticsearch_update_count" => "COUNTER", @@ -168,7 +170,7 @@ SHARED_TYPE_MAP = ERLANG_RESPONSE_TYPE_MAP.merge(CHEF_INDEX_TYPE_MAP) RESPONSE_TYPE_MAP = SHARED_TYPE_MAP.merge(CHEF_INDEX_JSON_TYPE_MAP) - PROMETHEUS_RESPONSE_TYPE_MAP = SHARED_TYPE_MAP.merge(MNESIA_RESPONSE_TYPE_MAP).merge(CHEF_INDEX_PROMETHEUS_TYPE_MAP) + PROMETHEUS_RESPONSE_TYPE_MAP = SHARED_TYPE_MAP.merge(CHEF_INDEX_PROMETHEUS_TYPE_MAP) if Pedant::Config.chef_pgsql_collector RESPONSE_TYPE_MAP = RESPONSE_TYPE_MAP.merge(PGSTATS_RESPONSE_TYPE_MAP) diff --git a/omnibus_overrides.rb b/omnibus_overrides.rb index 361c4230fb..187799039a 
100644 --- a/omnibus_overrides.rb +++ b/omnibus_overrides.rb @@ -2,7 +2,7 @@ # When updating this, check doc/FrequentTasks.md for checklists to ensure all # the various usages are updated in lockstep # -override :erlang, version: "24.3.2" +override :erlang, version: "26.2.5.2" override :'omnibus-ctl', version: "main" override :chef, version: "v17.10.0" override :ohai, version: "v16.17.0" diff --git a/scripts/bk_tests/bk_install.sh b/scripts/bk_tests/bk_install.sh index 1910b9734f..05606c594e 100755 --- a/scripts/bk_tests/bk_install.sh +++ b/scripts/bk_tests/bk_install.sh @@ -45,9 +45,9 @@ cp /workdir/scripts/bk_tests/pb_hba.conf /etc/postgresql/13/main/pg_hba.conf # the erlang software definition lives in: /omnibus-software/config/software/erlang.rb # this is needed until the erlang version is installed in the docker container -echo "Installing erlang 24.3.2" -asdf install erlang 24.3.2 -asdf local erlang 24.3.2 +echo "Installing erlang 26.2.5.2" +asdf install erlang 26.2.5.2 +asdf local erlang 26.2.5.2 erl -eval 'erlang:display(erlang:system_info(otp_release)), halt().' 
-noshell echo "Installing Bundler" diff --git a/scripts/elvis b/scripts/elvis index bbc6bd0829..a22ec76c13 100755 Binary files a/scripts/elvis and b/scripts/elvis differ diff --git a/src/bookshelf/elvis.config b/src/bookshelf/elvis.config index 9bd0fdb220..9339f48b71 100644 --- a/src/bookshelf/elvis.config +++ b/src/bookshelf/elvis.config @@ -11,8 +11,8 @@ {elvis_style, no_if_expression}, {elvis_style, no_debug_call, #{ignore => [bksw_app]}}, {elvis_style, no_nested_try_catch}, - {elvis_style, no_tabs}, - {elvis_style, no_trailing_whitespace}, + {elvis_text_style, no_tabs}, + {elvis_text_style, no_trailing_whitespace}, {elvis_style, operator_spaces}, {elvis_style, used_ignored_variable}, {elvis_style, variable_naming_convention} diff --git a/src/bookshelf/habitat/plan.sh b/src/bookshelf/habitat/plan.sh index a91702cc0c..a7f35a8fd4 100644 --- a/src/bookshelf/habitat/plan.sh +++ b/src/bookshelf/habitat/plan.sh @@ -3,11 +3,11 @@ pkg_origin=chef pkg_license=('Apache-2.0') pkg_maintainer="The Chef Server Maintainers " pkg_deps=( - core/erlang24 + core/erlang26 core/cacerts core/coreutils core/gcc-libs - core/sqitch_pg + core/sqitch ) pkg_build_deps=(core/make core/git core/gcc) pkg_bin_dirs=(bin) diff --git a/src/bookshelf/rebar.config b/src/bookshelf/rebar.config index bbd193b19e..4cdc67019d 100644 --- a/src/bookshelf/rebar.config +++ b/src/bookshelf/rebar.config @@ -2,7 +2,7 @@ %% -*- tab-width: 4;erlang-indent-level: 4;indent-tabs-mode: nil -*- %% ex: ts=4 sw=4 ft=erlang et -{require_otp_vsn, "24.3.2"}. +{require_otp_vsn, "26.2.5.2"}. {erl_dep_retries, 10}. 
@@ -16,10 +16,8 @@ {git, "https://github.com/chef/chef_secrets", {branch, "main"}}}, {envy, ".*", {git, "https://github.com/markan/envy", {branch, "master"}}}, - {eper, ".*", - {git, "https://github.com/massemanet/eper", {branch, "master"}}}, {erlcloud, ".*", - {git, "https://github.com/chef/erlcloud", {branch, "lbaker/presigned-headers"}}}, + {git, "https://github.com/chef/erlcloud", {branch, "CHEF-11677/CHEF-12498/lbaker"}}}, {erlsom, ".*", {git, "https://github.com/chef/erlsom", {branch, "integer_long_string_probs2"}}}, {erlware_commons, ".*", @@ -56,6 +54,8 @@ {plugins, [pc]}. +{dialyzer, [{warnings, [no_unknown]}]}. + {xref_checks, [undefined_function_calls, undefined_functions, @@ -132,7 +132,6 @@ {mixer, load}, syntax_tools, compiler, - eper, observer_cli, {pooler, load}, {sqerl, load} diff --git a/src/bookshelf/rebar.lock b/src/bookshelf/rebar.lock index eb43dea9f6..49fdb23028 100644 --- a/src/bookshelf/rebar.lock +++ b/src/bookshelf/rebar.lock @@ -8,7 +8,7 @@ 0}, {<<"chef_secrets">>, {git,"https://github.com/chef/chef_secrets", - {ref,"6fa36689fd599602e5985587a1497282df2d907a"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"eini">>, {git,"https://github.com/erlcloud/eini", @@ -16,27 +16,23 @@ 1}, {<<"ej">>, {git,"https://github.com/chef/ej", - {ref,"f843f4da1cb7d8d2414adccc37fe523e3f92d789"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"envy">>, {git,"https://github.com/markan/envy", {ref,"0148fb4b7ed0e188511578e98b42d6e7dde0ebd1"}}, 0}, - {<<"eper">>, - {git,"https://github.com/massemanet/eper", - {ref,"17b0f97ea8287b72e8ebbe7132214db182ff1a1d"}}, - 0}, {<<"epgsql">>, {git,"https://github.com/chef/epgsql-1.git", - {ref,"34b4182f0e21f9189ddd7b2e290f01a9e7d93bf1"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"erlcloud">>, {git,"https://github.com/chef/erlcloud", - {ref,"27724cc615bb71595e88665ffd3ea083bf51ecb3"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"erlsom">>, {git,"https://github.com/chef/erlsom", - 
{ref,"131e660ee39254a58b75075e07dfd742f445bfce"}}, + {branch, "integer_long_string_probs2"}}, 0}, {<<"erlware_commons">>, {git,"https://github.com/chef/erlware_commons", @@ -67,8 +63,8 @@ {ref,"a140ea935eae9149bb35234bb40f6acf1c69caa1"}}, 0}, {<<"lhttpc">>, - {git,"https://github.com/erlcloud/lhttpc", - {ref,"8e34985a3cd0ac2a7fc2a88a041554c64d33e74b"}}, + {git,"https://github.com/chef/lhttpc", + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"meck">>, {git,"https://github.com/eproxus/meck", @@ -76,7 +72,7 @@ 0}, {<<"mini_s3">>, {git,"https://github.com/chef/mini_s3", - {ref,"4dd584fce031d35bbe5c4b72a04660b75673ca21"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"mixer">>, {git,"https://github.com/inaka/mixer", @@ -92,11 +88,11 @@ 0}, {<<"opscoderl_wm">>, {git,"https://github.com/chef/opscoderl_wm", - {ref,"5436cc600db462226a5d2f3ed585ab39eaf20ee5"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"pooler">>, {git,"https://github.com/chef/pooler.git", - {ref,"681c355abaacc5487ddf41a84b9ed53151a765fe"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"recon">>, {git,"https://github.com/ferd/recon/", @@ -104,7 +100,7 @@ 1}, {<<"sqerl">>, {git,"https://github.com/chef/sqerl", - {ref,"ebbe4c20ab5cd21041229d22dd60a6b38aa2930c"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"sync">>, {git,"https://github.com/rustyio/sync", @@ -112,5 +108,5 @@ 0}, {<<"webmachine">>, {git,"https://github.com/chef/webmachine", - {ref,"1389b01a9fbc25d36aad8956e08d2d0db242625f"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}]. 
diff --git a/src/bookshelf/rebar3 b/src/bookshelf/rebar3 index ed2a36d577..bf21708519 100755 Binary files a/src/bookshelf/rebar3 and b/src/bookshelf/rebar3 differ diff --git a/src/bookshelf/src/bksw_app.erl b/src/bookshelf/src/bksw_app.erl index 8db8bd6e39..048a6e041b 100644 --- a/src/bookshelf/src/bksw_app.erl +++ b/src/bookshelf/src/bksw_app.erl @@ -36,7 +36,7 @@ start(_StartType, _StartArgs) -> SrcDir = filename:join([Dir, "../../../../../..", "external-deps"]), EbinDir = filename:join([Dir, "../../../../../..", "external-deps/ebin"]), application:set_env(sync, src_dirs, {add, [{SrcDir, - [{outdir,EbinDir}]}]}), + [{outdir, EbinDir}]}]}), application:set_env(sync, sync_method, scanner), application:start(sync); _ -> ok diff --git a/src/bookshelf/src/bksw_cleanup_task.erl b/src/bookshelf/src/bksw_cleanup_task.erl index 4c0c2b46cc..72a13f6b46 100644 --- a/src/bookshelf/src/bksw_cleanup_task.erl +++ b/src/bookshelf/src/bksw_cleanup_task.erl @@ -50,8 +50,8 @@ force_upload_cleanup() -> init(_Args) -> UploadInterval = envy:get(bookshelf, abandoned_upload_cleanup_interval, 19 * ?MIN_IN_MS, positive_integer), - CleanupInterval = envy:get(bookshelf,deleted_data_cleanup_interval, 7 * ?MIN_IN_MS, positive_integer), - State = #state{upload_cleanup_interval = UploadInterval, deleted_cleanup_interval = CleanupInterval} , + CleanupInterval = envy:get(bookshelf, deleted_data_cleanup_interval, 7 * ?MIN_IN_MS, positive_integer), + State = #state{upload_cleanup_interval = UploadInterval, deleted_cleanup_interval = CleanupInterval}, spawn_timers(State), {ok, State}. 
diff --git a/src/bookshelf/src/bksw_io.erl b/src/bookshelf/src/bksw_io.erl index aa7ceafa02..f036e7daf8 100644 --- a/src/bookshelf/src/bksw_io.erl +++ b/src/bookshelf/src/bksw_io.erl @@ -272,12 +272,12 @@ filter_entries(Bucket, Entries) -> filter_entries(_Bucket, [], _Ex, Accum) -> lists:reverse(Accum); -filter_entries(Bucket, [Entry|T], Ex, Accum) -> +filter_entries(Bucket, [Entry | T], Ex, Accum) -> case re:run(Entry, Ex, [{capture, none}]) of nomatch -> case entry_md(Bucket, filename:basename(Entry)) of {ok, Obj} -> - filter_entries(Bucket, T, Ex, [Obj|Accum]); + filter_entries(Bucket, T, Ex, [Obj | Accum]); _Error -> filter_entries(Bucket, T, Ex, Accum) end; @@ -301,13 +301,13 @@ make_buckets(Root, BucketDirs) -> make_buckets(_Root, [], Buckets) -> lists:reverse(Buckets); -make_buckets(Root, [BucketDir|T], Buckets) -> +make_buckets(Root, [BucketDir | T], Buckets) -> Buckets1 = case file:read_file_info(filename:join([Root, BucketDir])) of {ok, #file_info{mtime=Date}} -> [UTC | _] = %% FIXME This is a hack until R15B calendar:local_time_to_universal_time_dst(Date), [#bucket{name=bksw_io_names:decode(BucketDir), - date=UTC}|Buckets]; + date=UTC} | Buckets]; _Error -> Buckets end, diff --git a/src/bookshelf/src/bksw_io_names.erl b/src/bookshelf/src/bksw_io_names.erl index 1450e5ec08..768aead054 100644 --- a/src/bookshelf/src/bksw_io_names.erl +++ b/src/bookshelf/src/bksw_io_names.erl @@ -28,9 +28,9 @@ -include_lib("eunit/include/eunit.hrl"). -endif. -hex2dec(X) when (X>=$0) andalso (X=<$9) -> X-$0; -hex2dec(X) when (X>=$A) andalso (X=<$F) -> X-$A+10; -hex2dec(X) when (X>=$a) andalso (X=<$f) -> X-$a+10. +hex2dec(X) when (X>=$0) andalso (X=<$9) -> X - $0; +hex2dec(X) when (X>=$A) andalso (X=<$F) -> X - $A + 10; +hex2dec(X) when (X>=$a) andalso (X=<$f) -> X - $a + 10. -type uri() :: string() | binary(). -type hex_uri() :: string() | binary(). %% Hexadecimal encoded URI. @@ -40,10 +40,10 @@ hex2dec(X) when (X>=$a) andalso (X=<$f) -> X-$a+10. 
http_uri_decode(String) when is_list(String) -> do_decode(String). -do_decode([$%,Hex1,Hex2|Rest]) -> - [hex2dec(Hex1)*16+hex2dec(Hex2)|do_decode(Rest)]; -do_decode([First|Rest]) -> - [First|do_decode(Rest)]; +do_decode([$%, Hex1, Hex2 | Rest]) -> + [hex2dec(Hex1) * 16 + hex2dec(Hex2) | do_decode(Rest)]; +do_decode([First | Rest]) -> + [First | do_decode(Rest)]; do_decode([]) -> []. diff --git a/src/bookshelf/src/bksw_req.erl b/src/bookshelf/src/bksw_req.erl index e09ce0eccc..0fe6eebcc3 100644 --- a/src/bookshelf/src/bksw_req.erl +++ b/src/bookshelf/src/bksw_req.erl @@ -47,5 +47,5 @@ generate_id() -> %% it reset on node restarts -- thus we add os:timestamp/0 %% We can't use os:timestamp/0 only, since it's not guaranteed to be unique %% in multiple invocations. - Id = term_to_binary({node(), os:timestamp(), erlang:unique_integer([positive])}), + Id = term_to_binary({node(), os:timestamp(), erlang:unique_integer([positive])}, [{minor_version, 1}]), bksw_format:to_base64(Id). diff --git a/src/bookshelf/src/bksw_sec.erl b/src/bookshelf/src/bksw_sec.erl index 3c0856ed5a..0cff8006f5 100644 --- a/src/bookshelf/src/bksw_sec.erl +++ b/src/bookshelf/src/bksw_sec.erl @@ -157,11 +157,11 @@ const_time_compare(_, _, _) -> false. -spec ctcomp(string(), string(), boolean()) -> boolean(). -ctcomp([ ], [ ], IsEqual) -> IsEqual; -ctcomp([ ], [_|S2], _) -> ctcomp([], S2, false ); -ctcomp([_|S1], [ ], _) -> ctcomp(S1, [], false ); -ctcomp([X|S1], [X|S2], IsEqual) -> ctcomp(S1, S2, IsEqual); -ctcomp([_|S1], [_|S2], _) -> ctcomp(S1, S2, false ). +ctcomp([ ], [ ], IsEqual) -> IsEqual; +ctcomp([ ], [_ | S2], _) -> ctcomp([], S2, false ); +ctcomp([_ | S1], [ ], _) -> ctcomp(S1, [], false ); +ctcomp([X | S1], [X | S2], IsEqual) -> ctcomp(S1, S2, IsEqual); +ctcomp([_ | S1], [_ | S2], _) -> ctcomp(S1, S2, false ). 
encode_access_denied_error_response(RequestId, Req0, Context) -> Req1 = bksw_req:with_amz_id_2(Req0), @@ -182,7 +182,7 @@ encode_sign_error_response(AccessKeyId, IncomingSignature, % split "/" (possibly leading and/or trailing /) into {"bucketname", "key"} % Path = "/" --spec get_bucket_key(Path::string()) -> {string(), string()}. +-spec get_bucket_key(Path :: string()) -> {string(), string()}. get_bucket_key(Path) -> case string:lexemes(Path, "/") of [ ] -> {"", ""}; @@ -228,7 +228,7 @@ host(Req0) -> % For example, 20150830T123600Z is a valid time stamp. Do not include milliseconds in the time stamp. % % 1 =< ExpiresSec =< 604800 --spec is_expired(DateTimeString::string(), ExpiresSec::integer()) -> boolean(). +-spec is_expired(DateTimeString :: string(), ExpiresSec :: integer()) -> boolean(). is_expired(DateTimeString, ExpiresSec) -> [Y1, Y2, Y3, Y4, M1, M2, D1, D2, _, H1, H2, N1, N2, S1, S2, _] = DateTimeString, Year = list_to_integer([Y1, Y2, Y3, Y4]), diff --git a/src/bookshelf/src/bksw_sup.erl b/src/bookshelf/src/bksw_sup.erl index 6e0641c6f5..2dc7568f03 100644 --- a/src/bookshelf/src/bksw_sup.erl +++ b/src/bookshelf/src/bksw_sup.erl @@ -56,7 +56,7 @@ maybe_with_cleanup_task(ChildSpecs) -> permanent, brutal_kill, worker, [bksw_cleanup_task]}, case bksw_conf:storage_type() of sql -> - [CleanupTask| ChildSpecs]; + [CleanupTask | ChildSpecs]; _ -> ChildSpecs end. diff --git a/src/bookshelf/src/bksw_wm_base.erl b/src/bookshelf/src/bksw_wm_base.erl index 5e778306e8..d8eba1e115 100644 --- a/src/bookshelf/src/bksw_wm_base.erl +++ b/src/bookshelf/src/bksw_wm_base.erl @@ -195,7 +195,7 @@ check_signed_headers_common(SignedHeaders, Headers) -> [] == [Key || {Key, _} <- Headers, is_amz(Key), not proplists:is_defined(Key, SignedHeaders)]. % https://docs.aws.amazon.com/general/latest/gr/sigv4-date-handling.html --spec get_check_date(ISO8601Date::string() | undefined, DateIfUndefined::string(), string()) -> {ok, string()} | {error, get_check_date}. 
+-spec get_check_date(ISO8601Date :: string() | undefined, DateIfUndefined :: string(), string()) -> {ok, string()} | {error, get_check_date}. get_check_date(ISO8601Date, DateIfUndefined, [Y1, Y2, Y3, Y4, M1, M2, D1, D2]) -> Date = case ISO8601Date of undefined -> DateIfUndefined; @@ -211,7 +211,7 @@ get_check_date(ISO8601Date, DateIfUndefined, [Y1, Y2, Y3, Y4, M1, M2, D1, D2]) - % keys, get corresponding key-value pairs. results are undefined % for nonexistent key(s). %-spec get_signed_headers(proplist(), proplist(), proplist()) -> proplist(). % for erlang20+ --spec get_signed_headers(SignedHeaderKeys::[string()], Headers::[tuple()], SignedHeaders::[tuple()]) -> [tuple()]. +-spec get_signed_headers(SignedHeaderKeys :: [string()], Headers :: [tuple()], SignedHeaders :: [tuple()]) -> [tuple()]. get_signed_headers([], _, SignedHeaders) -> lists:reverse(SignedHeaders); get_signed_headers(_, [], SignedHeaders) -> lists:reverse(SignedHeaders); get_signed_headers([Key | SignedHeaderKeys], Headers0, SignedHeaders) -> @@ -232,8 +232,8 @@ is_amz(_) -> parse_x_amz_credential(Cred) -> Parse = string:split(Cred, "/", all), case Parse of - [_access_key_id, _date, _aws_region, "s3", "aws4_request"] -> {ok, Parse}; - _ -> {error, parse_x_amz_credential} + [_AccessKeyId, _Date, _AwsRegion, "s3", "aws4_request"] -> {ok, Parse}; + _ -> {error, parse_x_amz_credential} end. % @doc split signed header string into component parts. return empty string on empty string. @@ -243,7 +243,7 @@ parse_x_amz_signed_headers(Headers) -> string:split(Headers, ";", all). % @doc convert the keys of key-value pairs to lowercase strings --spec process_headers(Headers::[tuple()]) -> [tuple()]. +-spec process_headers(Headers :: [tuple()]) -> [tuple()]. 
process_headers(Headers) -> [{string:casefold( case is_atom(Key) of diff --git a/src/bookshelf/src/bksw_wm_object.erl b/src/bookshelf/src/bksw_wm_object.erl index 80a918d1e8..a9002b54ca 100644 --- a/src/bookshelf/src/bksw_wm_object.erl +++ b/src/bookshelf/src/bksw_wm_object.erl @@ -193,7 +193,7 @@ fully_read(Ref, Accum) -> {ok, eof} -> lists:reverse(Accum); {ok, Data} -> - fully_read(Ref, [Data|Accum]); + fully_read(Ref, [Data | Accum]); Error -> error_logger:error_msg("Error occurred during content download: ~p~n", [Error]), lists:reverse(Accum) diff --git a/src/bookshelf/src/bksw_wm_sql_object.erl b/src/bookshelf/src/bksw_wm_sql_object.erl index 808c9fe0c3..1ef3bf1ad9 100644 --- a/src/bookshelf/src/bksw_wm_sql_object.erl +++ b/src/bookshelf/src/bksw_wm_sql_object.erl @@ -158,12 +158,12 @@ maybe_retry(#context{sql_retry_delay = Delay, sql_retry_count = Count} = Ctx, Re %% Return `{Obj, CtxNew}' where `Obj' is the entry meta data `#db_file{}' record or the atom %% `error'. The `CtxNew' may have been updated and should be kept. Accessing entry md %% through this function ensures we only ever read the md from the file system once. --spec fetch_entry_md(#wm_reqdata{}, #context{}) -> {#db_file{}, #context{}}|{not_found, #context{}}|{error, #context{}}. +-spec fetch_entry_md(#wm_reqdata{}, #context{}) -> {#db_file{}, #context{}} | {not_found, #context{}} | {error, #context{}}. 
fetch_entry_md(_Req, #context{entry_md = #db_file{} = Obj} = Ctx) -> {Obj, Ctx}; fetch_entry_md(Req, #context{} = Ctx) -> {ok, Bucket, Path} = bksw_util:get_object_and_bucket(Req), - case bksw_sql:find_file(Bucket,Path) of + case bksw_sql:find_file(Bucket, Path) of {ok, #db_file{} = Object} -> {Object, Ctx#context{entry_md = Object}}; {ok, not_found} -> diff --git a/src/bookshelf/stest/rebar.config b/src/bookshelf/stest/rebar.config index 9a368d7d03..200118e2af 100644 --- a/src/bookshelf/stest/rebar.config +++ b/src/bookshelf/stest/rebar.config @@ -10,7 +10,7 @@ %% on a version of basho bench that doesn't build anymore. {mini_s3, ".*", {git, "https://github.com/chef/mini_s3", - {branch, "main"}}} + {branch, "CHEF-11677/CHEF-12498/lbaker"}}} ]}. %% Add dependencies that are only needed for development here. These diff --git a/src/chef-server-ctl/habitat/plan.sh b/src/chef-server-ctl/habitat/plan.sh index d7416f2feb..9ceec41c7e 100644 --- a/src/chef-server-ctl/habitat/plan.sh +++ b/src/chef-server-ctl/habitat/plan.sh @@ -6,7 +6,7 @@ pkg_deps=( core/coreutils core/curl core/jq-static - core/ruby30 + core/ruby3_4 core/libffi core/postgresql-client core/gcc-libs diff --git a/src/oc_bifrost/apps/bifrost/src/bifrost_app.erl b/src/oc_bifrost/apps/bifrost/src/bifrost_app.erl index ca8c284f16..47fd047245 100644 --- a/src/oc_bifrost/apps/bifrost/src/bifrost_app.erl +++ b/src/oc_bifrost/apps/bifrost/src/bifrost_app.erl @@ -1,7 +1,7 @@ -module(bifrost_app). -behaviour(application). --export([start/2,stop/1]). +-export([start/2, stop/1]). start(_Type, _StartArgs) -> %% erlang 19.3.x SIGTERM changes caused runit failures. 
@@ -16,7 +16,7 @@ start(_Type, _StartArgs) -> SrcDir = filename:join([Dir, "../../../../../..", "external-deps"]), EbinDir = filename:join([Dir, "../../../../../..", "external-deps/ebin"]), application:set_env(sync, src_dirs, {add, [{SrcDir, - [{outdir,EbinDir}]}]}), + [{outdir, EbinDir}]}]}), application:set_env(sync, sync_method, scanner), application:start(sync); _ -> ok diff --git a/src/oc_bifrost/apps/bifrost/src/bifrost_wm_base.erl b/src/oc_bifrost/apps/bifrost/src/bifrost_wm_base.erl index a5e5236a63..ddfca639bc 100644 --- a/src/oc_bifrost/apps/bifrost/src/bifrost_wm_base.erl +++ b/src/oc_bifrost/apps/bifrost/src/bifrost_wm_base.erl @@ -123,7 +123,7 @@ finish_request(Req, #base_state{reqid=ReqId, PerfStats1 = case envy:get(bifrost, enable_extended_perf_log, true, boolean) of true -> PerfStats; false -> - [ Element || {<<"req_time">>,_} = Element <- PerfStats ] + [ Element || {<<"req_time">>, _} = Element <- PerfStats ] end, %% Add additional notes for the logger Req0 = oc_wm_request:add_notes([{reqid, ReqId}, @@ -142,7 +142,7 @@ finish_request(Req, #base_state{reqid=ReqId, %% this request. -spec new_request_id() -> request_id(). new_request_id() -> - base64:encode(erlang:md5(term_to_binary(make_ref()))). + base64:encode(erlang:md5(term_to_binary(make_ref(), [{minor_version, 1}]))). 
%% Stats Hero metrics-related functions diff --git a/src/oc_bifrost/apps/bifrost/src/bifrost_wm_error.erl b/src/oc_bifrost/apps/bifrost/src/bifrost_wm_error.erl index 8618615731..33204bbb80 100644 --- a/src/oc_bifrost/apps/bifrost/src/bifrost_wm_error.erl +++ b/src/oc_bifrost/apps/bifrost/src/bifrost_wm_error.erl @@ -69,7 +69,7 @@ set_db_exception(Req, State, Error) -> find_non_existent(_, []) -> none; -find_non_existent(Type, [Head|Tail]) -> +find_non_existent(Type, [Head | Tail]) -> case bifrost_db:exists(Type, Head) of true -> find_non_existent(Type, Tail); diff --git a/src/oc_bifrost/apps/bifrost/src/bifrost_wm_util.erl b/src/oc_bifrost/apps/bifrost/src/bifrost_wm_util.erl index d24b0bdfa8..bae5256cff 100644 --- a/src/oc_bifrost/apps/bifrost/src/bifrost_wm_util.erl +++ b/src/oc_bifrost/apps/bifrost/src/bifrost_wm_util.erl @@ -48,7 +48,7 @@ scheme(Req) -> port_string(Default) when Default =:= 80; Default =:= 443 -> ""; port_string(Port) -> - [$:|erlang:integer_to_list(Port)]. + [$: | erlang:integer_to_list(Port)]. 
base_uri(Req) -> Scheme = scheme(Req), diff --git a/src/oc_bifrost/elvis.config b/src/oc_bifrost/elvis.config index ad983a3fe1..61f75674f5 100644 --- a/src/oc_bifrost/elvis.config +++ b/src/oc_bifrost/elvis.config @@ -12,8 +12,8 @@ {elvis_style, no_if_expression}, {elvis_style, no_debug_call}, {elvis_style, no_nested_try_catch}, - {elvis_style, no_tabs}, - {elvis_style, no_trailing_whitespace}, + {elvis_text_style, no_tabs}, + {elvis_text_style, no_trailing_whitespace}, {elvis_style, operator_spaces}, {elvis_style, used_ignored_variable}, {elvis_style, variable_naming_convention} diff --git a/src/oc_bifrost/habitat/plan.sh b/src/oc_bifrost/habitat/plan.sh index 04f7c34200..3e48b2306c 100644 --- a/src/oc_bifrost/habitat/plan.sh +++ b/src/oc_bifrost/habitat/plan.sh @@ -3,12 +3,12 @@ pkg_origin=chef pkg_license=('Apache-2.0') pkg_maintainer="The Chef Server Maintainers " pkg_deps=( - core/erlang24 + core/erlang26 core/cacerts core/coreutils core/curl core/gcc-libs - core/sqitch_pg + core/sqitch ) pkg_build_deps=(core/make core/git core/gcc) pkg_bin_dirs=(bin) diff --git a/src/oc_bifrost/rebar.config b/src/oc_bifrost/rebar.config index 883ac272d6..e4778432cf 100644 --- a/src/oc_bifrost/rebar.config +++ b/src/oc_bifrost/rebar.config @@ -9,7 +9,7 @@ ]}. {erl_first_files, ["src/bifrost_wm.erl"]}. -{require_otp_vsn, "24.3.2"}. +{require_otp_vsn, "26.2.5.2"}. {deps, [ %% lager has to come first since we use its parse transform @@ -21,8 +21,8 @@ {git, "https://github.com/uwiger/edown", {branch, "master"}}}, {ej, ".*", {git, "https://github.com/chef/ej", {branch, "master"}}}, - {eper, ".*", - {git, "https://github.com/massemanet/eper", {branch, "master"}}}, +% {eper, ".*", +% {git, "https://github.com/massemanet/eper", {branch, "master"}}}, %% latest version of jiffy i could find that doesn't %% break oc_erchef_unit and oc_bifrost on master verify %% pipeline. could also try the two commits directly @@ -53,6 +53,8 @@ {eunit_opts, [verbose]}. 
+{dialyzer, [{warnings, [no_unknown]}]}. + {cover_enabled, true}. {profiles, [ @@ -92,7 +94,7 @@ {mixer, load}, syntax_tools, compiler, - eper, + %eper, observer_cli, opscoderl_wm, ej, diff --git a/src/oc_bifrost/rebar.lock b/src/oc_bifrost/rebar.lock index 721626374d..e31cb4653f 100644 --- a/src/oc_bifrost/rebar.lock +++ b/src/oc_bifrost/rebar.lock @@ -1,6 +1,6 @@ [{<<"chef_secrets">>, {git,"https://github.com/chef/chef_secrets", - {ref,"6fa36689fd599602e5985587a1497282df2d907a"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"edown">>, {git,"https://github.com/uwiger/edown", @@ -8,19 +8,15 @@ 0}, {<<"ej">>, {git,"https://github.com/chef/ej", - {ref,"f843f4da1cb7d8d2414adccc37fe523e3f92d789"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"envy">>, {git,"https://github.com/manderson26/envy.git", {ref,"0148fb4b7ed0e188511578e98b42d6e7dde0ebd1"}}, 1}, - {<<"eper">>, - {git,"https://github.com/massemanet/eper", - {ref,"17b0f97ea8287b72e8ebbe7132214db182ff1a1d"}}, - 0}, {<<"epgsql">>, {git,"https://github.com/chef/epgsql-1.git", - {ref,"34b4182f0e21f9189ddd7b2e290f01a9e7d93bf1"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"fs">>, {git,"https://github.com/synrc/fs", @@ -56,11 +52,11 @@ 0}, {<<"opscoderl_wm">>, {git,"https://github.com/chef/opscoderl_wm", - {ref,"5436cc600db462226a5d2f3ed585ab39eaf20ee5"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"pooler">>, {git,"https://github.com/chef/pooler.git", - {ref,"681c355abaacc5487ddf41a84b9ed53151a765fe"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"recon">>, {git,"https://github.com/ferd/recon/", @@ -68,7 +64,7 @@ 1}, {<<"sqerl">>, {git,"https://github.com/chef/sqerl", - {ref,"ebbe4c20ab5cd21041229d22dd60a6b38aa2930c"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"stats_hero">>, {git,"https://github.com/chef/stats_hero", @@ -80,5 +76,5 @@ 0}, {<<"webmachine">>, {git,"https://github.com/chef/webmachine", - {ref,"1389b01a9fbc25d36aad8956e08d2d0db242625f"}}, + 
{branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}]. diff --git a/src/oc_bifrost/rebar3 b/src/oc_bifrost/rebar3 index ed2a36d577..bf21708519 100755 Binary files a/src/oc_bifrost/rebar3 and b/src/oc_bifrost/rebar3 differ diff --git a/src/oc_erchef/apps/chef_db/itest/chef_sql_clients.erl b/src/oc_erchef/apps/chef_db/itest/chef_sql_clients.erl index 30c2c33db3..20034dfaf2 100644 --- a/src/oc_erchef/apps/chef_db/itest/chef_sql_clients.erl +++ b/src/oc_erchef/apps/chef_db/itest/chef_sql_clients.erl @@ -27,8 +27,8 @@ make_client(Prefix) -> "xwIDAQAB">>, pubkey_version = 1, last_updated_by = chef_test_suite_helper:actor_id(), - created_at = {datetime, {{2011,10,1},{16,47,46}}}, - updated_at = {datetime, {{2011,10,1},{16,47,46}}} + created_at = {datetime, {{2011, 10, 1}, {16, 47, 46}}}, + updated_at = {datetime, {{2011, 10, 1}, {16, 47, 46}}} }. insert_client_data() -> diff --git a/src/oc_erchef/apps/chef_db/itest/chef_sql_cookbook_versions.erl b/src/oc_erchef/apps/chef_db/itest/chef_sql_cookbook_versions.erl index f5220f9cd2..e7a44a4b30 100644 --- a/src/oc_erchef/apps/chef_db/itest/chef_sql_cookbook_versions.erl +++ b/src/oc_erchef/apps/chef_db/itest/chef_sql_cookbook_versions.erl @@ -55,8 +55,8 @@ insert_cbv_null_id() -> meta_long_desc= <<"">>, metadata= <<"">>, last_updated_by= chef_test_suite_helper:actor_id(), - created_at= {datetime, {{2011,10,1},{16,47,46}}}, - updated_at= {datetime, {{2011,10,1},{16,47,46}}}, + created_at= {datetime, {{2011, 10, 1}, {16, 47, 46}}}, + updated_at= {datetime, {{2011, 10, 1}, {16, 47, 46}}}, serialized_object= <<"">>, checksums = [] }, diff --git a/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag.erl b/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag.erl index 419eb01c98..1526779598 100644 --- a/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag.erl +++ b/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag.erl @@ -16,8 +16,8 @@ make_data_bag(Prefix) -> authz_id = AzId, org_id = OrgId, name = Name, last_updated_by = 
chef_test_suite_helper:actor_id(), - created_at = {datetime, {{2011,10,1},{16,47,46}}}, - updated_at = {datetime, {{2011,10,1},{16,47,46}}} }. + created_at = {datetime, {{2011, 10, 1}, {16, 47, 46}}}, + updated_at = {datetime, {{2011, 10, 1}, {16, 47, 46}}} }. data_bags() -> [make_data_bag(<<"01">>), diff --git a/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag_item.erl b/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag_item.erl index dcfdfced12..ea178a822c 100644 --- a/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag_item.erl +++ b/src/oc_erchef/apps/chef_db/itest/chef_sql_data_bag_item.erl @@ -13,8 +13,8 @@ make_data_bag_item(Prefix, BagName) -> #chef_data_bag_item{server_api_version = ?API_MIN_VER, id= Id, org_id= chef_test_suite_helper:the_org_id(), item_name= Name, data_bag_name= BagName, last_updated_by= chef_test_suite_helper:actor_id(), - created_at= {datetime, {{2011,10,1},{16,47,46}}}, - updated_at= {datetime, {{2011,10,1},{16,47,46}}}, + created_at= {datetime, {{2011, 10, 1}, {16, 47, 46}}}, + updated_at= {datetime, {{2011, 10, 1}, {16, 47, 46}}}, serialized_object= Prefix }. data_bag_items() -> @@ -37,7 +37,7 @@ fetch_data_bag_items() -> Results = itest_util:list_records(hd(DBS)), ?assertEqual(Expected, Results). -fetch_data_bag_item()-> +fetch_data_bag_item() -> Item = hd(data_bag_items()), {ok, Got} = itest_util:fetch_record(Item), @@ -52,9 +52,9 @@ fetch_data_bag_item_ids() -> {ok, Results} = chef_sql:fetch_data_bag_item_ids(chef_test_suite_helper:the_org_id(), <<"data_bag_02">>), ?assertEqual(Expected,Results). -bulk_get_data_bag_items()-> ok. +bulk_get_data_bag_items() -> ok. -update_data_bag_item()-> +update_data_bag_item() -> [Old | _T] = [ Db || Db <- data_bag_items(), Db#chef_data_bag_item.org_id =:= chef_test_suite_helper:the_org_id(), @@ -68,7 +68,7 @@ update_data_bag_item()-> (FResults#chef_data_bag_item.serialized_object)). 
-delete_data_bag_item()-> +delete_data_bag_item() -> Item = hd(data_bag_items()), {ok, DResults} = itest_util:delete_record(Item), ?assertEqual(1, DResults), diff --git a/src/oc_erchef/apps/chef_db/itest/chef_sql_environment_cookbooks.erl b/src/oc_erchef/apps/chef_db/itest/chef_sql_environment_cookbooks.erl index 6d2a62629f..e06c93fe5e 100644 --- a/src/oc_erchef/apps/chef_db/itest/chef_sql_environment_cookbooks.erl +++ b/src/oc_erchef/apps/chef_db/itest/chef_sql_environment_cookbooks.erl @@ -327,7 +327,7 @@ environment_filtered_cookbooks_spec() -> environment_from_spec({Prefix, Properties}) -> make_environment(Prefix, Properties); -environment_from_spec(Prefix) when is_binary(Prefix)-> +environment_from_spec(Prefix) when is_binary(Prefix) -> make_environment(Prefix, []). %% TODO: This doesn't handle the "_default" environment yet @@ -360,7 +360,7 @@ make_environment(Prefix, Properties) -> process_environment_property(cookbook_versions=Property, Properties) -> case proplists:lookup(Property, Properties) of - {Property, Value} when is_list(Value)-> + {Property, Value} when is_list(Value) -> %% Wrap for EJson {Value}; none -> @@ -373,4 +373,4 @@ environment_name_from_prefix(Prefix) -> %% TODO: Other places use this same date... they should use this %% function, instead default_date() -> - {datetime, {{2011,10,1},{16,47,46}}}. + {datetime, {{2011, 10, 1}, {16, 47, 46}}}. 
diff --git a/src/oc_erchef/apps/chef_db/itest/chef_sql_sandboxes.erl b/src/oc_erchef/apps/chef_db/itest/chef_sql_sandboxes.erl index d1e6a48dd9..bbdb77bf02 100644 --- a/src/oc_erchef/apps/chef_db/itest/chef_sql_sandboxes.erl +++ b/src/oc_erchef/apps/chef_db/itest/chef_sql_sandboxes.erl @@ -10,7 +10,7 @@ make_sandbox(Prefix) -> #chef_sandbox{id=chef_test_suite_helper:make_id(Prefix), org_id=chef_test_suite_helper:the_org_id(), - created_at = {datetime,{{2011,10,1},{16,47,46}}}, + created_at = {datetime, {{2011, 10, 1}, {16, 47, 46}}}, checksums=[{<<"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, false}, {<<"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb">>, false}, {<<"cccccccccccccccccccccccccccccccc">>, false}, @@ -42,7 +42,7 @@ fetch_sandbox() -> id = chef_test_suite_helper:make_id(<<"abcd">>)}), ?assertEqual(#chef_sandbox{id=chef_test_suite_helper:make_id(<<"abcd">>), org_id=chef_test_suite_helper:the_org_id(), - created_at={datetime,{{2011,10,1},{16,47,46}}}, + created_at={datetime, {{2011, 10, 1}, {16, 47, 46}}}, checksums=[{<<"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, true}, {<<"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb">>, true}, {<<"cccccccccccccccccccccccccccccccc">>, true}, diff --git a/src/oc_erchef/apps/chef_db/itest/chef_sql_users.erl b/src/oc_erchef/apps/chef_db/itest/chef_sql_users.erl index 59cd50e704..c86d2a5081 100644 --- a/src/oc_erchef/apps/chef_db/itest/chef_sql_users.erl +++ b/src/oc_erchef/apps/chef_db/itest/chef_sql_users.erl @@ -65,8 +65,8 @@ chef_user_record(AzId, _Admin) -> salt = <<"kosher">>, hash_type = <<"bcrypt">>, last_updated_by = chef_test_suite_helper:actor_id(), - created_at = {datetime,{{2011,10,1},{16,47,46}}}, - updated_at = {datetime,{{2011,10,1},{16,47,46}}}, + created_at = {datetime, {{2011, 10, 1}, {16, 47, 46}}}, + updated_at = {datetime, {{2011, 10, 1}, {16, 47, 46}}}, external_authentication_uid = <<"an open id of some kind">>, recovery_authentication_enabled = false, serialized_object = <<"{}">> diff --git 
a/src/oc_erchef/apps/chef_db/itest/itest_cookbook_util.erl b/src/oc_erchef/apps/chef_db/itest/itest_cookbook_util.erl index db05994a49..b6c4294c09 100644 --- a/src/oc_erchef/apps/chef_db/itest/itest_cookbook_util.erl +++ b/src/oc_erchef/apps/chef_db/itest/itest_cookbook_util.erl @@ -184,7 +184,7 @@ generate_cookbook_and_versions({CookbookPrefix, Versions}) when is_binary(Cookbo {Cookbook, CookbookVersions}. -spec add_cookbook_versions_to_db([#chef_cookbook_version{}]) -> ok. -add_cookbook_versions_to_db(CookbookVersions) when is_list(CookbookVersions)-> +add_cookbook_versions_to_db(CookbookVersions) when is_list(CookbookVersions) -> [ chef_sql:create_cookbook_version(V) || V <- CookbookVersions ], ok. @@ -289,8 +289,8 @@ make_cookbook_version(Prefix, Version, {AuthzId, OrgId, Name}, Properties) when meta_long_desc= <<"">>, metadata=Prefix, last_updated_by= chef_test_suite_helper:actor_id(), - created_at= {datetime, {{2011,10,1},{16,47,46}}}, - updated_at= {datetime, {{2011,10,1},{16,47,46}}}, + created_at= {datetime, {{2011, 10, 1}, {16, 47, 46}}}, + updated_at= {datetime, {{2011, 10, 1}, {16, 47, 46}}}, serialized_object= process_property(serialized_object, Properties), checksums = [] }. 
@@ -314,7 +314,7 @@ process_property(dependencies=Property, Properties) -> {Property, Binary} when is_binary(Binary) -> %% Already JSON Binary ; - {Property, PropertyList} when is_list(PropertyList)-> + {Property, PropertyList} when is_list(PropertyList) -> chef_json:encode({PropertyList}); none -> chef_json:encode({[]}) diff --git a/src/oc_erchef/apps/chef_db/src/chef_db.erl b/src/oc_erchef/apps/chef_db/src/chef_db.erl index 1082bef912..c5c17084da 100644 --- a/src/oc_erchef/apps/chef_db/src/chef_db.erl +++ b/src/oc_erchef/apps/chef_db/src/chef_db.erl @@ -168,7 +168,7 @@ create(ObjectRec0, #context{server_api_version = ApiVersion, reqid = ReqId}, Act case stats_hero:ctime(ReqId, {chef_sql, create_object}, fun() -> chef_sql:create_object(QueryName, Fields) end) of {ok, 1} -> ok; - {conflict, Msg}-> {conflict, Msg}; + {conflict, Msg} -> {conflict, Msg}; {error, Why} -> {error, Why} end. @@ -312,7 +312,7 @@ fetch_requestor(Context, OrgId, ClientName) -> %% @doc fetches requestor records for all actors matching by name. Note that %% if orgid is undefined, it will only retrieve records corresponding to users, %% and not clients. --spec fetch_requestors(#context{}, binary()|undefined, binary()) -> +-spec fetch_requestors(#context{}, binary() | undefined, binary()) -> [#chef_requestor{}] | not_found | {error, term()}. @@ -350,7 +350,7 @@ make_sandbox(#context{}=Ctx, OrgId, ActorId, Checksums) -> end. -spec cookbook_exists(DbContext :: #context{}, - OrgId::object_id(), + OrgId :: object_id(), CookbookName :: binary()) -> boolean() | {error, term()}. cookbook_exists(#context{reqid=ReqId}, OrgId, CookbookName) -> @@ -358,14 +358,14 @@ cookbook_exists(#context{reqid=ReqId}, OrgId, CookbookName) -> %% @doc Given a list of cookbook names and versions, return a list of #chef_cookbook_version %% objects. This is used by the depsolver endpoint. 
--spec bulk_fetch_minimal_cookbook_versions(DbContext :: #context{}, OrgId:: object_id(), [versioned_cookbook()]) -> [#chef_cookbook_version{}] | {error, any()}. +-spec bulk_fetch_minimal_cookbook_versions(DbContext :: #context{}, OrgId :: object_id(), [versioned_cookbook()]) -> [#chef_cookbook_version{}] | {error, any()}. bulk_fetch_minimal_cookbook_versions(#context{}, _OrgId, []) -> %% Avoid database calls in the case of an empty run_list []; bulk_fetch_minimal_cookbook_versions(#context{reqid = ReqID}, OrgId, VersionedCookbooks) -> ?SH_TIME(ReqID, chef_sql, bulk_fetch_minimal_cookbook_versions , (OrgId, VersionedCookbooks)). --spec fetch_cookbook_versions(#context{},object_id()) -> [versioned_cookbook()] | {error, any()}. +-spec fetch_cookbook_versions(#context{}, object_id()) -> [versioned_cookbook()] | {error, any()}. %% @doc Return a list of all cookbook names and versions in an org fetch_cookbook_versions(#context{} = Ctx, OrgId) -> fetch_objects(Ctx, fetch_cookbook_versions, OrgId). @@ -384,9 +384,9 @@ fetch_cookbook_versions(#context{} = Ctx, OrgId, CookbookName) -> fetch_cookbook_version(#context{reqid = ReqId}, OrgId, VersionedCookbook) -> ?SH_TIME(ReqId, chef_sql, fetch_cookbook_version, (OrgId, VersionedCookbook)). --spec fetch_latest_cookbook_version(Ctx::#context{}, - OrgId::object_id(), - CookbookName::binary()) -> +-spec fetch_latest_cookbook_version(Ctx :: #context{}, + OrgId :: object_id(), + CookbookName :: binary()) -> #chef_cookbook_version{} | not_found | {error, term()}. @@ -429,7 +429,7 @@ fetch_latest_cookbook_versions(#context{reqid=ReqId}, OrgId, NumberOfVersions) - %% @doc Retrieves the list of all recipes from the latest version of all an organization's %% cook}books and returns their cookbook-qualified names. -spec fetch_latest_cookbook_recipes(DbContext :: #context{}, - OrgId::object_id()) -> + OrgId :: object_id()) -> [CookbookQualifiedRecipeName :: binary()] | {error, Reason :: term()}. 
fetch_latest_cookbook_recipes(#context{reqid=ReqId}, OrgId) -> @@ -445,7 +445,7 @@ fetch_latest_cookbook_recipes(#context{reqid=ReqId}, OrgId) -> %% %% See the corresponding function in the chef_sql module for more information. -spec fetch_all_cookbook_version_dependencies(DbContext :: #context{}, - OrgId::object_id()) -> [depsolver:dependency_set()] | + OrgId :: object_id()) -> [depsolver:dependency_set()] | {error, term()}. fetch_all_cookbook_version_dependencies(#context{reqid=ReqId}, OrgId) -> case ?SH_TIME(ReqId, chef_sql, fetch_all_cookbook_version_dependencies, (OrgId)) of @@ -464,10 +464,10 @@ fetch_all_cookbook_version_dependencies(#context{reqid=ReqId}, OrgId) -> %% if it is a cookbook name (i.e., a binary), then information on just that cookbook will be %% returned. -spec fetch_environment_filtered_cookbook_versions(DbContext :: #context{}, - OrgId::object_id(), + OrgId :: object_id(), EnvName :: binary(), CookbookName :: binary() | all, - NumVersions :: all | non_neg_integer()) -> + NumVersions :: all | non_neg_integer()) -> [{CookbookName :: binary(), [Version :: binary()]}] | {error, term()}. fetch_environment_filtered_cookbook_versions(#context{reqid=ReqId}=DbContext, OrgId, EnvName, CookbookName, NumVersions) -> @@ -571,7 +571,7 @@ is_user_in_org(#context{reqid = ReqId}, UserName, OrgName) -> ?SH_TIME(ReqId, chef_sql, is_user_in_org, (UserName, OrgName)). -spec bulk_get(#context{}, binary(), chef_type(), [binary()]) -> - [binary()|ej:json_object()] | {error, _}. + [binary() | ej:json_object()] | {error, _}. %% @doc Return a list of JSON/gzip'd JSON as binary corresponding to the specified list of %% IDs. 
bulk_get(#context{reqid = ReqId}, _, node, Ids) -> @@ -673,7 +673,7 @@ create_object(#context{server_api_version = ApiVersion, reqid = ReqId}, Fun, Obj case stats_hero:ctime(ReqId, {chef_sql, Fun}, fun() -> chef_sql:Fun(Object2) end) of {ok, 1} -> ok; - {conflict, Msg}-> {conflict, Msg}; + {conflict, Msg} -> {conflict, Msg}; {error, Why} -> {error, Why} end. diff --git a/src/oc_erchef/apps/chef_db/src/chef_sql.erl b/src/oc_erchef/apps/chef_db/src/chef_sql.erl index 76bfda94f5..51b27f25c7 100644 --- a/src/oc_erchef/apps/chef_db/src/chef_sql.erl +++ b/src/oc_erchef/apps/chef_db/src/chef_sql.erl @@ -155,7 +155,7 @@ fetch_org_metadata(OrgName) -> {ok, none} -> not_found; {ok, L} when is_list(L) -> - Guid = proplists:get_value(<<"id">>,L), + Guid = proplists:get_value(<<"id">>, L), AuthzId = proplists:get_value(<<"authz_id">>, L), {Guid, AuthzId}; {error, Error} -> @@ -242,7 +242,7 @@ bulk_get_clients(ApiVersion, Ids) -> {error, Error} end. --spec fetch_cookbook_versions(OrgId::object_id()) -> +-spec fetch_cookbook_versions(OrgId :: object_id()) -> {ok, [versioned_cookbook()]} | {error, term()}. %% @doc Return list of [cookbook name, version()] for a given organization. The list is returned sort %% by name, major, minor, patch fields. @@ -250,7 +250,7 @@ fetch_cookbook_versions(OrgId) -> QueryName = list_cookbook_versions_by_orgid, cookbook_versions_from_db(QueryName, [OrgId]). --spec fetch_cookbook_versions(OrgId::object_id(), CookbookName::binary()) -> +-spec fetch_cookbook_versions(OrgId :: object_id(), CookbookName :: binary()) -> {ok, [versioned_cookbook()]} | {error, term()}. %% @doc Return list of [cookbook name, version()] for a given organization and cookbook. %% The list is returned sorted by name, major, minor, patch fields. 
@@ -317,7 +317,7 @@ fetch_latest_cookbook_versions(OrgId, CookbookName, NumberOfVersions) -> Raw = lists:foldl(fun(Row, Acc) -> N = proplists:get_value(<<"name">>, Row), V = proplists:get_value(<<"version">>, Row), - [{N,V}|Acc] + [{N, V} | Acc] end, [], CookbookVersions), @@ -331,8 +331,8 @@ fetch_latest_cookbook_versions(OrgId, CookbookName, NumberOfVersions) -> %% recipes within the opaque blob that is the serialized_object; this %% is a GZipped JSON string, which will need to be decompressed and %% decoded from JSON before the actual recipes can be extracted. --spec fetch_latest_cookbook_recipes(OrgId::object_id()) -> {ok, [{CookbookName::binary(), - SerializedObject::binary()}]} | +-spec fetch_latest_cookbook_recipes(OrgId :: object_id()) -> {ok, [{CookbookName :: binary(), + SerializedObject :: binary()}]} | {error, term()}. fetch_latest_cookbook_recipes(OrgId) -> case sqerl:select(fetch_latest_cookbook_recipes_by_orgid, [OrgId]) of @@ -521,7 +521,7 @@ fetch_environment_filtered_recipes(OrgId, Environment) -> end. %% cookbook version ops --spec bulk_fetch_minimal_cookbook_versions(OrgId::object_id(), [versioned_cookbook()]) -> +-spec bulk_fetch_minimal_cookbook_versions(OrgId :: object_id(), [versioned_cookbook()]) -> [#chef_cookbook_version{}]. bulk_fetch_minimal_cookbook_versions(OrgId, CookbookVersions) -> QueryParam = cookbook_versions_array_to_binary(CookbookVersions), @@ -539,7 +539,7 @@ cookbook_versions_array_to_binary(CookbookVersions) -> cookbook_versions_array_to_binary(CookbookVersions, <<"{">>, <<"}">>, <<"">>). -spec cookbook_versions_array_to_binary([versioned_cookbook()], binary(), binary(), binary()) -> binary(). 
-cookbook_versions_array_to_binary([CkbVer|CookbookVersions], Acc, EndBin, Sep) -> +cookbook_versions_array_to_binary([CkbVer | CookbookVersions], Acc, EndBin, Sep) -> CkbBin = cookbook_version_to_binary(CkbVer), cookbook_versions_array_to_binary(CookbookVersions, <>, @@ -567,7 +567,7 @@ cookbook_version_to_binary({Name, {MajorInt, MinorInt, PatchInt}}) -> integer_to_binary(MinorInt), <<",">>, integer_to_binary(PatchInt), <<")\"">>]). --spec fetch_cookbook_version(OrgId::object_id(), +-spec fetch_cookbook_version(OrgId :: object_id(), versioned_cookbook()) -> #chef_cookbook_version{} | {cookbook_exists, object_id()} | not_found | @@ -582,7 +582,7 @@ fetch_cookbook_version(OrgId, {Name, {Major, Minor, Patch}}) -> Checksums when is_list(Checksums) -> CBVersion#chef_cookbook_version{checksums = Checksums}; {error, Error} -> - {error,Error} + {error, Error} end; {ok, none} -> %% check if we have a cookbook entry @@ -600,8 +600,8 @@ fetch_cookbook_version(OrgId, {Name, {Major, Minor, Patch}}) -> %% TODO: Refactor this to use num_versions --spec fetch_latest_cookbook_version(OrgId::object_id(), - CookbookName::binary()) -> +-spec fetch_latest_cookbook_version(OrgId :: object_id(), + CookbookName :: binary()) -> #chef_cookbook_version{} | not_found | {error, term()}. 
%% @doc Return the latest version of the requested cookbook fetch_latest_cookbook_version(OrgId, CookbookName) -> @@ -646,7 +646,7 @@ update_cookbook_version(#chef_cookbook_version{ id = Id, {ok, Additions, Deletions} -> UpdatedFields = [Frozen, MetaAttributes, MetaDeps, MetaLongDesc, Metadata, SerializeObject, LastUpdatedBy, UpdatedAt, Id], case do_update(update_cookbook_version, UpdatedFields) of - {ok, _} -> #chef_db_cb_version_update{added_checksums=Additions,deleted_checksums=Deletions}; + {ok, _} -> #chef_db_cb_version_update{added_checksums=Additions, deleted_checksums=Deletions}; Error -> Error end; {error, Reason} -> @@ -788,7 +788,7 @@ select_rows({Query, BindParameters}) -> select_rows({Query, BindParameters, Transform}) when is_tuple(Transform); Transform == rows -> match_result(sqerl:select(Query, BindParameters, Transform)); -select_rows({Query, BindParameters, Fields = [_|_]}) -> +select_rows({Query, BindParameters, Fields = [_ | _]}) -> match_result(sqerl:select(Query, BindParameters, rows_as_scalars, Fields)). -spec match_result(Input) -> NormalizedResult when @@ -830,7 +830,7 @@ delete_sandbox(SandboxId) when is_binary(SandboxId) -> {'ok', 'none' | number()}. mark_checksums_as_uploaded(_OrgId, []) -> ok; -mark_checksums_as_uploaded(OrgId, [Checksum|Rest]) -> +mark_checksums_as_uploaded(OrgId, [Checksum | Rest]) -> case sqerl:statement(insert_checksum, [OrgId, Checksum], count) of {ok, 1} -> mark_checksums_as_uploaded(OrgId, Rest); @@ -887,7 +887,7 @@ fetch_object_names(StubRec) -> L; not_found -> []; - {error, _} = Error-> + {error, _} = Error -> Error end. @@ -993,7 +993,7 @@ create_object(#chef_sandbox{id=SandboxId, Error end. -spec create_object(atom(), tuple() | list()) -> {ok, non_neg_integer()} | - {ok,[[{binary(),<<>>}]]} | + {ok, [[{binary(), <<>>}]]} | {error, term()} | {conflict, term()}. 
%% okay, that's pretty ugly, but no more so than all the hacks in here and @@ -1013,7 +1013,7 @@ create_object(QueryName, Args) when QueryName =:= insert_user; is_list(Args) -> case sqerl:select(QueryName, Args, first_as_scalar, [add_user]) of {ok, 1} -> {ok, 1}; - {ok, 0} -> {error, <<"Record not created '", (term_to_binary(QueryName))/binary, "'.">>}; + {ok, 0} -> {error, <<"Record not created '", (term_to_binary(QueryName, [{minor_version, 1}]))/binary, "'.">>}; Error -> Error end; create_object(QueryName, Args) when is_atom(QueryName), is_list(Args) -> @@ -1027,12 +1027,12 @@ create_object(QueryName, Record) when is_atom(QueryName) -> %% Returns 'ok' if all the records were inserted. Returns an error tuple %% on the first error it detects. Further processing of the list is %% abandoned at that point. --spec insert_cookbook_checksums(Checksums:: list(), bin_or_string(), bin_or_string(), +-spec insert_cookbook_checksums(Checksums :: list(), bin_or_string(), bin_or_string(), non_neg_integer(), non_neg_integer(), non_neg_integer()) -> ok | {error, term()}. insert_cookbook_checksums([], _OrgId, _Name, _Major, _Minor, _Patch) -> ok; -insert_cookbook_checksums([Checksum|Rest], OrgId, Name, Major, Minor, Patch) -> +insert_cookbook_checksums([Checksum | Rest], OrgId, Name, Major, Minor, Patch) -> case sqerl:statement(insert_cookbook_version_checksum, [Checksum, OrgId, Name, OrgId, Major, Minor, Patch], count) of {ok, 1} -> insert_cookbook_checksums(Rest, OrgId, Name, Major, Minor, Patch); @@ -1047,7 +1047,7 @@ insert_cookbook_checksums([Checksum|Rest], OrgId, Name, Major, Minor, Patch) -> %% version (for that, see `unlink_all_checksums_from_cbv/2'). 
unlink_checksums_from_cbv([], _OrgId, _CookbookVersionId) -> ok; -unlink_checksums_from_cbv([Checksum|Rest], OrgId, CookbookVersionId) -> +unlink_checksums_from_cbv([Checksum | Rest], OrgId, CookbookVersionId) -> case sqerl:statement(delete_cookbook_version_checksum, [Checksum, OrgId, CookbookVersionId], count) of {ok, _Count} -> unlink_checksums_from_cbv(Rest, OrgId, CookbookVersionId); @@ -1099,7 +1099,7 @@ do_update(QueryName, UpdateFields) when QueryName =:= update_user_by_id; is_list(UpdateFields) -> case sqerl:select(QueryName, UpdateFields, first_as_scalar, [update_user]) of {ok, 1} -> {ok, 1}; - {ok, 0} -> {error, <<"Record not updated '", (term_to_binary(QueryName))/binary, "'.">>}; + {ok, 0} -> {error, <<"Record not updated '", (term_to_binary(QueryName, [{minor_version, 1}]))/binary, "'.">>}; {ok, none} -> {ok, not_found}; Error -> Error @@ -1123,7 +1123,7 @@ do_update(QueryName, UpdateFields) -> %% processing of the list is abandoned at that point. insert_sandboxed_checksums([], _OrgId, _SandboxId, _CreatedAt) -> ok; -insert_sandboxed_checksums([Checksum|Rest], OrgId, SandboxId, CreatedAt) -> +insert_sandboxed_checksums([Checksum | Rest], OrgId, SandboxId, CreatedAt) -> case sqerl:statement(insert_sandboxed_checksum, [OrgId, SandboxId, Checksum, CreatedAt], count) of {ok, 1} -> insert_sandboxed_checksums(Rest, OrgId, SandboxId, CreatedAt); @@ -1146,19 +1146,19 @@ cookbook_exists(OrgId, CookbookName) -> {error, Reason} end. --spec create_cookbook_if_needed(CookbookVersion::#chef_cookbook_version{}) -> +-spec create_cookbook_if_needed(CookbookVersion :: #chef_cookbook_version{}) -> ok | {error, term()}. %% @doc Helper function which creates a row in the cookbook table if it %% not already there create_cookbook_if_needed(#chef_cookbook_version{org_id = OrgId, name = Name}=CookbookVersion) -> create_cookbook_if_needed(cookbook_exists(OrgId, Name), CookbookVersion). 
--spec create_cookbook_if_needed(Exists::boolean(), - CookbookVersion::#chef_cookbook_version{}) -> +-spec create_cookbook_if_needed(Exists :: boolean(), + CookbookVersion :: #chef_cookbook_version{}) -> ok | {error, term()}. create_cookbook_if_needed(false, #chef_cookbook_version{authz_id = AuthzId, - org_id = OrgId, - name = Name}) -> + org_id = OrgId, + name = Name}) -> case sqerl:statement(insert_cookbook, [AuthzId, OrgId, Name], count) of {ok, N} when is_integer(N) -> ok; @@ -1177,8 +1177,8 @@ create_cookbook_if_needed({error, Reason}, _CookbookVersion) -> %% %% TODO: We could extract out the case sqerl:select statement and logic for all the methods %% that use rows_as_scalars transform --spec fetch_cookbook_version_checksums(OrgId::object_id(), - CookbookVersionId::object_id()) -> +-spec fetch_cookbook_version_checksums(OrgId :: object_id(), + CookbookVersionId :: object_id()) -> [binary()] | {error, term()}. fetch_cookbook_version_checksums(OrgId, CookbookVersionId) when is_binary(OrgId), is_binary(CookbookVersionId) -> @@ -1192,7 +1192,7 @@ fetch_cookbook_version_checksums(OrgId, CookbookVersionId) when is_binary(OrgId) {error, Reason} end. --spec fetch_cookbook_authz(OrgId::object_id(), CookbookName::bin_or_string()) -> +-spec fetch_cookbook_authz(OrgId :: object_id(), CookbookName :: bin_or_string()) -> object_id() | not_found | {error, term()}. %% @doc helper function to return the AuthzId for a cookbook. fetch_cookbook_authz(OrgId, CookbookName) -> @@ -1212,8 +1212,8 @@ fetch_cookbook_authz(OrgId, CookbookName) -> %% from the database (because removing them from this cookbook version %% made them orphans); the corresponding files can now be safely %% removed from S3. --spec unlink_all_checksums_from_cbv(OrgId::object_id(), - CookbookVersionId::object_id()) -> +-spec unlink_all_checksums_from_cbv(OrgId :: object_id(), + CookbookVersionId :: object_id()) -> {ok, [binary()]} | {error, term()}. 
unlink_all_checksums_from_cbv(OrgId, CookbookVersionId) -> % retrieve a list of checksums before we delete the @@ -1232,8 +1232,8 @@ unlink_all_checksums_from_cbv(OrgId, CookbookVersionId) -> %% %% Returns a list of deleted checksums (a subset of `Checksums') %% for further upstream processing (i.e., deletion from S3). --spec delete_orphaned_checksums(OrgId::binary(), - Checksums::[binary()]) -> [binary()]. +-spec delete_orphaned_checksums(OrgId :: binary(), + Checksums :: [binary()]) -> [binary()]. delete_orphaned_checksums(OrgId, Checksums) -> %% we don't want to delete checksums associated with %% cookbook artifact versions @@ -1241,14 +1241,14 @@ delete_orphaned_checksums(OrgId, Checksums) -> lists:foldl(fun(Checksum, Acc) -> case sqerl:statement(delete_checksum_by_id, [OrgId, Checksum]) of {ok, N} when is_integer(N) -> %% pretend there is 1 - [Checksum|Acc]; + [Checksum | Acc]; {foreign_key, _} -> %% The checksum may still be associated with %% another cookbook version record which is OK! Acc; {error, Reason} -> error_logger:error_msg("Checksum deletion error: ~p~n" - "{~p,delete_orphaned_checksums,2,[{file,~p},{line,~p}]}~n", + "{~p,delete_orphaned_checksums,2,[{file,~p}, {line,~p}]}~n", [Reason, ?MODULE, ?FILE, ?LINE]), Acc end @@ -1294,9 +1294,9 @@ cookbook_versions_from_db(QueryName, Args) -> %% @doc helper function to convert from the three fields stored in the DB %% and convert it into a version() type as returned by the API --spec triple_to_version_tuple(Major::non_neg_integer(), - Minor::non_neg_integer(), - Patch::non_neg_integer()) -> version(). +-spec triple_to_version_tuple(Major :: non_neg_integer(), + Minor :: non_neg_integer(), + Patch :: non_neg_integer()) -> version(). triple_to_version_tuple(Major, Minor, Patch) -> {Major, Minor, Patch}. @@ -1316,11 +1316,11 @@ process_dependency_resultset(Rows) -> WorkingDependencySet :: [depsolver:dependency_set()]) -> FinalDependencySet :: [depsolver:dependency_set()]. 
%% This clause starts things off -process_dependency_resultset([CurrentRow|Rows], []) -> +process_dependency_resultset([CurrentRow | Rows], []) -> DependencySet = row_to_dependency_set(CurrentRow), process_dependency_resultset(Rows, [DependencySet]); %% This clause handles the "middle" -process_dependency_resultset([CurrentRow|Rows], [{LastCookbook, Versions} | DependencySet]) -> +process_dependency_resultset([CurrentRow | Rows], [{LastCookbook, Versions} | DependencySet]) -> {CurrentCookbook, [{Version, Dependencies}]} = row_to_dependency_set(CurrentRow), @@ -1401,7 +1401,7 @@ row_to_dependency_set(Row) -> -spec condense_depsolver_results([{CookbookName :: binary(), Version :: binary()}], NumVersions :: num_versions()) -> [{CookbookBin :: binary(), [ VersionBin :: binary()]}]. -condense_depsolver_results([First|Rest], NumVersions) -> +condense_depsolver_results([First | Rest], NumVersions) -> NumTaken = case NumVersions of 0 -> 0; _ -> 1 @@ -1418,8 +1418,8 @@ condense_depsolver_results(NumVersions, NumTaken, condense_depsolver_results(NumVersions, NumTaken, Rest, Processed); {CurrentCookbook, _} -> %% We haven't yet taken as many versions of this cookbook as we need yet - condense_depsolver_results(NumVersions, NumTaken+1, Rest, [{LastCookbook, [Version | Versions]} - | RestProcessed]); + condense_depsolver_results(NumVersions, NumTaken + 1, Rest, [{LastCookbook, [Version | Versions]} + | RestProcessed]); {_, _} -> %% We've switched to a "new" cookbook, and we may or may not need to keep some versions Num = case NumVersions of @@ -1545,7 +1545,7 @@ fetch_cookbook_version_serialized_objects(UnprocessedIds, BatchSize, AllResults) -spec fetch_cookbook_version_serialized_objects_batch([Ids :: integer()]) -> {ok, [{CookbookName :: binary(), SerializedObject :: binary()}]} | {error, term()}. 
-fetch_cookbook_version_serialized_objects_batch(Ids) when is_list(Ids)-> +fetch_cookbook_version_serialized_objects_batch(Ids) when is_list(Ids) -> case sqerl:select(bulk_get_cbv_serialized_object, [Ids]) of {ok, none} -> {ok, []}; @@ -1611,12 +1611,12 @@ list_policy_groups_for_policy_revision(RevisionID) -> {error, Reason} end. -policy_rev_by_group_rows_to_tuple([Row|Rest], Processed) -> +policy_rev_by_group_rows_to_tuple([Row | Rest], Processed) -> PolicyGroupName = proplists:get_value(<<"policy_group_name">>, Row), PolicyName = proplists:get_value(<<"policy_revision_name">>, Row), RevisionID = proplists:get_value(<<"policy_revision_revision_id">>, Row), ProcessedRow = {PolicyGroupName, PolicyName, RevisionID}, - policy_rev_by_group_rows_to_tuple(Rest, [ProcessedRow|Processed]); + policy_rev_by_group_rows_to_tuple(Rest, [ProcessedRow | Processed]); policy_rev_by_group_rows_to_tuple([], Processed) -> Processed. @@ -1624,7 +1624,7 @@ policy_rev_by_group_rows_to_tuple([], Processed) -> %% versions, presented as `{CookbookName, SerializedObject}' pairs. %% %% Recipe names are returned sorted alphabetically. --spec extract_recipe_names_from_serialized_objects([{CookbookName :: binary(), +-spec extract_recipe_names_from_serialized_objects([{CookbookName :: binary(), SerializedObject :: binary()}]) -> [ QualifiedRecipeName :: binary() ]. extract_recipe_names_from_serialized_objects(Pairs) -> @@ -1711,7 +1711,7 @@ dict_key_value_for_index(Index) when Index =:= node; %% This spec brought to you by -Wunderspecs -spec create_dict(Query :: dict_queries(), Args :: list(), - {Key :: <<_:32,_:_*40>>, %% <<"name">> | <<"item_name">> + {Key :: <<_:32, _:_*40>>, %% <<"name">> | <<"item_name">> Value :: <<_:16>>}) %% <<"id">> -> {ok, dict()} | {error, term()}. 
create_dict(Query, Args, {Key, Value}) -> @@ -1746,7 +1746,7 @@ proplist_results(Query, Args) -> %% %% This spec brought to you by -Wunderspecs -spec proplists_to_dict(ResultSetProplist :: [[tuple()]], - Key :: <<_:32,_:_*40>>, %% <<"name">> | <<"item_name">> + Key :: <<_:32, _:_*40>>, %% <<"name">> | <<"item_name">> Value :: <<_:16>>) -> dict(). %% <<"id">> proplists_to_dict(ResultSetProplist, Key, Value) -> lists:foldl(fun(Row, Dict) -> diff --git a/src/oc_erchef/apps/chef_db/test/chef_db_test_utils.erl b/src/oc_erchef/apps/chef_db/test/chef_db_test_utils.erl index 3ec70f7df9..f5813137e2 100644 --- a/src/oc_erchef/apps/chef_db/test/chef_db_test_utils.erl +++ b/src/oc_erchef/apps/chef_db/test/chef_db_test_utils.erl @@ -36,7 +36,7 @@ test_setup() -> start_stats_hero(), - Server = {context,<<"test-req-id">>,{server,"localhost",5984,[],[]}}, + Server = {context,<<"test-req-id">>, {server,"localhost",5984,[],[]}}, Superuser = <<"cb4dcaabd91a87675a14ec4f4a00050d">>, {Server, Superuser}. diff --git a/src/oc_erchef/apps/chef_index/src/chef_elasticsearch.erl b/src/oc_erchef/apps/chef_index/src/chef_elasticsearch.erl index 66d3241f1b..253b136998 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_elasticsearch.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_elasticsearch.erl @@ -125,7 +125,7 @@ query_body(#chef_solr_query{ {<<"size">>, Rows}, {<<"sort">>, [{[{<<"X_CHEF_id_CHEF_X">>, {[{<<"order">>, <<"asc">>}]}}]}]}, {<<"query">>, {[ - {<<"bool">>,{[ + {<<"bool">>, {[ {<<"must">>, {[query_string_query_ejson(Query)]}}, {<<"filter">>, {[query_string_query_ejson(FilterQuery)]}} ]}}]} @@ -142,9 +142,9 @@ query_string_query_ejson(QueryString) -> QueryEjson1 = case envy:get(chef_index, solr_elasticsearch_major_version, 2, non_neg_integer) of 7 -> QueryEjson; - _ -> [{<<"lowercase_expanded_terms">>, false}| QueryEjson] + _ -> [{<<"lowercase_expanded_terms">>, false} | QueryEjson] end, - {<<"query_string">>,{QueryEjson1}}. + {<<"query_string">>, {QueryEjson1}}. 
%% A note on deleting %% diff --git a/src/oc_erchef/apps/chef_index/src/chef_index.erl b/src/oc_erchef/apps/chef_index/src/chef_index.erl index b0feb53410..602d890c50 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_index.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_index.erl @@ -82,7 +82,7 @@ delete_search_db_by_type(OrgId, Type) -> solr -> chef_solr:delete_search_db_by_type(OrgId, Type) end. --spec query_from_params(binary()|string(), +-spec query_from_params(binary() | string(), string() | binary() | undefined, string(), string()) -> #chef_solr_query{}. diff --git a/src/oc_erchef/apps/chef_index/src/chef_index_batch.erl b/src/oc_erchef/apps/chef_index/src/chef_index_batch.erl index 3e2c07ebdd..2a8fde5876 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_index_batch.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_index_batch.erl @@ -179,7 +179,7 @@ flush(State = #chef_idx_batch_state{item_queue = Queue, spawn( fun() -> prometheus_gauge:inc(chef_index_batch_inflight_flushes_count), - lager:debug("Batch posting to ~s ~p documents (~p bytes)", [Provider, length(DocsToAdd), CurrentSize+WrapperSize]), + lager:debug("Batch posting to ~s ~p documents (~p bytes)", [Provider, length(DocsToAdd), CurrentSize + WrapperSize]), Now = erlang:monotonic_time(), Res = chef_index:update(Provider, Doc), Now1 = erlang:monotonic_time(), @@ -196,7 +196,7 @@ flush(State = #chef_idx_batch_state{item_queue = Queue, end, {0, 0}, Timestamps), - gen_server:cast(Self, {stats_update, TotalDocs, {BeforeDiff/TotalDocs, AfterDiff/TotalDocs}, Res}), + gen_server:cast(Self, {stats_update, TotalDocs, {BeforeDiff / TotalDocs, AfterDiff / TotalDocs}, Res}), [gen_server:reply(From, Res) || From <- PidsToReply], prometheus_gauge:dec(chef_index_batch_inflight_flushes_count) end), @@ -213,14 +213,14 @@ handle_call({add_item, Doc, Size, AddedTime}, From, prometheus_gauge:set(chef_index_batch_current_batch_size_bytes, Size), prometheus_gauge:set(chef_index_batch_current_batch_doc_count, 1), 
{noreply, State1#chef_idx_batch_state{ - item_queue = [{From, AddedTime, Doc}| Queue], + item_queue = [{From, AddedTime, Doc} | Queue], current_size = Size }}; handle_call({add_item, Doc, Size, AddedTime}, From, State = #chef_idx_batch_state{item_queue=Queue, current_size=CurrentSize}) -> CurrentSizeUpdated = CurrentSize + Size, prometheus_gauge:set(chef_index_batch_current_batch_size_bytes, CurrentSizeUpdated), prometheus_gauge:inc(chef_index_batch_current_batch_doc_count), - {noreply, State#chef_idx_batch_state{item_queue = [{From, AddedTime, Doc}|Queue], + {noreply, State#chef_idx_batch_state{item_queue = [{From, AddedTime, Doc} | Queue], current_size = CurrentSizeUpdated }}; handle_call(status, _From, State) -> @@ -252,8 +252,8 @@ collect_process_info() -> {total_heap_size, HeapSizeWords}, {memory, MemorySizeBytes}] -> WordSize = erlang:system_info(wordsize), - prometheus_gauge:set(chef_index_batch_stack_size_bytes, StackSizeWords*WordSize), - prometheus_gauge:set(chef_index_batch_heap_size_bytes, HeapSizeWords*WordSize), + prometheus_gauge:set(chef_index_batch_stack_size_bytes, StackSizeWords * WordSize), + prometheus_gauge:set(chef_index_batch_heap_size_bytes, HeapSizeWords * WordSize), prometheus_gauge:set(chef_index_batch_memory_size_bytes, MemorySizeBytes), prometheus_gauge:set(chef_index_batch_mailbox_size, MailboxSize); Other -> @@ -261,7 +261,7 @@ collect_process_info() -> end. 
-handle_cast({stats_update, TotalDocs, {AvgQueueLatency,AvgSuccessLatency}, Resp}, +handle_cast({stats_update, TotalDocs, {AvgQueueLatency, AvgSuccessLatency}, Resp}, State = #chef_idx_batch_state{avg_queue_latency = OQL, avg_success_latency = OSL, total_docs_queued = TQ, @@ -269,14 +269,14 @@ handle_cast({stats_update, TotalDocs, {AvgQueueLatency,AvgSuccessLatency}, Resp} }) -> collect_process_info(), TotalDocsQueuedUpdated = TQ + TotalDocs, - AvgQueueLatencyUpdated = ((AvgQueueLatency*TotalDocs)+(OQL*TQ))/(TQ+TotalDocs), + AvgQueueLatencyUpdated = ((AvgQueueLatency * TotalDocs) + (OQL * TQ)) / (TQ + TotalDocs), State1 = State#chef_idx_batch_state{total_docs_queued = TotalDocsQueuedUpdated, avg_queue_latency = AvgQueueLatencyUpdated}, case Resp of ok -> prometheus_counter:inc(chef_index_batch_successful_docs_total, TotalDocs), - TotalDocsSuccessUpdated = TS+TotalDocs, - AvgSuccessLatencyUpdated = ((AvgSuccessLatency*TotalDocs)+(OSL*TS))/(TS+TotalDocs), + TotalDocsSuccessUpdated = TS + TotalDocs, + AvgSuccessLatencyUpdated = ((AvgSuccessLatency * TotalDocs) + (OSL * TS)) / (TS + TotalDocs), {noreply, State1#chef_idx_batch_state{ total_docs_success = TotalDocsSuccessUpdated, avg_success_latency = AvgSuccessLatencyUpdated @@ -307,4 +307,4 @@ code_change(_OldVsn, State, _Extra) -> time_diff_in_ms(EndTime, StartTime) -> TimeTaken = EndTime - StartTime, TimeTakenInMicro = erlang:convert_time_unit(TimeTaken, native, microsecond), - TimeTakenInMicro/1000.0. + TimeTakenInMicro / 1000.0. 
diff --git a/src/oc_erchef/apps/chef_index/src/chef_index_expand.erl b/src/oc_erchef/apps/chef_index/src/chef_index_expand.erl index bacb4fd31b..3222a498e1 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_index_expand.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_index_expand.erl @@ -70,7 +70,7 @@ declare_metrics() -> doc_for_index(Index, Id, OrgId, Ejson) -> Start = erlang:monotonic_time(), Ret = make_doc_for_add(make_record(Index, Id, OrgId, Ejson)), - TimeTakenMS = erlang:convert_time_unit(erlang:monotonic_time() - Start, native, microsecond)/1000.0, + TimeTakenMS = erlang:convert_time_unit(erlang:monotonic_time() - Start, native, microsecond) / 1000.0, prometheus_histogram:observe(chef_index_expand_make_doc_for_index_duration_ms, TimeTakenMS), Ret. @@ -152,7 +152,7 @@ make_doc_for_add(Command = #chef_idx_expand_doc{id = Id, type=Type, search_provi {_, 7} -> IndexEjson; _ -> - [{<<"_type">>, <<"object">>}| IndexEjson] + [{<<"_type">>, <<"object">>} | IndexEjson] end, [jiffy:encode({[{<<"index">>, {IndexEjson1}}]}), <<"\n">>, @@ -183,7 +183,7 @@ get_object_type(ObjectType) -> %% @doc If we have a `data_bag_item' object, return a Solr field %% `data_bag', otherwise empty list. maybe_data_bag_field(Provider, DataBagName, ExistingFields) when is_binary(DataBagName) -> - [{<<"data_bag">>, chef_index:transform_data(Provider, DataBagName)}| ExistingFields]; + [{<<"data_bag">>, chef_index:transform_data(Provider, DataBagName)} | ExistingFields]; maybe_data_bag_field(_Provider, _DataBagName, ExistingFields) -> ExistingFields. @@ -250,14 +250,14 @@ expand_list(Mod, Keys, List, Acc) -> expand_obj(Mod, Keys, {PL}, Acc) -> lists:foldl(fun({K, V}, MyAcc) -> MyAcc1 = expand(Mod, Keys, K, MyAcc), - expand(Mod, [K|Keys], V, MyAcc1) + expand(Mod, [K | Keys], V, MyAcc1) end, Acc, PL). 
add_kv_pair(_Mod, [], _Value, Acc) -> Acc; add_kv_pair(Mod, [K], Value, Acc) -> [encode_pair(Mod, K, Value) | Acc]; -add_kv_pair(Mod, [K|_]=Keys, Value, Acc) -> +add_kv_pair(Mod, [K | _] = Keys, Value, Acc) -> [encode_pair(Mod, join_keys(Keys, ?SEP), Value), encode_pair(Mod, K, Value) | Acc]. diff --git a/src/oc_erchef/apps/chef_index/src/chef_index_http.erl b/src/oc_erchef/apps/chef_index/src/chef_index_http.erl index c1bfcbe828..0ed4e84b2b 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_index_http.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_index_http.erl @@ -27,7 +27,7 @@ request(Path, Method, Body, Headers) -> EndTime = erlang:monotonic_time(), TimeTaken = EndTime - StartTime, TimeTakenInMicro = erlang:convert_time_unit(TimeTaken, native, microsecond), - TimeTakenInMillis = TimeTakenInMicro/1000.0, + TimeTakenInMillis = TimeTakenInMicro / 1000.0, prometheus_histogram:observe(chef_index_http_req_duration_ms, [Method], TimeTakenInMillis), case Response of {ok, "200", _Head, _RespBody} -> @@ -122,8 +122,8 @@ get_headers() -> opensearch -> User = envy:get(chef_index, search_auth_username, "admin", string), Pass = envy:get(chef_index, search_auth_password, "admin", string), - Encoded = base64:encode_to_string(lists:append([User, ":" ,Pass])), - [{"Authorization","Basic " ++ Encoded}, {"Content-Type", "application/json"}]; + Encoded = base64:encode_to_string(lists:append([User, ":", Pass])), + [{"Authorization", "Basic " ++ Encoded}, {"Content-Type", "application/json"}]; _ -> [{"Content-Type", "application/json"}] - end. \ No newline at end of file + end. diff --git a/src/oc_erchef/apps/chef_index/src/chef_index_query.erl b/src/oc_erchef/apps/chef_index/src/chef_index_query.erl index 3b68518fe8..992a730690 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_index_query.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_index_query.erl @@ -20,7 +20,7 @@ type_field() -> <<"X_CHEF_type_CHEF_X">>.
-spec from_params(atom(), - binary()|string(), + binary() | string(), string() | binary() | undefined, string(), string()) -> #chef_solr_query{}. @@ -123,21 +123,21 @@ validate_non_neg(_Key, Int, _OrigValue) -> %% @doc Generates a constraint for chef_type %% @end --spec search_type_constraint(Type :: atom()) -> TypeConstraint :: [byte(),...]. +-spec search_type_constraint(Type :: atom()) -> TypeConstraint :: [byte(), ...]. search_type_constraint(Type) -> binary_to_list(type_field()) ++ atom_to_list(Type). %% @doc Generate a constraint for chef_database --spec search_db_from_orgid(OrgId :: binary()) -> DBName :: [byte(),...]. +-spec search_db_from_orgid(OrgId :: binary()) -> DBName :: [byte(), ...]. search_db_from_orgid(OrgId) -> binary_to_list(database_field()) ++ ":" ++ db_from_orgid(OrgId). db_from_orgid(OrgId) -> "chef_" ++ binary_to_list(OrgId). -hex2dec(X) when (X>=$0) andalso (X=<$9) -> X-$0; -hex2dec(X) when (X>=$A) andalso (X=<$F) -> X-$A+10; -hex2dec(X) when (X>=$a) andalso (X=<$f) -> X-$a+10. +hex2dec(X) when (X>=$0) andalso (X=<$9) -> X - $0; +hex2dec(X) when (X>=$A) andalso (X=<$F) -> X - $A + 10; +hex2dec(X) when (X>=$a) andalso (X=<$f) -> X - $a + 10. -type maybe_hex_uri() :: string() | binary(). %% A possibly hexadecimal encoded URI. -type uri() :: string() | binary(). @@ -148,10 +148,10 @@ http_uri_decode(String) when is_list(String) -> http_uri_decode(String) when is_binary(String) -> do_decode_binary(String). -do_decode([$%,Hex1,Hex2|Rest]) -> - [hex2dec(Hex1)*16+hex2dec(Hex2)|do_decode(Rest)]; -do_decode([First|Rest]) -> - [First|do_decode(Rest)]; +do_decode([$%, Hex1, Hex2 | Rest]) -> + [hex2dec(Hex1) * 16 + hex2dec(Hex2) | do_decode(Rest)]; +do_decode([First | Rest]) -> + [First | do_decode(Rest)]; do_decode([]) -> []. 
diff --git a/src/oc_erchef/apps/chef_index/src/chef_opensearch.erl b/src/oc_erchef/apps/chef_index/src/chef_opensearch.erl index 3d6eec7bc3..85e98a8cfa 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_opensearch.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_opensearch.erl @@ -120,7 +120,7 @@ query_body(#chef_solr_query{ {<<"size">>, Rows}, {<<"sort">>, [{[{<<"X_CHEF_id_CHEF_X">>, {[{<<"order">>, <<"asc">>}]}}]}]}, {<<"query">>, {[ - {<<"bool">>,{[ + {<<"bool">>, {[ {<<"must">>, {[query_string_query_ejson(Query)]}}, {<<"filter">>, {[query_string_query_ejson(FilterQuery)]}} ]}}]} @@ -130,7 +130,7 @@ fields_tag() -> <<"stored_fields">>. query_string_query_ejson(QueryString) -> - {<<"query_string">>,{[{<<"query">>, list_to_binary(QueryString)}]}}. + {<<"query_string">>, {[{<<"query">>, list_to_binary(QueryString)}]}}. %% A note on deleting %% diff --git a/src/oc_erchef/apps/chef_index/src/chef_solr.erl b/src/oc_erchef/apps/chef_index/src/chef_solr.erl index 29b82bab55..676502ac61 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_solr.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_solr.erl @@ -55,7 +55,7 @@ transform_data(Data) -> %% searching for to escape. Note that technically we don't need to %% escape `>' nor `"', symmetry and matching of a pre-existing Ruby %% implementation suggest otherwise. --spec xml_text_escape(binary()|[binary()]) -> binary()|[binary()]. +-spec xml_text_escape(binary() | [binary()]) -> binary() | [binary()]. xml_text_escape(BinStr) -> iolist_to_binary(xml_text_escape1(BinStr)). diff --git a/src/oc_erchef/apps/chef_index/src/chef_wait_group.erl b/src/oc_erchef/apps/chef_index/src/chef_wait_group.erl index 4b0252a4a7..1c9aa67380 100644 --- a/src/oc_erchef/apps/chef_index/src/chef_wait_group.erl +++ b/src/oc_erchef/apps/chef_index/src/chef_wait_group.erl @@ -60,8 +60,8 @@ start_link(Fun, Config) -> %% Use start_link/3 here as the caller gen_server:start_link(?MODULE, [Fun, Config], []). 
--spec add(pid(), term(), list()|term()) -> ok | {error, wait_called}. -add(Pid, JobName, JobArgs) when is_list(JobArgs)-> +-spec add(pid(), term(), list() | term()) -> ok | {error, wait_called}. +add(Pid, JobName, JobArgs) when is_list(JobArgs) -> gen_server:call(Pid, {add_job, JobName, JobArgs}); add(Pid, JobName, JobArgs) -> add(Pid, JobName, [JobArgs]). @@ -149,7 +149,7 @@ spawn_worker(JobName, JobArgs, State = #chef_wait_group_state{job_fun = Fun, waiting_workers = Workers}) -> Parent = self(), Pid = spawn_link(fun() -> run_user_callback(Fun, JobArgs, Parent) end), - {ok, State#chef_wait_group_state{waiting_workers = [{Pid, JobName}|Workers]}}. + {ok, State#chef_wait_group_state{waiting_workers = [{Pid, JobName} | Workers]}}. run_user_callback(Fun, JobArgs, Parent) -> try apply(Fun, JobArgs) of @@ -176,7 +176,7 @@ mark_job_done(Pid, Result, State = #chef_wait_group_state{ waiting_workers = Workers, done_jobs = DoneList}) -> {ok, {_Pid, JobName}, WaitingWorkers} = worker_from_list(Pid, Workers), - State#chef_wait_group_state{done_jobs = [{JobName, Result}|DoneList], + State#chef_wait_group_state{done_jobs = [{JobName, Result} | DoneList], waiting_workers = WaitingWorkers}. -spec mark_job_failed(pid(), term(), #chef_wait_group_state{}) -> #chef_wait_group_state{}. @@ -184,7 +184,7 @@ mark_job_failed(Pid, Result, State = #chef_wait_group_state{ waiting_workers = Workers, failed_jobs = FailedList}) -> {ok, {_Pid, JobName}, WaitingWorkers} = worker_from_list(Pid, Workers), - State#chef_wait_group_state{failed_jobs = [{JobName, Result}|FailedList], + State#chef_wait_group_state{failed_jobs = [{JobName, Result} | FailedList], waiting_workers = WaitingWorkers}. -spec worker_from_list(pid(), [{pid(), term()}]) -> {ok, {pid(), term()}, [{pid(), term()}]} | {error, no_worker}. 
@@ -193,10 +193,10 @@ worker_from_list(Pid, Workers) -> worker_from_list(_Pid, [], _Acc) -> {error, no_worker}; -worker_from_list(Pid, [Found = {Pid, _JobName}|Rest], Acc) -> +worker_from_list(Pid, [Found = {Pid, _JobName} | Rest], Acc) -> {ok, Found, lists:append(Acc, Rest)}; -worker_from_list(Pid, [NotFound|Rest], Acc)-> - worker_from_list(Pid, Rest, [NotFound|Acc]). +worker_from_list(Pid, [NotFound | Rest], Acc) -> + worker_from_list(Pid, Rest, [NotFound | Acc]). -spec make_gather_reply(list(), list()) -> {ok, list()} | {error, list(), list()}. make_gather_reply(Done, []) -> diff --git a/src/oc_erchef/apps/chef_index/test/chef_index_expand_tests.erl b/src/oc_erchef/apps/chef_index/test/chef_index_expand_tests.erl index 3d4b349392..5976b42555 100644 --- a/src/oc_erchef/apps/chef_index/test/chef_index_expand_tests.erl +++ b/src/oc_erchef/apps/chef_index/test/chef_index_expand_tests.erl @@ -3,17 +3,17 @@ -include_lib("eunit/include/eunit.hrl"). -define(ROLE, - {[{<<"name">>,<<"web_role">>}, - {<<"description">>,<<"something something">>}, - {<<"json_class">>,<<"Chef::Role">>}, - {<<"chef_type">>,<<"role">>}, + {[{<<"name">>, <<"web_role">>}, + {<<"description">>, <<"something something">>}, + {<<"json_class">>, <<"Chef::Role">>}, + {<<"chef_type">>, <<"role">>}, {<<"default_attributes">>, - {[{<<"test1">>,1},{<<"test2">>,<<"2">>}]}}, + {[{<<"test1">>, 1}, {<<"test2">>, <<"2">>}]}}, {<<"override_attributes">>, - {[{<<"test1">>,8},{<<"rideover">>,<<"10-4">>}]}}, - {<<"run_list">>,[<<"apache2">>,<<"php">>]}, + {[{<<"test1">>, 8}, {<<"rideover">>, <<"10-4">>}]}}, + {<<"run_list">>, [<<"apache2">>, <<"php">>]}, {<<"env_run_lists">>, - {[{<<"prod">>,[<<"nginx">>]}]}}]}). + {[{<<"prod">>, [<<"nginx">>]}]}}]}). 
-define(DB_ITEM, {[ @@ -372,7 +372,7 @@ es_api_test_() -> op_api_test_() -> MinItem = {[{<<"key1">>, <<"value1">>}, {<<"key2">>, <<"value-2">>}]}, - JsonContentType = [{"Authorization","Basic YWRtaW46YWRtaW4="},{"Content-Type", "application/json"}], + JsonContentType = [{"Authorization", "Basic YWRtaW46YWRtaW4="}, {"Content-Type", "application/json"}], {foreach, fun() -> application:set_env(chef_index, search_provider, opensearch), diff --git a/src/oc_erchef/apps/chef_index/test/chef_index_tests.erl b/src/oc_erchef/apps/chef_index/test/chef_index_tests.erl index 13a8ed6c1b..efd86f2f1e 100644 --- a/src/oc_erchef/apps/chef_index/test/chef_index_tests.erl +++ b/src/oc_erchef/apps/chef_index/test/chef_index_tests.erl @@ -18,23 +18,23 @@ -module(chef_index_tests). -include_lib("eunit/include/eunit.hrl"). -define(EXPECTED_DOC, [<<"">>, - [[<<">,<<"X_CHEF_id_CHEF_X">>,<<"\">">>,<<"a1">>, + [[<<">, <<"X_CHEF_id_CHEF_X">>, <<"\">">>, <<"a1">>, <<"">>], - [<<">,<<"X_CHEF_database_CHEF_X">>,<<"\">">>,<<"chef_db1">>, + [<<">, <<"X_CHEF_database_CHEF_X">>, <<"\">">>, <<"chef_db1">>, <<"">>], - [<<">,<<"X_CHEF_type_CHEF_X">>,<<"\">">>,<<"role">>, + [<<">, <<"X_CHEF_type_CHEF_X">>, <<"\">">>, <<"role">>, <<"">>]], [], - [<<">,<<"content">>,<<"\">">>, - [[<<"X_CHEF_database_CHEF_X">>,<<"__=__">>,<<"chef_db1">>,<<" ">>], - [<<"X_CHEF_id_CHEF_X">>,<<"__=__">>,<<"a1">>,<<" ">>], - [<<"X_CHEF_type_CHEF_X">>,<<"__=__">>,<<"role">>,<<" ">>], - [<<"key1">>,<<"__=__">>,<<"value1">>,<<" ">>], - [<<"key2">>,<<"__=__">>,<<"value2">>,<<" ">>]], + [<<">, <<"content">>, <<"\">">>, + [[<<"X_CHEF_database_CHEF_X">>, <<"__=__">>, <<"chef_db1">>, <<" ">>], + [<<"X_CHEF_id_CHEF_X">>, <<"__=__">>, <<"a1">>, <<" ">>], + [<<"X_CHEF_type_CHEF_X">>, <<"__=__">>, <<"role">>, <<" ">>], + [<<"key1">>, <<"__=__">>, <<"value1">>, <<" ">>], + [<<"key2">>, <<"__=__">>, <<"value2">>, <<" ">>]], <<"">>], <<"">>]). --define(EXPECTED_DELETE_DOC, [<<"">>,<<"a1">>,<<"">>]).
+-define(EXPECTED_DELETE_DOC, [<<"">>, <<"a1">>, <<"">>]). chef_index_test_() -> Item = {[{<<"key1">>, <<"value1">>}, diff --git a/src/oc_erchef/apps/chef_license/src/chef_license.erl b/src/oc_erchef/apps/chef_license/src/chef_license.erl index 52f5e04844..a42ae1c850 100644 --- a/src/oc_erchef/apps/chef_license/src/chef_license.erl +++ b/src/oc_erchef/apps/chef_license/src/chef_license.erl @@ -2,5 +2,5 @@ -export([get_license/0]). -get_license()-> - chef_license_worker:get_license(). \ No newline at end of file +get_license() -> + chef_license_worker:get_license(). diff --git a/src/oc_erchef/apps/chef_license/src/chef_license_worker.erl b/src/oc_erchef/apps/chef_license/src/chef_license_worker.erl index 361109842d..18c20a1ccc 100644 --- a/src/oc_erchef/apps/chef_license/src/chef_license_worker.erl +++ b/src/oc_erchef/apps/chef_license/src/chef_license_worker.erl @@ -44,7 +44,7 @@ start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). -get_license()-> +get_license() -> gen_server:call(?MODULE, get_license). %%% ====================================== @@ -55,20 +55,20 @@ init(_Config) -> erlang:send_after(?DEFAULT_LICENSE_SCAN_INTERVAL, self(), check_license), {ok, State}. 
-handle_call(get_license, _From, #state{license_cache = undefined, license_type=Type, expiration_date=ExpDate, grace_period = GracePeriod, message = Msg, customer_name=CN,license_id = LicenseId}=State) -> - {reply, {valid_license, Type, GracePeriod, ExpDate, Msg, CN,LicenseId}, State}; -handle_call(get_license, _From, #state{license_cache = Lic, license_type=Type, expiration_date=ExpDate, grace_period = GracePeriod, message = Msg, customer_name=CN,license_id = LicenseId} = State) -> - {reply,{Lic, Type, GracePeriod, ExpDate, Msg, CN,LicenseId}, State}; +handle_call(get_license, _From, #state{license_cache = undefined, license_type=Type, expiration_date=ExpDate, grace_period = GracePeriod, message = Msg, customer_name=CN, license_id = LicenseId}=State) -> + {reply, {valid_license, Type, GracePeriod, ExpDate, Msg, CN, LicenseId}, State}; +handle_call(get_license, _From, #state{license_cache = Lic, license_type=Type, expiration_date=ExpDate, grace_period = GracePeriod, message = Msg, customer_name=CN, license_id = LicenseId} = State) -> + {reply, {Lic, Type, GracePeriod, ExpDate, Msg, CN, LicenseId}, State}; handle_call(_Message, _From, State) -> {noreply, State}. handle_cast(_Message, State) -> {noreply, State}. -handle_info(check_license, State)-> +handle_info(check_license, State) -> State1 = check_license(State), erlang:send_after(?DEFAULT_LICENSE_SCAN_INTERVAL, self(), check_license), - {noreply,State1}; + {noreply, State1}; handle_info(_Message, State) -> {noreply, State}. 
@@ -89,14 +89,14 @@ check_license(State) -> {'EXIT', _} -> <<"">> end, case process_license(JsonStr) of - {ok, valid_license, ExpDate, CustomerName,LicenseId} -> - State#state{license_cache=valid_license, grace_period=undefined, scanned_time = erlang:timestamp(), expiration_date=ExpDate, customer_name=CustomerName,license_id = LicenseId}; - {ok, commercial_expired, ExpDate, Msg, CustomerName,LicenseId} -> - State#state{license_cache=commercial_expired, license_type = <<"commercial">>, grace_period=undefined, scanned_time = erlang:timestamp(), expiration_date=ExpDate, message=Msg, customer_name=CustomerName,license_id = LicenseId}; - {ok, commercial_grace_period, ExpDate, Msg, CustomerName,LicenseId} -> - State#state{license_cache=commercial_grace_period, grace_period=true, scanned_time = erlang:timestamp(), expiration_date=ExpDate, message=Msg, customer_name=CustomerName,license_id = LicenseId}; - {ok, trial_expired, ExpDate, Msg, CustomerName,LicenseId} -> - State#state{license_cache=trial_expired_expired, license_type = <<"trial">>, grace_period=undefined, scanned_time = erlang:timestamp(), expiration_date=ExpDate, message=Msg, customer_name=CustomerName,license_id = LicenseId}; + {ok, valid_license, ExpDate, CustomerName, LicenseId} -> + State#state{license_cache=valid_license, grace_period=undefined, scanned_time = erlang:timestamp(), expiration_date=ExpDate, customer_name=CustomerName, license_id = LicenseId}; + {ok, commercial_expired, ExpDate, Msg, CustomerName, LicenseId} -> + State#state{license_cache=commercial_expired, license_type = <<"commercial">>, grace_period=undefined, scanned_time = erlang:timestamp(), expiration_date=ExpDate, message=Msg, customer_name=CustomerName, license_id = LicenseId}; + {ok, commercial_grace_period, ExpDate, Msg, CustomerName, LicenseId} -> + State#state{license_cache=commercial_grace_period, grace_period=true, scanned_time = erlang:timestamp(), expiration_date=ExpDate, message=Msg, customer_name=CustomerName, license_id = 
LicenseId}; + {ok, trial_expired, ExpDate, Msg, CustomerName, LicenseId} -> + State#state{license_cache=trial_expired_expired, license_type = <<"trial">>, grace_period=undefined, scanned_time = erlang:timestamp(), expiration_date=ExpDate, message=Msg, customer_name=CustomerName, license_id = LicenseId}; {error, no_license} -> State#state{license_cache=trial_expired_expired, license_type = <<"trial">>, grace_period=undefined, scanned_time = erlang:timestamp(), expiration_date="", message=get_alert_message(trial_expired, "")}; {error, _} -> State @@ -108,7 +108,7 @@ get_license_info() -> {JsonStr} = jiffy:decode(Bin), JsonStr. -process_license(<<"">>)-> +process_license(<<"">>) -> {error, invalid_json}; process_license(LicJson) -> case ej:get({<<"result">>}, LicJson) of @@ -116,22 +116,22 @@ process_license(LicJson) -> CustomerName = ej:get({<<"customer_name">>}, LicDetails), LicenseId = ej:get({<<"license_id">>}, LicDetails), case ej:get({<<"expiration_date">>}, LicDetails) of - {[{<<"seconds">>,ExpireInSeconds}]} -> + {[{<<"seconds">>, ExpireInSeconds}]} -> ExpDate = sec_to_date(ExpireInSeconds), case os:system_time(second) < ExpireInSeconds of - true -> {ok, valid_license, ExpDate, CustomerName,LicenseId}; + true -> {ok, valid_license, ExpDate, CustomerName, LicenseId}; _ -> case ej:get({<<"license_type">>}, LicDetails) of <<"commercial">> -> case ej:get({<<"grace_period">>}, LicDetails) of true -> {ok, commercial_grace_period, ExpDate, - get_alert_message(commercial_grace_period, ExpDate), CustomerName,LicenseId}; + get_alert_message(commercial_grace_period, ExpDate), CustomerName, LicenseId}; _ -> - { ok, commercial_expired, ExpDate, get_alert_message(commercial_expired, ExpDate), CustomerName,LicenseId} + { ok, commercial_expired, ExpDate, get_alert_message(commercial_expired, ExpDate), CustomerName, LicenseId} end; _ -> - {ok, trial_expired, ExpDate, get_alert_message(trial_expired, ExpDate), CustomerName,LicenseId} + {ok, trial_expired, ExpDate, 
get_alert_message(trial_expired, ExpDate), CustomerName, LicenseId} end end; _ -> @@ -148,7 +148,7 @@ process_license(LicJson) -> {error, invalid_response} end. -get_alert_message(Type, ExpDate)-> +get_alert_message(Type, ExpDate) -> case Type of trial_expired -> "Your Progress Chef InfraServer license has expired or does not exist! You no longer have access to Chef InfraServer. Please contact the Account Team to upgrade to an Enterprise License."; @@ -158,14 +158,14 @@ get_alert_message(Type, ExpDate)-> "Your Progress Chef InfraServer license expired on " ++ ExpDate ++ " and you are currently on a limited extension period! To get a new license, please contact the Account Team or email us at chef-account-team@progress.com" end. -sec_to_date(Seconds)-> - BaseDate = calendar:datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}}), - Seconds1 = BaseDate + Seconds, - { {Year,Month,Day},_Time} = calendar:gregorian_seconds_to_datetime(Seconds1), - lists:flatten(io_lib:format("~4..0w-~2..0w-~2..0w",[Year,Month,Day])). +sec_to_date(Seconds) -> + BaseDate = calendar:datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}}), + Seconds1 = BaseDate + Seconds, + { {Year, Month, Day}, _Time} = calendar:gregorian_seconds_to_datetime(Seconds1), + lists:flatten(io_lib:format("~4..0w-~2..0w-~2..0w", [Year, Month, Day])). 
%%% ============================= %%% Sample license response %%% ============================= -test_license()-> - {ok,<<"{\"command\":\"chef-automate license status --result-json /tmp/string3\",\"status\":\"OK\",\"error_code\":0,\"error_description\":\"\",\"error_cause\":\"\",\"error_stack_trace\":\"\",\"error_recovery\":\"\",\"error_type\":\"\",\"result\":{\"set\":true,\"license_id\":\"6541d90a-2ed0-4d64-9861-c20fc21a3093\",\"customer_name\":\"janshahid.shaik@progress.com\",\"expiration_date\":{\"seconds\":1735689599},\"deployment_id\":\"0b9907b3-45d2-4faa-b04e-b76e31ba70e5\",\"deployment_type\":\"Standalone\",\"license_type\":\"trial\",\"deployment_at\":{\"seconds\":1727067698}}}">>}. \ No newline at end of file +test_license() -> + {ok, <<"{\"command\":\"chef-automate license status --result-json /tmp/string3\",\"status\":\"OK\",\"error_code\":0,\"error_description\":\"\",\"error_cause\":\"\",\"error_stack_trace\":\"\",\"error_recovery\":\"\",\"error_type\":\"\",\"result\":{\"set\":true,\"license_id\":\"6541d90a-2ed0-4d64-9861-c20fc21a3093\",\"customer_name\":\"janshahid.shaik@progress.com\",\"expiration_date\":{\"seconds\":1735689599},\"deployment_id\":\"0b9907b3-45d2-4faa-b04e-b76e31ba70e5\",\"deployment_type\":\"Standalone\",\"license_type\":\"trial\",\"deployment_at\":{\"seconds\":1727067698}}}">>}. diff --git a/src/oc_erchef/apps/chef_objects/src/chef_cbv_cache.erl b/src/oc_erchef/apps/chef_objects/src/chef_cbv_cache.erl index 1dbba74c4f..692d3ff5a8 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_cbv_cache.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_cbv_cache.erl @@ -93,7 +93,7 @@ start_link() -> %% These changes only make sense if we hit new usage patterns that overwhelm the %% cbv_cache message queue on a regular basis. %% --spec get(any()) -> {error, retry|busy} | undefined | term(). +-spec get(any()) -> {error, retry | busy} | undefined | term(). get(Key) -> send_if_available({get, Key}). 
@@ -104,7 +104,7 @@ get(Key) -> %% If it returns '{error, busy}' the process is not available to service the request, and %% the caller should fail without retrying. %% Returns undefined if the cache is disabled. --spec claim(any()) -> ok | {error, retry|busy} | undefined. +-spec claim(any()) -> ok | {error, retry | busy} | undefined. claim(Key) -> send_if_available({claim, Key}). diff --git a/src/oc_erchef/apps/chef_objects/src/chef_cert_http.erl b/src/oc_erchef/apps/chef_objects/src/chef_cert_http.erl index 4e9a14bf90..2c03385124 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_cert_http.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_cert_http.erl @@ -29,8 +29,8 @@ gen_cert/2 ]). --spec gen_cert(Guid::binary(), RequestId::binary()) -> {Cert::binary(), - Keypair::binary()}. +-spec gen_cert(Guid :: binary(), RequestId :: binary()) -> {Cert :: binary(), + Keypair :: binary()}. %% @doc Handle HTTP interaction with remote certificate server. %% This posts a common name (CN) to the server which is then used to generate %% a certificate remotely. We map common error cases to specific error messages @@ -51,13 +51,13 @@ gen_cert(Guid, RequestId) -> throw({error, Reason}) end. --spec body_for_post(Guid::binary()) -> <<_:64,_:_*8>>. +-spec body_for_post(Guid :: binary()) -> <<_:64, _:_*8>>. %% @doc construct a body which can be posted to the certificate server body_for_post(Guid) -> <<"common_name=URI:http://chef.io/GUIDS/", Guid/binary>>. --spec parse_json_response(Body::string()) -> {Cert::binary(), - Keypair::binary()}. +-spec parse_json_response(Body :: string()) -> {Cert :: binary(), + Keypair :: binary()}. %% @doc extract the certificate and keypair from the json structure. 
%% %% We apply here a version for the Pubkey diff --git a/src/oc_erchef/apps/chef_objects/src/chef_client.erl b/src/oc_erchef/apps/chef_objects/src/chef_client.erl index c38242584b..08fb90416d 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_client.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_client.erl @@ -77,7 +77,7 @@ org_id(#chef_client{org_id = OrgId}) -> type_name(#chef_client{}) -> client. -authz_id(#chef_client{authz_id = AuthzId})-> +authz_id(#chef_client{authz_id = AuthzId}) -> AuthzId. is_indexed(_ObjectRec) -> @@ -241,7 +241,7 @@ base_client_ejson(#chef_client{name = Name, validator = Validator}, OrgName) -> %% EJson-encoded Erlang data structure, using passed defaults %% @end --spec parse_binary_json(api_version(), binary(), undefined | #chef_client{}) -> {'ok',ej:json_object()}. % or throw +-spec parse_binary_json(api_version(), binary(), undefined | #chef_client{}) -> {'ok', ej:json_object()}. % or throw parse_binary_json(ApiVersion, Bin, undefined) -> validate_json(ApiVersion, Bin, undefined, undefined); parse_binary_json(ApiVersion, Bin, #chef_client{name = ReqName} = CurrentClient) -> diff --git a/src/oc_erchef/apps/chef_objects/src/chef_cookbook_version.erl b/src/oc_erchef/apps/chef_objects/src/chef_cookbook_version.erl index 0a51347761..3e9dcf6004 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_cookbook_version.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_cookbook_version.erl @@ -114,7 +114,7 @@ -behaviour(chef_object). -authz_id(#chef_cookbook_version{authz_id = AuthzId})-> +authz_id(#chef_cookbook_version{authz_id = AuthzId}) -> AuthzId. -spec name(#chef_cookbook_version{}) -> binary(). @@ -388,9 +388,9 @@ is_valid_version(Version) -> %% than one dot or more than two dots is an error. %% %% @end --spec parse_version(Version::binary()) -> {Major::non_neg_integer(), - Minor::non_neg_integer(), - Patch::non_neg_integer()}. 
+-spec parse_version(Version :: binary()) -> {Major :: non_neg_integer(), + Minor :: non_neg_integer(), + Patch :: non_neg_integer()}. parse_version(Version) when is_binary(Version) -> Parts = [list_to_integer(binary_to_list(V)) || V <- binary:split(Version, <<".">>, [global])], @@ -432,9 +432,9 @@ not_larger_than(Max, L) -> %% @doc given a version tuple {Major, Minor, Patch} return it as a %% binary() %% @end --spec version_to_binary({Major::non_neg_integer(), - Minor::non_neg_integer(), - Patch::non_neg_integer()}) -> +-spec version_to_binary({Major :: non_neg_integer(), + Minor :: non_neg_integer(), + Patch :: non_neg_integer()}) -> binary(). version_to_binary({Major, Minor, Patch}) -> iolist_to_binary([integer_to_list(Major), ".", @@ -593,7 +593,7 @@ inflate(_Type, Data) -> %% Originally intended to operate on the `serialized_object' data of a Cookbook Version (the %% piece that actually has the recipes in it), but could also work on the gzipped JSON %% string for the entire cookbook, if that's ever a thing you'd want to do. --spec extract_recipe_names(XCookbookJSON::binary()) -> [RecipeName::binary()]. +-spec extract_recipe_names(XCookbookJSON :: binary()) -> [RecipeName :: binary()]. extract_recipe_names(<<31, 139, _Rest/binary>>=XCookbookJSON) -> EJson = chef_db_compression:decompress_and_decode(XCookbookJSON), diff --git a/src/oc_erchef/apps/chef_objects/src/chef_data_bag_item.erl b/src/oc_erchef/apps/chef_objects/src/chef_data_bag_item.erl index ecdc2ddebd..90c6f521ec 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_data_bag_item.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_data_bag_item.erl @@ -213,7 +213,7 @@ validate(DataBagItem) -> %% %% Returns `DataBagItem` with `IdFromUrl` unambiguously set as its id, or throws if it was %% mismatched. --spec normalized_data_bag_for_update(ej:json_object(), binary()) ->ej:json_object(). +-spec normalized_data_bag_for_update(ej:json_object(), binary()) -> ej:json_object(). 
normalized_data_bag_for_update(DataBagItem, IdFromUrl) -> case ej:get({<<"id">>}, DataBagItem)of undefined -> diff --git a/src/oc_erchef/apps/chef_objects/src/chef_depsolver.erl b/src/oc_erchef/apps/chef_objects/src/chef_depsolver.erl index 4e245ebd93..71a3790a53 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_depsolver.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_depsolver.erl @@ -104,9 +104,9 @@ validate_body(Body) -> -spec solve_dependencies(AllVersions :: [dependency_set()], EnvConstraints :: [constraint()], - Cookbooks :: [Name::binary() | - {Name::binary(), Version::binary()}]) -> - {ok, [ versioned_cookbook()]} | {error, term()}. + Cookbooks :: [Name :: binary() | + {Name :: binary(), Version :: binary()}]) -> + {ok, [versioned_cookbook()]} | {error, term()}. %% @doc Main entry point into the depsolver. It is supplied with a dependency_set() %% containing all the cookbook versions and their dependencies that are in the database diff --git a/src/oc_erchef/apps/chef_objects/src/chef_depsolver_worker.erl b/src/oc_erchef/apps/chef_objects/src/chef_depsolver_worker.erl index e059087178..b73da5e52c 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_depsolver_worker.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_depsolver_worker.erl @@ -42,10 +42,11 @@ -record(state, {port, os_pid}). --ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). %% TODO: remove --endif. --include("chef_types.hrl"). +%-ifdef(TEST). +%-include_lib("eunit/include/eunit.hrl"). %% TODO: remove +%-endif. +%-include("chef_types.hrl"). 
%%%=================================================================== %%% API @@ -66,12 +67,12 @@ start_link() -> %% Solve dependencies with the given state and constraints %% @end %%-------------------------------------------------------------------- --spec solve_dependencies(AllVersions :: [chef_depsolver:dependency_set()], - EnvConstraints :: [chef_depsolver:constraint()], - Cookbooks :: [Name::binary() | - {Name::binary(), Version::binary()}], - Timeout :: integer()) -> - {ok, [ versioned_cookbook()]} | {error, term()}. +%-spec solve_dependencies(AllVersions :: [chef_depsolver:dependency_set()], +% EnvConstraints :: [chef_depsolver:constraint()], +% Cookbooks :: [Name :: binary() | +% {Name :: binary(), Version :: binary()}], +% Timeout :: integer()) -> +% {ok, [ versioned_cookbook()]} | {error, term()}. solve_dependencies(AllVersions, EnvConstraints, Cookbooks, Timeout) -> case pooler:take_member(chef_depsolver, pooler_timeout()) of error_no_members -> @@ -124,7 +125,7 @@ init([]) -> %% info on startup so that we can use it in the event of a timeout. Hard-killing the process %% handles the failure case where the Ruby process gets hung and can no longer respond to %% STDOUT closing, which would typically cause the process to exit. 
- Payload = term_to_binary({get_pid}), + Payload = term_to_binary({get_pid}, [{minor_version, 1}]), erlang:port_command(Port, Payload), Pid = receive {Port, {data, Data}} -> @@ -153,7 +154,7 @@ handle_call({solve, AllVersions, EnvConstraints, Cookbooks, Timeout}, Payload = term_to_binary({solve, [{environment_constraints, EnvConstraints}, {all_versions, AllVersions}, {run_list, Cookbooks}, - {timeout_ms, Timeout}]}), + {timeout_ms, Timeout}]}, [{minor_version, 1}]), erlang:port_command(Port, Payload), %% The underlying ruby code has the potential to reach nearly 2x the diff --git a/src/oc_erchef/apps/chef_objects/src/chef_environment.erl b/src/oc_erchef/apps/chef_objects/src/chef_environment.erl index 971c3c048a..2789b568b1 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_environment.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_environment.erl @@ -138,7 +138,7 @@ validate_keys([]) -> ok; validate_keys({List}) when is_list(List) -> validate_keys(List); -validate_keys([{Item, _}|Rest]) -> +validate_keys([{Item, _} | Rest]) -> case lists:member(Item, ?VALID_KEYS) of true -> validate_keys(Rest); _ -> diff --git a/src/oc_erchef/apps/chef_objects/src/chef_key_base.erl b/src/oc_erchef/apps/chef_objects/src/chef_key_base.erl index ee53115033..ea213c5b3d 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_key_base.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_key_base.erl @@ -36,7 +36,7 @@ key_owner_type/1]). -spec maybe_generate_key_pair(ej:json_object(), - fun(( keygen_timeout | not_requested | {binary(),binary()}) -> term())) -> term(). + fun(( keygen_timeout | not_requested | {binary(), binary()}) -> term())) -> term(). maybe_generate_key_pair(EJ, ContinuationFun) -> KeyPair = case ej:get({<<"create_key">>}, EJ) of true -> chef_keygen_cache:get_key_pair(); @@ -104,7 +104,7 @@ set_public_key(EJ, PublicKey) -> %% SPKI format are version 0. The PKCS1 format is deprecated, but %% supported for read. We will only generate certs or SPKI packaged %% keys. 
--spec key_version(<<_:64,_:_*8>>) -> 0 | 1. +-spec key_version(<<_:64, _:_*8>>) -> 0 | 1. key_version(null) -> null; key_version(<<"-----BEGIN CERTIFICATE", _Bin/binary>>) -> @@ -141,7 +141,7 @@ has_public_key_header(_) -> -spec public_key_spec( req | opt ) -> term(). public_key_spec(OptOrRequired) -> - {{OptOrRequired,<<"public_key">>}, {fun_match, {fun valid_public_key/1, string, + {{OptOrRequired, <<"public_key">>}, {fun_match, {fun valid_public_key/1, string, <<"Public Key must be a valid key.">>}}}. cert_or_key(Payload) -> @@ -181,7 +181,7 @@ value_or_undefined(Key, Data) -> %% For clients % Will ensure that if required, only one of 'create_key' or 'public_key' is present, % and that the one present is valid. --spec validate_public_key_fields(opt|req, ej:json_object(), key|user|client, create|update) -> {ok, ej:json_object()}. % or throw +-spec validate_public_key_fields(opt | req, ej:json_object(), key | user | client, create | update) -> {ok, ej:json_object()}. % or throw % but we want USER and CLIENT to reject all key-related fields on UPDATE. diff --git a/src/oc_erchef/apps/chef_objects/src/chef_metrics.erl b/src/oc_erchef/apps/chef_objects/src/chef_metrics.erl index c6eb521c90..3d35daa43f 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_metrics.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_metrics.erl @@ -39,7 +39,7 @@ %% function under measurement that implements some aspect of that %% functionality. -spec label(Upstream :: atom(), - {Mod :: atom(), Fun :: atom()}) -> Label :: <<_:16,_:_*8>>. + {Mod :: atom(), Fun :: atom()}) -> Label :: <<_:16, _:_*8>>. 
label(s3=Upstream, {Mod, Fun}) when is_atom(Mod), is_atom(Fun) -> %% S3-related labels are special snowflakes because we want to diff --git a/src/oc_erchef/apps/chef_objects/src/chef_node.erl b/src/oc_erchef/apps/chef_objects/src/chef_node.erl index a18e0b3b02..c1b5629534 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_node.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_node.erl @@ -237,7 +237,7 @@ update_query(_ObjectRec) -> %% the policy_name and policy_group fields if they're not present, so we allow %% 'undefined' to pass through. fields_for_insert(Rec) -> - [_RecName, _ApiVersion|Tail] = tuple_to_list(Rec), + [_RecName, _ApiVersion | Tail] = tuple_to_list(Rec), Tail. fields_for_update(#chef_node{environment = Environment, diff --git a/src/oc_erchef/apps/chef_objects/src/chef_object.erl b/src/oc_erchef/apps/chef_objects/src/chef_object.erl index 0b11dbab1f..d035d610c9 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_object.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_object.erl @@ -34,7 +34,7 @@ | [[tuple()]] %% This is what it looks like when your %% ReturnTransform is 'rows' | {error, _}. --type select_callback() :: fun(({ QueryName ::atom(), BindParameters :: list(), +-type select_callback() :: fun(({ QueryName :: atom(), BindParameters :: list(), ReturnFieldNames :: [atom()]} | {QueryName :: atom(), BindParameters :: list()} | {QueryName :: atom(), BindParameters :: list(), @@ -204,7 +204,7 @@ fields_for_fetch(Rec) -> is_indexed(Rec) -> call(Rec, is_indexed). --spec list(Rec:: object_rec(), CallbackFun :: select_callback()) -> +-spec list(Rec :: object_rec(), CallbackFun :: select_callback()) -> select_return(). 
list(Rec, CallbackFun) -> Mod = element(1, Rec), diff --git a/src/oc_erchef/apps/chef_objects/src/chef_object_base.erl b/src/oc_erchef/apps/chef_objects/src/chef_object_base.erl index 052760c8d9..67668c1aa8 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_object_base.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_object_base.erl @@ -59,8 +59,8 @@ sql_date(now) -> sql_date(DateString) when is_binary(DateString) -> DateString; -sql_date({_,_,_} = TS) -> - {{Year,Month,Day},{Hour,Minute,Second}} = calendar:now_to_universal_time(TS), +sql_date({_, _, _} = TS) -> + {{Year, Month, Day}, {Hour, Minute, Second}} = calendar:now_to_universal_time(TS), iolist_to_binary(io_lib:format("~4w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w", [Year, Month, Day, Hour, Minute, Second])). @@ -71,9 +71,9 @@ sql_date({_,_,_} = TS) -> %% If given a JSON binary, it is assumed that the string represents ONLY a dependencies / %% constraint hash (i.e., it's just a mapping of name to constraint string). It is assumed %% that the given input (either JSON or Environment) have been previously validated. --spec depsolver_constraints(#chef_environment{serialized_object::binary()} +-spec depsolver_constraints(#chef_environment{serialized_object :: binary()} | binary() % JSON string - | {[{Name::binary(), ConstraintString::binary()}]}) %% EJson hash + | {[{Name :: binary(), ConstraintString :: binary()}]}) %% EJson hash -> [ chef_depsolver:raw_constraint() ]. depsolver_constraints(#chef_environment{serialized_object=SerializedObject}) -> EJson = chef_db_compression:decompress_and_decode(SerializedObject), @@ -133,7 +133,7 @@ make_org_prefix_id(OrgId) -> FakeName = crypto:strong_rand_bytes(32), %% Picked 32 for the hell of it make_org_prefix_id(OrgId, FakeName). --spec make_org_prefix_id(<<_:256>>, string()|binary()) -> <<_:256>>. +-spec make_org_prefix_id(<<_:256>>, string() | binary()) -> <<_:256>>. 
%% @doc Create a guid with org-specific prefix %% %% We use the last 48 bits of the org guid as the prefix for the object guid. The remainder @@ -179,17 +179,17 @@ normalize_run_list(RunList) -> %% It is assumed that only legal run list items will be input to this function (i.e., the %% run lists they are part of have already been validated). %% -%% NOTE: About the spec here, `<<_:40,_:_*8>>` is the notation for a binary string that is +%% NOTE: About the spec here, `<<_:40, _:_*8>>` is the notation for a binary string that is %% at least 5 bytes long (8 bits * 5 = 40). This comes from Dialyzer inferring that the %% smallest possible return value for this function would be <<"role[">>, which (while true) %% is rather unhelpful. We can't specify a return value of `binary()`, however, because %% that is an underspecification, which conflicts with our Dialyzer setting of -Wunderspecs; %% we want to keep that because it's a generally useful setting... just not when dealing %% with Erlang's lack of a true string data type :( --spec normalize_item(binary()) -> <<_:40,_:_*8>>. -normalize_item(<<"role[",_Item/binary>>=Role) -> +-spec normalize_item(binary()) -> <<_:40, _:_*8>>. +normalize_item(<<"role[", _Item/binary>>=Role) -> Role; -normalize_item(<<"recipe[",_Item/binary>>=Recipe) -> +normalize_item(<<"recipe[", _Item/binary>>=Recipe) -> Recipe; normalize_item(Recipe) when is_binary(Recipe) -> <<"recipe[", Recipe/binary, "]">>. @@ -203,7 +203,7 @@ normalize_item(Recipe) when is_binary(Recipe) -> %% %% TODO: This would be a good candidate for a 'chef_common' module function; it's copied %% from chef_wm_depsolver:remove_dups/1. --spec deduplicate_run_list([<<_:40,_:_*8>>]) -> list(). +-spec deduplicate_run_list([<<_:40, _:_*8>>]) -> list(). deduplicate_run_list(L) -> WithIdx = lists:zip(L, lists:seq(1, length(L))), [ Elt || {Elt, _} <- lists:ukeysort(2, lists:ukeysort(1, WithIdx)) ]. 
@@ -245,7 +245,7 @@ allowed_keys(_ValidKeys, []) -> ok; allowed_keys(ValidKeys, {List}) when is_list(List) -> allowed_keys(ValidKeys, List); -allowed_keys(ValidKeys, [{Item, _}|Rest]) -> +allowed_keys(ValidKeys, [{Item, _} | Rest]) -> case lists:member(Item, ValidKeys) of true -> allowed_keys(ValidKeys, Rest); _ -> @@ -298,7 +298,7 @@ validate_date_field(EJ, FieldBinary) -> % SafeTimestring = re:replace(ej:get({FieldBinary}, EJ), "Z", "",[global,{return,binary}]), % ej:set({FieldBinary}, EJ, SafeTimestring) catch % if validation fails, throw proper date error - throw:{ej_invalid,string_match,FieldBinary,_,_,_,_} -> + throw:{ej_invalid, string_match, FieldBinary, _, _, _, _} -> throw({bad_date, FieldBinary}); throw:{ec_date, {bad_date, _}} -> throw({bad_date, FieldBinary}) @@ -322,5 +322,5 @@ parse_date(Date) when is_binary(Date) -> %% Note: side effect - effect of stripping any timezone data provided, eg 10:00:00+0100 will be captured as %% 10:00:00. %% Longer term we will need to submit an upstream PR to get epgsql_?datetime to behave properly. - [Date2|_] = re:split(Date, "[Zz+]"), + [Date2 | _] = re:split(Date, "[Zz+]"), ec_date:parse(binary_to_list(Date2)). diff --git a/src/oc_erchef/apps/chef_objects/src/chef_object_default_callbacks.erl b/src/oc_erchef/apps/chef_objects/src/chef_object_default_callbacks.erl index 4d52875963..81431faf21 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_object_default_callbacks.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_object_default_callbacks.erl @@ -36,7 +36,7 @@ update(Rec, CallbackFun) -> CallbackFun({Mod:update_query(Rec), Mod:fields_for_update(Rec)}). 
fields_for_insert(Rec) -> - [_RecName, _ApiVersion|Tail] = tuple_to_list(Rec), + [_RecName, _ApiVersion | Tail] = tuple_to_list(Rec), %% We detect if any of the fields in the record have not been set %% and throw an error case lists:any(fun is_undefined/1, Tail) of diff --git a/src/oc_erchef/apps/chef_objects/src/chef_role.erl b/src/oc_erchef/apps/chef_objects/src/chef_role.erl index a412ef6913..a00d53eb59 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_role.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_role.erl @@ -71,7 +71,7 @@ {<<"json_class">>, <<"Chef::Role">>}, {<<"chef_type">>, <<"role">>}, {<<"default_attributes">>, ?EMPTY_EJSON_HASH}, - {<<"override_attributes">>,?EMPTY_EJSON_HASH}, + {<<"override_attributes">>, ?EMPTY_EJSON_HASH}, {<<"run_list">>, []}, {<<"env_run_lists">>, ?EMPTY_EJSON_HASH} ]). @@ -96,7 +96,7 @@ <<"env_run_lists">>, <<"json_class">>, <<"name">>, <<"override_attributes">>, <<"run_list">> ]). --type role_action() :: create | { update, Name::binary() }. +-type role_action() :: create | { update, Name :: binary() }. -spec name(#chef_role{}) -> binary(). name(#chef_role{name = Name}) -> diff --git a/src/oc_erchef/apps/chef_objects/src/chef_s3.erl b/src/oc_erchef/apps/chef_objects/src/chef_s3.erl index a989a00d48..27d4dd1cbb 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_s3.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_s3.erl @@ -125,7 +125,7 @@ make_key(OrgId, Checksum) -> lists:append(["organization-", as_string(OrgId), "/checksum-", as_string(Checksum)]). %% @doc Base64-encode an MD5 hex string. --spec base64_checksum(Checksum::binary()) -> binary(). +-spec base64_checksum(Checksum :: binary()) -> binary(). base64_checksum(Checksum) -> {ok, [BigNum], []} = io_lib:fread("~16u", as_string(Checksum)), Bin = <>, @@ -136,7 +136,7 @@ base64_checksum(Checksum) -> bucket() -> envy:get(chef_objects, s3_platform_bucket_name, string). --spec headers_for_type(http_verb(), Checksum::binary()) -> [ {string(), string()} ]. 
+-spec headers_for_type(http_verb(), Checksum :: binary()) -> [ {string(), string()} ]. %% @doc helper function for generating headers for the S3 URL %% headers_for_type(put, Checksum) -> @@ -178,7 +178,7 @@ s3_internal_url() -> %% to be publicly accessible. If the url is configured with the %% atom host_header, then use the passed-in vhost url parameter. s3_external_url(VHostUrl) -> - case envy:get(chef_objects, s3_external_url, [atom,string]) of + case envy:get(chef_objects, s3_external_url, [atom, string]) of host_header -> VHostUrl; "http" ++ _ = Url -> diff --git a/src/oc_erchef/apps/chef_objects/src/chef_s3_ops.erl b/src/oc_erchef/apps/chef_objects/src/chef_s3_ops.erl index 57a5fd847f..1df647ad62 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_s3_ops.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_s3_ops.erl @@ -103,13 +103,13 @@ delete_file(OrgId, AwsConfig, Bucket, Checksum) -> _Response -> {ok, Checksum} catch - error:{aws_error, {http_error,404,_}} -> + error:{aws_error, {http_error, 404, _}} -> %% We got a 404. Since this *may* be indicative of weirdness, we'll log it, but %% we don't need to crash or raise an exception. error_logger:warning_msg("Deletion of file (checksum: ~p) for org ~p from bucket ~p (key: ~p) failed because the file was not found~n", [Checksum, OrgId, Bucket, Key]), {missing, Checksum}; - ExceptionClass:Reason-> + ExceptionClass:Reason -> %% Something unanticipated happened. We should log the specific reason for %% later analysis, but as far as the overall deletion operation is concerned, %% this is "just an error", and we can continue along. @@ -136,17 +136,17 @@ check_file(OrgId, AwsConfig, Bucket, Checksum, AttemptsLeft) -> %% an error. {ok, Checksum} catch - error:{aws_error, {http_error,404,_}} -> + error:{aws_error, {http_error, 404, _}} -> %% The file wasn't found. Log it and move on. 
error_logger:error_msg("Checking presence of file (checksum: ~p) for org ~p from bucket ~p (key: ~p) failed because the file was not found~n", [Checksum, OrgId, Bucket, Key]), {missing, Checksum}; %% TODO(ssd) 2020-09-03: should likely remove this case when we move to erlcloud - error:{aws_error, {socket_error,retry_later}} -> + error:{aws_error, {socket_error, retry_later}} -> error_logger:error_msg("Checking presence of file (checksum: ~p) for org ~p from bucket ~p (key: ~p) returned retry_later (retries left: ~p)~n", [Checksum, OrgId, Bucket, Key, AttemptsLeft - 1]), check_file(OrgId, AwsConfig, Bucket, Checksum, AttemptsLeft - 1); - ExceptionClass:Reason-> + ExceptionClass:Reason -> %% Something unanticipated happened. We should log the specific reason %% for later analysis, but as far as the overall checking operation is %% concerned, this is "just an error", and we can continue along. @@ -196,7 +196,7 @@ s3_checksum_op(OrgId, Checksums, Fun, TimeoutMsgTemplate) -> case Result of {ok, Checksum} -> {[Checksum | Ok], Missing, Timeouts, Errors}; {missing, Checksum} -> {Ok, [Checksum | Missing], Timeouts, Errors}; - {timeout, Checksum} -> {Ok, Missing, [Checksum |Timeouts], Errors}; + {timeout, Checksum} -> {Ok, Missing, [Checksum | Timeouts], Errors}; {error, Checksum} -> {Ok, Missing, Timeouts, [Checksum | Errors]} end end, diff --git a/src/oc_erchef/apps/chef_objects/src/chef_sandbox.erl b/src/oc_erchef/apps/chef_objects/src/chef_sandbox.erl index 41571b1920..1e6066972d 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_sandbox.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_sandbox.erl @@ -67,7 +67,7 @@ validate(Sandbox) -> -spec valid_checksum_hash(Input :: any()) -> ok | error. 
valid_checksum_hash({[]}) -> error; -valid_checksum_hash({[{Checksum, Value}|Rest]}) -> +valid_checksum_hash({[{Checksum, Value} | Rest]}) -> case {is_md5_hex(Checksum), Value} of {true, null} -> case Rest of @@ -148,19 +148,19 @@ fetch(#chef_sandbox{org_id = OrgId, id = SandboxID}, CallbackFun) -> %% See the 'find_sandbox_by_id' prepared query for the row "shape". sandbox_join_rows_to_record(Rows) -> sandbox_join_rows_to_record(Rows, []). -sandbox_join_rows_to_record([LastRow|[]], Checksums) -> +sandbox_join_rows_to_record([LastRow | []], Checksums) -> C = proplist_to_checksum(LastRow), #chef_sandbox{id = safe_get(<<"sandbox_id">>, LastRow), org_id = safe_get(<<"org_id">>, LastRow), created_at = safe_get(<<"created_at">>, LastRow), - checksums = lists:reverse([C|Checksums])}; -sandbox_join_rows_to_record([Row|Rest], Checksums ) -> + checksums = lists:reverse([C | Checksums])}; +sandbox_join_rows_to_record([Row | Rest], Checksums ) -> C = proplist_to_checksum(Row), - sandbox_join_rows_to_record(Rest, [C|Checksums]). + sandbox_join_rows_to_record(Rest, [C | Checksums]). %% @doc Safely retrieves a value from a proplist. Throws an error if the specified key does %% not exist in the list. --spec safe_get(Key::binary(), Proplist::[{binary(), term()}]) -> term(). +-spec safe_get(Key :: binary(), Proplist :: [{binary(), term()}]) -> term(). safe_get(Key, Proplist) -> {Key, Value} = lists:keyfind(Key, 1, Proplist), Value. 
diff --git a/src/oc_erchef/apps/chef_objects/src/chef_user.erl b/src/oc_erchef/apps/chef_objects/src/chef_user.erl index a0a8ad8075..b54401e1fa 100644 --- a/src/oc_erchef/apps/chef_objects/src/chef_user.erl +++ b/src/oc_erchef/apps/chef_objects/src/chef_user.erl @@ -189,27 +189,27 @@ password_validator() -> user_spec(common) -> {[ {<<"display_name">>, string}, %% FIXME as an always-required field this belongs in the schema - {{opt,<<"first_name">>}, string}, %% Note that remaining fields are serialized via serialized_object and - {{opt,<<"last_name">>}, string} , %% are/were used by other Chef components. - {{opt,<<"middle_name">>}, string}, %% FIXME these should be retained by the components that need them - {{opt,<<"twitter_account">>}, string}, - {{opt,<<"city">>}, string}, - {{opt,<<"country">>}, string}, - {{opt,<<"external_authentication_uid">>}, string } + {{opt, <<"first_name">>}, string}, %% Note that remaining fields are serialized via serialized_object and + {{opt, <<"last_name">>}, string} , %% are/were used by other Chef components. + {{opt, <<"middle_name">>}, string}, %% FIXME these should be retained by the components that need them + {{opt, <<"twitter_account">>}, string}, + {{opt, <<"city">>}, string}, + {{opt, <<"country">>}, string}, + {{opt, <<"external_authentication_uid">>}, string } ]}; user_spec(create) -> - {[ {{opt,<<"password">>}, password_validator()} ]}; + {[ {{opt, <<"password">>}, password_validator()} ]}; user_spec(update) -> - {[ {{opt,<<"password">>}, password_validator()}, - {{opt,<<"recovery_authentication_enabled">>}, boolean }, - {{opt,<<"private_key">>}, boolean } ]}. + {[ {{opt, <<"password">>}, password_validator()}, + {{opt, <<"recovery_authentication_enabled">>}, boolean }, + {{opt, <<"private_key">>}, boolean } ]}. 
local_auth_user_spec(common) -> {[{<<"email">>, {fun_match, {fun valid_email/1, string, <<"email must be valid">>}}}]}; local_auth_user_spec(create) -> {[ {<<"password">>, password_validator()} ]}; local_auth_user_spec(update) -> - {[ {{opt,<<"password">>}, password_validator()} ]}. + {[ {{opt, <<"password">>}, password_validator()} ]}. @@ -432,7 +432,7 @@ update_from_ejson_common(User, UserEJson) -> merge_user_data(User, {ModData}) -> % If value in ModData is null, delete from user, otherwise replace or insert the value. - lists:foldl(fun({Key,null}, AccIn) -> + lists:foldl(fun({Key, null}, AccIn) -> ej:delete({Key}, AccIn); ({_Key, undefined}, AccIn) -> % Shouldn't happen, but just in case - don't insert value 'undefined' diff --git a/src/oc_erchef/apps/chef_objects/test/chef_config_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_config_tests.erl index 50597faa91..c31bda7874 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_config_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_config_tests.erl @@ -37,12 +37,12 @@ config_option_test_() -> application:set_env(TestApplication, TestOption, Value) end end, - fun(_,_) -> + fun(_, _) -> error_logger:tty(true), application:unset_env(TestApplication, TestOption) end, [{Value, - fun(_,_) -> + fun(_, _) -> {Description, fun() -> case {Value, ErrorState} of diff --git a/src/oc_erchef/apps/chef_objects/test/chef_data_bag_item_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_data_bag_item_tests.erl index 7fdab4f64a..8ac2fe2a0c 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_data_bag_item_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_data_bag_item_tests.erl @@ -52,7 +52,7 @@ new_record_test() -> Item = chef_data_bag_item:new_record(?API_MIN_VER, OrgId, no_authz_id, {<<"my-bag">>, Data}), ?assertMatch(#chef_data_bag_item{}, Item), %% TODO: validate more fields? - ?assertEqual({<<"my-bag">>,<<"my-item">>}, chef_data_bag_item:name(Item)). 
+ ?assertEqual({<<"my-bag">>, <<"my-item">>}, chef_data_bag_item:name(Item)). ejson_for_indexing_test() -> RawItem = {[{<<"id">>, <<"the_item_name">>}, diff --git a/src/oc_erchef/apps/chef_objects/test/chef_data_bag_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_data_bag_tests.erl index bdfc8f0a52..d6a2b6bf55 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_data_bag_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_data_bag_tests.erl @@ -53,7 +53,7 @@ validate_data_bag_test_() -> true}, {"Data bag with extra fields succeeds", {[{<<"name">>, <<"foo">>}, - {<<"blahblah">>,<<"blahblahblah">>}]}, + {<<"blahblah">>, <<"blahblahblah">>}]}, true} ] diff --git a/src/oc_erchef/apps/chef_objects/test/chef_depsolver_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_depsolver_tests.erl index 0100572796..129d401195 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_depsolver_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_depsolver_tests.erl @@ -33,7 +33,7 @@ all_test_() -> {foreach, fun() -> error_logger:delete_report_handler(error_logger_tty_h), - [ ok = application:start(App) || App <- ?NEEDED_APPS ], + [ ok = application:ensure_started(App) || App <- ?NEEDED_APPS ], PoolConfig = [{name, chef_depsolver}, {max_count, 1}, {init_count, 1}, @@ -320,15 +320,15 @@ depsolver_first() -> {<<"0.2">>, []}, {<<"0.3">>, []}]}, {<<"app2">>, [{<<"0.1">>, []}, - {<<"0.2.33">>,[{<<"app3">>, <<"0.3">>}]}, + {<<"0.2.33">>, [{<<"app3">>, <<"0.3">>}]}, {<<"0.3">>, []}]}, {<<"app3">>, [{<<"0.1">>, []}, {<<"0.2">>, []}, {<<"0.3">>, []}]}], RunList = [{<<"app1">>, <<"0.1">>}], - Expected = {ok,[{<<"app1">>,{0,1,0}}, - {<<"app2">>,{0,2,33}}, - {<<"app3">>,{0,3,0}}]}, + Expected = {ok, [{<<"app1">>, {0,1,0}}, + {<<"app2">>, {0,2,33}}, + {<<"app3">>, {0,3,0}}]}, Result = chef_depsolver:solve_dependencies(World, [], RunList), ?assertEqual(Expected, Result). 
@@ -350,10 +350,10 @@ depsolver_second() -> {<<"0.3">>, []}]}], RunList = [{<<"app1">>, <<"0.1">>}, {<<"app2">>, <<"0.3">>}], - Expected = {ok, [{<<"app1">>,{0,1,0}}, - {<<"app2">>,{0,3,0}}, - {<<"app3">>,{0,3,0}}, - {<<"app4">>,{0,2,0}}]}, + Expected = {ok, [{<<"app1">>, {0,1,0}}, + {<<"app2">>, {0,3,0}}, + {<<"app3">>, {0,3,0}}, + {<<"app4">>, {0,2,0}}]}, Result = chef_depsolver:solve_dependencies(World, [], RunList), ?assertEqual(Expected, Result). @@ -385,18 +385,18 @@ depsolver_third() -> {<<"2.0.0">>, []}, {<<"6.0.0">>, []}]}], - ?assertEqual({ok, [{<<"app1">>,{3,0,0}}, - {<<"app2">>,{3,0,0}}, - {<<"app4">>,{6,0,0}}, - {<<"app5">>,{6,0,0}}, - {<<"app3">>,{0,1,3}}]}, + ?assertEqual({ok, [{<<"app1">>, {3,0,0}}, + {<<"app2">>, {3,0,0}}, + {<<"app4">>, {6,0,0}}, + {<<"app5">>, {6,0,0}}, + {<<"app3">>, {0,1,3}}]}, chef_depsolver:solve_dependencies(World, [], [{<<"app1">>, <<"3.0">>}])), - ?assertEqual({ok, [{<<"app1">>,{3,0,0}}, - {<<"app2">>,{3,0,0}}, - {<<"app4">>,{6,0,0}}, - {<<"app5">>,{6,0,0}}, - {<<"app3">>,{0,1,3}}]}, + ?assertEqual({ok, [{<<"app1">>, {3,0,0}}, + {<<"app2">>, {3,0,0}}, + {<<"app4">>, {6,0,0}}, + {<<"app5">>, {6,0,0}}, + {<<"app3">>, {0,1,3}}]}, chef_depsolver:solve_dependencies(World, [], [<<"app1">>])). 
depsolver_fail() -> @@ -405,7 +405,7 @@ depsolver_fail() -> {<<"0.2">>, []}, {<<"0.3">>, []}]}, {<<"app2">>, [{<<"0.1">>, []}, - {<<"0.2">>,[{<<"app3">>, <<"0.1">>}]}, + {<<"0.2">>, [{<<"app3">>, <<"0.1">>}]}, {<<"0.3">>, []}]}, {<<"app3">>, [{<<"0.1">>, []}, {<<"0.2">>, []}, @@ -445,20 +445,20 @@ depsolver_conflicting_passing() -> {<<"2.0.0">>, []}, {<<"6.0.0">>, []}]}], - ?assertEqual({ok, [{<<"app1">>,{3,0,0}}, - {<<"app2">>,{3,0,0}}, - {<<"app4">>,{5,0,0}}, - {<<"app5">>,{2,0,0}}, - {<<"app3">>,{0,1,3}}]}, + ?assertEqual({ok, [{<<"app1">>, {3,0,0}}, + {<<"app2">>, {3,0,0}}, + {<<"app4">>, {5,0,0}}, + {<<"app5">>, {2,0,0}}, + {<<"app3">>, {0,1,3}}]}, chef_depsolver:solve_dependencies(World, [], [{<<"app1">>, <<"3.0">>}])), - ?assertEqual({ok, [{<<"app1">>,{3,0,0}}, - {<<"app2">>,{3,0,0}}, - {<<"app4">>,{5,0,0}}, - {<<"app5">>,{2,0,0}}, - {<<"app3">>,{0,1,3}}]}, + ?assertEqual({ok, [{<<"app1">>, {3,0,0}}, + {<<"app2">>, {3,0,0}}, + {<<"app4">>, {5,0,0}}, + {<<"app5">>, {2,0,0}}, + {<<"app3">>, {0,1,3}}]}, chef_depsolver:solve_dependencies(World, [], %% [<<"app5">>, <<"app2">>, <<"app1">>])). @@ -468,8 +468,8 @@ depsolver_circular_dependencies() -> World = [{<<"app1">>, [{<<"0.1.0">>, [<<"app2">>]}]}, {<<"app2">>, [{<<"0.0.1">>, [<<"app1">>]}]}], - ?assertEqual({ok, [{<<"app1">>,{0,1,0}}, - {<<"app2">>,{0,0,1}}]}, + ?assertEqual({ok, [{<<"app1">>, {0,1,0}}, + {<<"app2">>, {0,0,1}}]}, chef_depsolver:solve_dependencies(World, [], [{<<"app1">>, <<"0.1.0">>}])). @@ -524,11 +524,11 @@ depsolver_pessimistic_major_minor_patch() -> {<<"0.3.0">>, []}, {<<"2.0.0">>, []}, {<<"6.0.0">>, []}]}], - ?assertEqual({ok, [{<<"app1">>,{3,0,0}}, - {<<"app2">>,{2,1,5}}, - {<<"app4">>,{6,0,0}}, - {<<"app5">>,{6,0,0}}, - {<<"app3">>,{0,1,3}}]}, + ?assertEqual({ok, [{<<"app1">>, {3,0,0}}, + {<<"app2">>, {2,1,5}}, + {<<"app4">>, {6,0,0}}, + {<<"app5">>, {6,0,0}}, + {<<"app3">>, {0,1,3}}]}, chef_depsolver:solve_dependencies(World, [], [{<<"app1">>, <<"3.0">>}])). 
depsolver_pessimistic_major_minor() -> @@ -562,11 +562,11 @@ depsolver_pessimistic_major_minor() -> {<<"0.3.0">>, []}, {<<"2.0.0">>, []}, {<<"6.0.0">>, []}]}], - ?assertEqual({ok, [{<<"app1">>,{3,0,0}}, - {<<"app2">>,{2,2,0}}, - {<<"app4">>,{6,0,0}}, - {<<"app5">>,{6,0,0}}, - {<<"app3">>,{0,1,3}}]}, + ?assertEqual({ok, [{<<"app1">>, {3,0,0}}, + {<<"app2">>, {2,2,0}}, + {<<"app4">>, {6,0,0}}, + {<<"app5">>, {6,0,0}}, + {<<"app3">>, {0,1,3}}]}, chef_depsolver:solve_dependencies(World, [], [{<<"app1">>, <<"3.0">>}])). depsolver_missing() -> @@ -576,7 +576,7 @@ depsolver_missing() -> {<<"0.2">>, [{<<"app4">>, <<"0.2">>}]}, {<<"0.3">>, [{<<"app4">>, <<"0.2">>, '='}]}]}, {<<"app2">>, [{<<"0.1">>, []}, - {<<"0.2">>,[{<<"app3">>, <<"0.3">>}]}, + {<<"0.2">>, [{<<"app3">>, <<"0.3">>}]}, {<<"0.3">>, []}]}, {<<"app3">>, [{<<"0.1">>, []}, {<<"0.2">>, []}, @@ -590,14 +590,14 @@ depsolver_missing() -> {constraints_not_met,[]}]}, Ret1), Ret2 = chef_depsolver:solve_dependencies(World, [], [{<<"app1">>, <<"0.1">>}]), - ?assertMatch({error,no_solution,_},Ret2). + ?assertMatch({error, no_solution, _}, Ret2). depsolver_missing_via_culprit_search() -> - World = [{<<"app1">>,[{<<"1.1.0">>,[]}]}, - {<<"app2">>,[{<<"0.0.1">>,[{<<"app1::oops">>,<<"0.0.0">>,'>='}]} ]} ], - Result = chef_depsolver:solve_dependencies(World, [], [<<"app1">>,<<"app2">>]), - ?assertMatch({error,no_solution,_}, Result). + World = [{<<"app1">>, [{<<"1.1.0">>, []}]}, + {<<"app2">>, [{<<"0.0.1">>, [{<<"app1::oops">>, <<"0.0.0">>, '>='}]} ]} ], + Result = chef_depsolver:solve_dependencies(World, [], [<<"app1">>, <<"app2">>]), + ?assertMatch({error, no_solution, _}, Result). %% This test from the depsolver library may no longer be necessary. It seems to be %% testing that you can pass binary data to depsolver and it performs the same. 
diff --git a/src/oc_erchef/apps/chef_objects/test/chef_key_base_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_key_base_tests.erl index 44566a2c70..71641f8335 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_key_base_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_key_base_tests.erl @@ -106,7 +106,7 @@ maybe_generate_key_pair_test_() -> {"when called with create_key false and a public key, the key is not generated and the public key is passed into the continuation fun", fun() -> Echo = fun(Data) -> Data end, - Result = chef_key_base:maybe_generate_key_pair({[{<<"create_key">>, false},{<<"public_key">>, <<"pubkey">>}]}, + Result = chef_key_base:maybe_generate_key_pair({[{<<"create_key">>, false}, {<<"public_key">>, <<"pubkey">>}]}, Echo), ?assertMatch({<<"pubkey">>, undefined}, Result) diff --git a/src/oc_erchef/apps/chef_objects/test/chef_key_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_key_tests.erl index bdb487dc44..9c0a701fcb 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_key_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_key_tests.erl @@ -26,7 +26,7 @@ -define(KEY_NAME, <<"test_key">>). -define(DEFAULT_EXPIRATION, <<"2099-10-24T22:49:08Z">>). %% The extra zero denotes UTC --define(PARSED_DEFAULT_EXPIRATION, {{2099,10,24},{22,49,08}}). +-define(PARSED_DEFAULT_EXPIRATION, {{2099, 10, 24}, {22, 49, 08}}). 
%% example post parse_binary_json example_key() -> @@ -115,7 +115,7 @@ new_record_test_() -> fun() -> KeyData = example_key(<<"not-a-date">>, undefined), ID = <<"testid">>, - ?assertThrow({ec_date,{bad_date,_}}, chef_key:new_record(?API_MIN_VER, unused, unused, {ID, KeyData})) + ?assertThrow({ec_date,{bad_date, _}}, chef_key:new_record(?API_MIN_VER, unused, unused, {ID, KeyData})) end }, {"check that KeyData with an invalid public_key throws invalid_public_key", @@ -158,8 +158,8 @@ parse_binary_json_update_test_() -> BadNameField = {[{<<"name">>, <<"bob^was^here">>}]}, BadDateField = {[{<<"expiration_date">>, <<"tomorrow">>}]}, - ?assertThrow({ej_invalid,fun_match,_,_,_,_,_}, chef_key:parse_binary_json(chef_json:encode(BadKeyField), update)), - ?assertThrow({ej_invalid, string_match,_,_,_,_,_}, chef_key:parse_binary_json(chef_json:encode(BadNameField), update)), + ?assertThrow({ej_invalid, fun_match, _, _, _, _, _}, chef_key:parse_binary_json(chef_json:encode(BadKeyField), update)), + ?assertThrow({ej_invalid, string_match, _, _, _, _, _}, chef_key:parse_binary_json(chef_json:encode(BadNameField), update)), ?assertThrow({bad_date, <<"expiration_date">>}, chef_key:parse_binary_json(chef_json:encode(BadDateField), update)) end }, @@ -174,7 +174,7 @@ parse_binary_json_update_test_() -> OriginalKey = example_key(<<"infinity">>, undefined), Key = ej:set({<<"name">>}, OriginalKey, <<"invalid^name">>), EncodedKey = chef_json:encode(Key), - ?assertThrow({ej_invalid,string_match,_,_,_,_,_}, chef_key:parse_binary_json(EncodedKey, update)) + ?assertThrow({ej_invalid, string_match, _, _, _, _, _}, chef_key:parse_binary_json(EncodedKey, update)) end}, {"check that complete update key with an invalid date is rejected", fun() -> @@ -186,7 +186,7 @@ parse_binary_json_update_test_() -> fun() -> Key = example_key(<<"infinity">>, <<"-----BEGIN PUBLIC KEY-----\ninvalid_key\n-----END PUBLIC KEY-----">>), EncodedKey = chef_json:encode(Key), - 
?assertThrow({ej_invalid,fun_match,_,_,_,_,_}, chef_key:parse_binary_json(EncodedKey, update)) + ?assertThrow({ej_invalid, fun_match, _, _, _, _, _}, chef_key:parse_binary_json(EncodedKey, update)) end} ]. @@ -208,7 +208,7 @@ parse_binary_json_create_test_() -> OriginalKey = example_key(<<"infinity">>, undefined), Key = ej:set({<<"name">>}, OriginalKey, <<"invalid^name">>), EncodedKey = chef_json:encode(Key), - ?assertThrow({ej_invalid,string_match,_,_,_,_,_}, chef_key:parse_binary_json(EncodedKey, create)) + ?assertThrow({ej_invalid, string_match, _, _, _, _, _}, chef_key:parse_binary_json(EncodedKey, create)) end}, {"check that key with an invalid date is rejected", fun() -> @@ -220,7 +220,7 @@ parse_binary_json_create_test_() -> fun() -> Key = example_key(<<"infinity">>, <<"-----BEGIN PUBLIC KEY-----\ninvalid_key\n-----END PUBLIC KEY-----">>), EncodedKey = chef_json:encode(Key), - ?assertThrow({ej_invalid,fun_match,_,_,_,_,_}, chef_key:parse_binary_json(EncodedKey, create)) + ?assertThrow({ej_invalid, fun_match, _, _, _, _, _}, chef_key:parse_binary_json(EncodedKey, create)) end} ]. 
diff --git a/src/oc_erchef/apps/chef_objects/test/chef_metrics_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_metrics_tests.erl index 0621632d71..4afa6e135f 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_metrics_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_metrics_tests.erl @@ -59,9 +59,9 @@ label_test_() -> application:set_env(chef_objects, s3_url, S3Url), application:set_env(chef_objects, s3_platform_bucket_name, Bucket) end, - fun(_,_) -> ok end, + fun(_, _) -> ok end, [{{Url, Bucket}, - fun(_,_) -> + fun(_, _) -> ?_assertEqual(Label, chef_metrics:label(Upstream, {Mod, Fun})) end} || {Url, Bucket, Upstream, Mod, Fun, Label} <- [{"http://s3.amazonaws.com", "i.haz.a.bukkit", diff --git a/src/oc_erchef/apps/chef_objects/test/chef_node_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_node_tests.erl index 814630a1a0..1f4235997f 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_node_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_node_tests.erl @@ -392,7 +392,7 @@ basic_node_index_with_policy_info() -> merge({L1}, {L2}) -> D1 = dict:from_list(L1), D2 = dict:from_list(L2), - Merged = dict:merge(fun(_K,_V1,V2) -> V2 end, D1,D2), + Merged = dict:merge(fun(_K, _V1, V2) -> V2 end, D1, D2), {dict:to_list(Merged)}. 
to_sorted_list({L}) -> diff --git a/src/oc_erchef/apps/chef_objects/test/chef_sandbox_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_sandbox_tests.erl index 46de9ed04c..28ff66470c 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_sandbox_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_sandbox_tests.erl @@ -104,7 +104,7 @@ sandbox_join_rows_to_record_test_() -> ?assertEqual(chef_sandbox:sandbox_join_rows_to_record(sandbox_rows()), #chef_sandbox{id = <<"deadbeefdeadbeefdeadbeefdeadbeef">>, org_id = <<"abad1deaabad1deaabad1deaabad1dea">>, - created_at = {{2012,4,25},{3,7,43.0}}, + created_at = {{2012, 4, 25}, {3, 7, 43.0}}, checksums = [ {<<"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, false}, {<<"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb">>, true}, @@ -119,22 +119,22 @@ sandbox_rows() -> [ [{<<"sandbox_id">>, <<"deadbeefdeadbeefdeadbeefdeadbeef">>}, {<<"org_id">>, <<"abad1deaabad1deaabad1deaabad1dea">>}, - {<<"created_at">>, {{2012,4,25},{3,7,43.0}}}, + {<<"created_at">>, {{2012, 4, 25}, {3, 7, 43.0}}}, {<<"checksum">>, <<"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>}, {<<"uploaded">>, false}], [{<<"sandbox_id">>, <<"deadbeefdeadbeefdeadbeefdeadbeef">>}, {<<"org_id">>, <<"abad1deaabad1deaabad1deaabad1dea">>}, - {<<"created_at">>, {{2012,4,25},{3,7,43.0}}}, + {<<"created_at">>, {{2012, 4, 25}, {3, 7, 43.0}}}, {<<"checksum">>, <<"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb">>}, {<<"uploaded">>, true}], [{<<"sandbox_id">>, <<"deadbeefdeadbeefdeadbeefdeadbeef">>}, {<<"org_id">>, <<"abad1deaabad1deaabad1deaabad1dea">>}, - {<<"created_at">>, {{2012,4,25},{3,7,43.0}}}, + {<<"created_at">>, {{2012, 4, 25}, {3, 7, 43.0}}}, {<<"checksum">>, <<"cccccccccccccccccccccccccccccccc">>}, {<<"uploaded">>, false}], [{<<"sandbox_id">>, <<"deadbeefdeadbeefdeadbeefdeadbeef">>}, {<<"org_id">>, <<"abad1deaabad1deaabad1deaabad1dea">>}, - {<<"created_at">>, {{2012,4,25},{3,7,43.0}}}, + {<<"created_at">>, {{2012, 4, 25}, {3, 7, 43.0}}}, {<<"checksum">>, <<"dddddddddddddddddddddddddddddddd">>}, 
{<<"uploaded">>, true}] ]. diff --git a/src/oc_erchef/apps/chef_objects/test/chef_user_tests.erl b/src/oc_erchef/apps/chef_objects/test/chef_user_tests.erl index 08d009c3cb..cc17aa6b9a 100644 --- a/src/oc_erchef/apps/chef_objects/test/chef_user_tests.erl +++ b/src/oc_erchef/apps/chef_objects/test/chef_user_tests.erl @@ -71,7 +71,7 @@ assemble_user_ejson_non_deprecated_test_() -> fun chef_objects_test_utils:bcrypt_cleanup/1, chef_objects_test_utils:make_non_deprecated_tests(fun assemble_user_ejson_non_deprecated_tests/1) }. -assemble_user_ejson_non_deprecated_tests(Version)-> +assemble_user_ejson_non_deprecated_tests(Version) -> {setup, fun chef_objects_test_utils:bcrypt_setup/0, fun chef_objects_test_utils:bcrypt_cleanup/1, diff --git a/src/oc_erchef/apps/chef_objects/test/rebar.lock b/src/oc_erchef/apps/chef_objects/test/rebar.lock new file mode 100644 index 0000000000..57afcca045 --- /dev/null +++ b/src/oc_erchef/apps/chef_objects/test/rebar.lock @@ -0,0 +1 @@ +[]. diff --git a/src/oc_erchef/apps/chef_telemetry/src/chef_telemetry_worker.erl b/src/oc_erchef/apps/chef_telemetry/src/chef_telemetry_worker.erl index 6ae63673e5..735ae46cb9 100644 --- a/src/oc_erchef/apps/chef_telemetry/src/chef_telemetry_worker.erl +++ b/src/oc_erchef/apps/chef_telemetry/src/chef_telemetry_worker.erl @@ -70,7 +70,7 @@ start_link() -> init(_Config) -> {ConfigFile, CtlLocation} = case re:run(os:cmd("pwd"), "^/hab/.*") of - {match,_} -> + {match, _} -> {"Hab infra server", "Hab infra server"}; _ -> CtlLocation1 = @@ -158,8 +158,8 @@ send_data(State) -> Funs = [fun get_total_nodes/1, fun get_active_nodes/1, fun get_company_name/1, fun get_api_fqdn/1, fun determine_license_id/1], Pid = self(), Res = [ erlang:spawn_monitor(runner(Pid, State1, Fun)) || Fun <- Funs ], - Current_scan = gather_res(Res, State1#state.current_scan, length(Funs)), - Req = generate_request(ServerVersion, State1#state{current_scan = Current_scan}), + CurrentScan = gather_res(Res, State1#state.current_scan, 
length(Funs)), + Req = generate_request(ServerVersion, State1#state{current_scan = CurrentScan}), send_req(Req, State1), State1; _ -> @@ -174,7 +174,7 @@ get_api_fqdn(_State) -> sqerl:execute(<<"delete from telemetry where property like 'NODE:%' and event_timestamp < (current_timestamp - interval '86700')">>), case sqerl:execute(<<"select trim(property) as property from telemetry where property like 'NODE:%'">>) of {ok, Rows} when is_list(Rows) -> - FQDNs = [binary:part(FQDN, 5, size(FQDN) -5) || [{<<"property">>, FQDN}] <- Rows], + FQDNs = [binary:part(FQDN, 5, size(FQDN) - 5) || [{<<"property">>, FQDN}] <- Rows], FQDNs1 = mask(FQDNs), FQDNs1; _ -> @@ -211,11 +211,11 @@ solr_search(Query) -> {Error, Reason} end. -get_license_company_name()-> - {_Lic, _Type, _GracePeriod, _ExpDate, _Msg, CN,_LID} = chef_license:get_license(), +get_license_company_name() -> + {_Lic, _Type, _GracePeriod, _ExpDate, _Msg, CN, _LID} = chef_license:get_license(), CN. -determine_license_id(_State)-> +determine_license_id(_State) -> {_Lic, _Type, _GracePeriod, _ExpDate, _Msg, _CN, LicenseID} = chef_license:get_license(), case LicenseID of undefined -> @@ -369,9 +369,9 @@ send_req(Req, State) -> check_send(Hostname) -> case sqerl:execute(<<"select telemetry_check_send('", Hostname/binary, "')">>) of - {ok,[[{_, true}]]} -> + {ok, [[{_, true}]]} -> true; - {ok,[[{_, false}]]} -> + {ok, [[{_, false}]]} -> false; Error -> Error @@ -394,15 +394,15 @@ mask(FQDNs) -> Hash = crypto:hash(md5, FQDN1), Domain2 = <<"">>; _ -> - FQDN_parts = binary:split(FQDN1, <<"\.">>, [global]), - case size(lists:last(FQDN_parts)) =:= 2 of + FQDNparts = binary:split(FQDN1, <<"\.">>, [global]), + case size(lists:last(FQDNparts)) =:= 2 of true -> - {SubDomain1, Domain1} = lists:split(erlang:length(FQDN_parts) - 3, FQDN_parts), + {SubDomain1, Domain1} = lists:split(erlang:length(FQDNparts) - 3, FQDNparts), SubDomain2 = Join(SubDomain1, <<".">>), Domain2 = Join(Domain1, <<".">>); _ -> - {SubDomain1, Domain1} = 
lists:split(erlang:length(FQDN_parts) - 2, FQDN_parts), + {SubDomain1, Domain1} = lists:split(erlang:length(FQDNparts) - 2, FQDNparts), SubDomain2 = Join(SubDomain1, <<".">>), Domain2 = Join(Domain1, <<".">>) end, @@ -446,12 +446,12 @@ runner(Parent, State, Fun) -> Parent ! {result, self(), Res} end. -gather_res(_Ids, Res, Count) when Count =< 0-> +gather_res(_Ids, Res, Count) when Count =< 0 -> Res; gather_res(Ids, Res, Count) -> Fun = fun(Id) -> - fun({Id1,_}) -> + fun({Id1, _}) -> Id =/= Id1 end end, @@ -476,4 +476,4 @@ gather_res(Ids, Res, Count) -> after 60000 -> Res - end. \ No newline at end of file + end. diff --git a/src/oc_erchef/apps/data_collector/rebar.config b/src/oc_erchef/apps/data_collector/rebar.config index a84b5bc0c3..a3ca5f1d94 100644 --- a/src/oc_erchef/apps/data_collector/rebar.config +++ b/src/oc_erchef/apps/data_collector/rebar.config @@ -10,7 +10,7 @@ {opscoderl_httpc, ".*", {git, "https://github.com/chef/opscoderl_httpc", {branch, "main"}}}, {pooler, ".*", - {git, "https://github.com/chef/pooler", {branch, "master"}}} + {git, "https://github.com/chef/pooler", {branch, "CHEF-11677/CHEF-12498/lbaker"}}} ] }. diff --git a/src/oc_erchef/apps/data_collector/rebar.lock b/src/oc_erchef/apps/data_collector/rebar.lock index cda48ad884..cab3c041cd 100644 --- a/src/oc_erchef/apps/data_collector/rebar.lock +++ b/src/oc_erchef/apps/data_collector/rebar.lock @@ -19,5 +19,5 @@ 0}, {<<"pooler">>, {git,"https://github.com/chef/pooler", - {ref,"681c355abaacc5487ddf41a84b9ed53151a765fe"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}]. 
diff --git a/src/oc_erchef/apps/data_collector/src/data_collector_http.erl b/src/oc_erchef/apps/data_collector/src/data_collector_http.erl index b29c459a32..e32912f20c 100644 --- a/src/oc_erchef/apps/data_collector/src/data_collector_http.erl +++ b/src/oc_erchef/apps/data_collector/src/data_collector_http.erl @@ -66,12 +66,12 @@ delete(Path, Body) -> delete(Path, Body, Headers) -> request_with_caught_errors(Path, delete, Body, Headers). -request_with_caught_errors(Path, Method, Body, Headers) when is_list(Body)-> +request_with_caught_errors(Path, Method, Body, Headers) when is_list(Body) -> request_with_caught_errors(Path, Method, iolist_to_binary(Body), Headers); request_with_caught_errors(Path, Method, Body, Headers) -> try case request(Path, Method, Body, Headers) of - {ok, [$2|_], _Head, _RespBody} -> ok; + {ok, [$2 | _], _Head, _RespBody} -> ok; Error -> {error, Error} end catch diff --git a/src/oc_erchef/apps/depsolver/src/depsolver.erl b/src/oc_erchef/apps/depsolver/src/depsolver.erl index 0e6665064f..895f07a71a 100644 --- a/src/oc_erchef/apps/depsolver/src/depsolver.erl +++ b/src/oc_erchef/apps/depsolver/src/depsolver.erl @@ -147,7 +147,7 @@ parse_version(Vsn) %% return every member of that list that matches all constraints. -spec filter_packages([{pkg_name(), raw_vsn()}], [raw_constraint()]) -> {ok, [{pkg_name(), raw_vsn()}]} - | {error, Reason::term()}. + | {error, Reason :: term()}. filter_packages(PVPairs, RawConstraints) -> Constraints = [fix_con(Constraint) || Constraint <- RawConstraints], case check_constraints(Constraints) of @@ -254,8 +254,8 @@ is_valid_constraint({_Pkg, _LVsn1, _LVsn2, between}) -> is_valid_constraint(_InvalidConstraint) -> false. --spec is_version_within_constraint(vsn(),constraint()) -> boolean(). -is_version_within_constraint({missing}, _Pkg)-> +-spec is_version_within_constraint(vsn(), constraint()) -> boolean(). 
+is_version_within_constraint({missing}, _Pkg) -> false; is_version_within_constraint(_Vsn, Pkg) when is_atom(Pkg) orelse is_binary(Pkg) -> true; diff --git a/src/oc_erchef/apps/depsolver/test/depsolver_tests.erl b/src/oc_erchef/apps/depsolver/test/depsolver_tests.erl index 8be958cde5..b987551745 100644 --- a/src/oc_erchef/apps/depsolver/test/depsolver_tests.erl +++ b/src/oc_erchef/apps/depsolver/test/depsolver_tests.erl @@ -82,4 +82,4 @@ filter_versions() -> Ret = depsolver:filter_packages(Packages, [{"foo", "1.0.0", '~~~~'} | Cons]), - ?assertMatch({error, {invalid_constraints, [{<<"foo">>,{{1,0,0},{[],[]}},'~~~~'}]}}, Ret). + ?assertMatch({error, {invalid_constraints, [{<<"foo">>, {{1,0,0}, {[],[]}},'~~~~'}]}}, Ret). diff --git a/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_SUITE.erl b/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_SUITE.erl index 15dd2917bb..e42c300cd0 100644 --- a/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_SUITE.erl +++ b/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_SUITE.erl @@ -35,7 +35,7 @@ all() -> [fetch_container]. init_per_suite(LastConfig) -> - Config = chef_test_db_helper:start_db([{app, oc_chef_authz}|LastConfig], "oc_chef_authz_itests"), + Config = chef_test_db_helper:start_db([{app, oc_chef_authz} | LastConfig], "oc_chef_authz_itests"), suite_helper:start_server(Config), OrgsConfig = chef_test_suite_helper:make_orgs(), OrgsConfig ++ Config. diff --git a/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_policies_SUITE.erl b/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_policies_SUITE.erl index e19fd8ef71..a03ffdff6f 100644 --- a/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_policies_SUITE.erl +++ b/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_authz_policies_SUITE.erl @@ -349,7 +349,7 @@ fetch_prereq_objects_when_missing_rev(Config) -> ?assertEqual(Expected, Result). 
-verify_insert_policy_group_association_missing_group(Config)-> +verify_insert_policy_group_association_missing_group(Config) -> Assoc = pgr_assoc_missing_group(Config), Context = chef_test_suite_helper:context(), Actor = chef_test_suite_helper:actor_id(), diff --git a/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_group_SUITE.erl b/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_group_SUITE.erl index c13a50a449..4302c7a677 100644 --- a/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_group_SUITE.erl +++ b/src/oc_erchef/apps/oc_chef_authz/itest/oc_chef_group_SUITE.erl @@ -35,7 +35,7 @@ suite() -> [{timetrap,{seconds,30}}]. init_per_suite(LastConfig) -> - Config = chef_test_db_helper:start_db([{app, oc_chef_authz}|LastConfig], "oc_chef_authz_itests"), + Config = chef_test_db_helper:start_db([{app, oc_chef_authz} | LastConfig], "oc_chef_authz_itests"), [{tables, ["groups", "clients", "users"]} | suite_helper:start_server(Config)]. end_per_suite(Config) -> @@ -353,8 +353,8 @@ convert_to_path(BasePath, Elements) -> insert_user(Username) -> UserRecord = chef_user_record(Username, chef_test_suite_helper:make_az_id(Username) ), - [_,_| Values] = tuple_to_list(UserRecord), - [_|FieldNames] = record_info(fields, chef_user), + [_, _ | Values] = tuple_to_list(UserRecord), + [_ | FieldNames] = record_info(fields, chef_user), Input = lists:zip(FieldNames, Values), ?assertEqual({ok, 1}, sqerl:adhoc_insert(users, [Input])). diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz.erl index adf9fbd47c..7368ab8119 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz.erl @@ -145,8 +145,8 @@ get_container_aid_for_object(Context, OrgId, ObjectType) -> %% with the ACL for `ContainerAId' will be merged into the ACL for the new object. 
%% %% @end TODO: consider error cases in more detail --spec create_entity_with_container_acl(RequestorId::requestor_id(), - ContainerAId::object_id(), +-spec create_entity_with_container_acl(RequestorId :: requestor_id(), + ContainerAId :: object_id(), ObjectType :: chef_wm:container_name() %'client' | 'container' | %'cookbook' | 'data' | 'environment' | @@ -226,10 +226,10 @@ merge_acl_from_container(RequestorId, ContainerId, AuthzType, ObjectId) -> NewAcl :: authz_acl()) -> ok | {error, any()}. set_acl(_RequestorId, _AuthzType, _ObjectId, []) -> ok; -set_acl(RequestorId, AuthzType, ObjectId, [{Method, ACE}|Rest]) when AuthzType =:= 'actor'; - AuthzType =:= 'object'; - AuthzType =:= 'container'; - AuthzType =:= 'group' -> +set_acl(RequestorId, AuthzType, ObjectId, [{Method, ACE} | Rest]) when AuthzType =:= 'actor'; + AuthzType =:= 'object'; + AuthzType =:= 'container'; + AuthzType =:= 'group' -> case set_ace_for_entity(RequestorId, AuthzType, ObjectId, Method, ACE) of ok -> set_acl(RequestorId, AuthzType, ObjectId, Rest); @@ -318,8 +318,8 @@ get_data_for_id([Id | _Ids] = OIds , [{RId, RData} | Resources], Denied, Permitt % Corresponds to GET /{actors|containers|groups|objects}/:id/acl/{actors|groups}/:member_id % -spec is_authorized_on_resource(requestor_id(), resource_type(), object_id(), - 'actor'|'group', actor_id(), access_method()) - -> true|false|{error,any()}. + 'actor' | 'group', actor_id(), access_method()) + -> true | false | {error, any()}. 
is_authorized_on_resource(RequestorId, ResourceType, ResourceId, ActorType, ActorId, AccessMethod) when is_atom(ResourceType) and is_atom(ActorType) and is_atom(AccessMethod) -> Url = make_url([pluralize_resource(ResourceType), ResourceId, <<"acl">>, @@ -337,7 +337,7 @@ is_authorized_on_resource(RequestorId, ResourceType, ResourceId, ActorType, Acto % Corresponds to GET /groups/:id/transitive_member/:member_id % -spec is_actor_transitive_member_of_group(requestor_id(), object_id(), actor_id()) - -> true|false|{error, not_found}|{error,server_error}. + -> true | false | {error, not_found} | {error, server_error}. is_actor_transitive_member_of_group(RequestorId, ActorId, GroupId) -> Url = make_url([groups, GroupId, transitive_member, actors, ActorId]), case oc_chef_authz_http:request(Url, get, [], [], RequestorId) of @@ -352,7 +352,7 @@ is_actor_transitive_member_of_group(RequestorId, ActorId, GroupId) -> % % This succeeds unless authz is truly foobared. It doesn't seem to care what the requestor-id is. % --spec create_resource(requestor_id(), actor|container|group|object) -> {ok, object_id()} | +-spec create_resource(requestor_id(), actor | container | group | object) -> {ok, object_id()} | {error, server_error}. create_resource(RequestorId, ResourceType) -> %% authz can return 500, and we'll throw. I think that is correct @@ -367,8 +367,8 @@ create_resource(RequestorId, ResourceType) -> % Delete entity in authz % Corresponds to DELETE /{actors|groups|objects}/:id % No delete for containers... --spec delete_resource(requestor_id(), 'actor'|'group'|'object', object_id()) - -> ok | {error, forbidden|not_found|server_error}. +-spec delete_resource(requestor_id(), 'actor' | 'group' | 'object', object_id()) + -> ok | {error, forbidden | not_found | server_error}. 
delete_resource(RequestorId, ResourceType, Id) -> EffectiveRequestorId = requestor_or_superuser(RequestorId), Url = make_url([pluralize_resource(ResourceType), Id]), @@ -380,9 +380,9 @@ delete_resource(RequestorId, ResourceType, Id) -> %% Give a resource access to an entity by adding the resource %% to the entity's ACE --spec add_ace_for_entity(requestor_id(), group|actor, object_id(), +-spec add_ace_for_entity(requestor_id(), group | actor, object_id(), resource_type(), object_id(), access_method()) -> - ok | {error, forbidden|not_found|server_error}. + ok | {error, forbidden | not_found | server_error}. add_ace_for_entity(RequestorId, ResourceType, ResourceId, EntityType, EntityId, Method) -> @@ -393,9 +393,9 @@ add_ace_for_entity(RequestorId, ResourceType, ResourceId, %% Deny a resource access to an entity by adding the resource %% to the entity's ACE. Note that if the resource has the access to the %% same entity via another means, this will not change --spec remove_ace_for_entity(requestor_id(), group|actor, object_id(), +-spec remove_ace_for_entity(requestor_id(), group | actor, object_id(), resource_type(), object_id(), access_method()) -> - ok | {error, forbidden|not_found|server_error}. + ok | {error, forbidden | not_found | server_error}. remove_ace_for_entity(RequestorId, ResourceType, ResourceId, EntityType, EntityId, Method) -> update_ace_for_entity(RequestorId, ResourceType, ResourceId, @@ -437,7 +437,7 @@ add_if_missing(Item, List) -> % GET {objects|groups|actors|containers}/:id/acl % -spec get_acl_for_resource(requestor_id(), resource_type(), binary()) -> - {ok, authz_acl()}|{error, any()}. + {ok, authz_acl()} | {error, any()}. 
get_acl_for_resource(RequestorId, ResourceType, Id) -> Url = make_url([pluralize_resource(ResourceType), Id, acl]), case oc_chef_authz_http:request(Url, get, [], [], RequestorId) of @@ -532,8 +532,8 @@ add_to_group(GroupAuthzId, Type, AuthzId, RequestorId) -> %% Both `TargetActorId' and `ActorIdToRemove' are assumed here to be the AuthzId of actors, %% not any other kind of Authz object (group, container, or object). This should be %% verified by callers of this function. --spec remove_actor_from_actor_acl(ActorIdToRemove::object_id(), - TargetActorId::object_id()) -> ok | {error, any()}. +-spec remove_actor_from_actor_acl(ActorIdToRemove :: object_id(), + TargetActorId :: object_id()) -> ok | {error, any()}. remove_actor_from_actor_acl(ActorIdToRemove, TargetActorId) -> %% Target actor fetches its own ACL {ok, Acl} = get_acl_for_resource(TargetActorId, actor, TargetActorId), @@ -543,35 +543,35 @@ remove_actor_from_actor_acl(ActorIdToRemove, TargetActorId) -> set_acl(TargetActorId, actor, TargetActorId, FilteredAcl). %% @doc Front-end to recursive implementation in `remove_actor_from_acl/3`. --spec remove_actor_from_acl(ActorId::object_id(), Acl::authz_acl()) -> authz_acl(). +-spec remove_actor_from_acl(ActorId :: object_id(), Acl :: authz_acl()) -> authz_acl(). remove_actor_from_acl(ActorId, Acl) -> remove_actor_from_acl(ActorId, Acl, []). %% @doc Removes `ActorId` from all `actors` lists in the given ACL. --spec remove_actor_from_acl(ActorId::object_id(), - AclToProcess:: [] | authz_acl(), - FilteredAcl::[] | authz_acl()) -> +-spec remove_actor_from_acl(ActorId :: object_id(), + AclToProcess :: [] | authz_acl(), + FilteredAcl :: [] | authz_acl()) -> authz_acl(). 
remove_actor_from_acl(_ActorId, [], Acc) -> lists:reverse(Acc); -remove_actor_from_acl(ActorId, [{Permission, Ace}|Rest], Acc) -> +remove_actor_from_acl(ActorId, [{Permission, Ace} | Rest], Acc) -> Filtered = remove_actor_from_ace(ActorId, Ace), remove_actor_from_acl(ActorId, Rest, [{Permission, Filtered} | Acc]). %% @doc Returns the given authz_ace with `ActorId' filtered out of the `actors' list. The %% `groups' list is untouched. --spec remove_actor_from_ace(ActorId::object_id(), Ace::#authz_ace{}) -> #authz_ace{}. +-spec remove_actor_from_ace(ActorId :: object_id(), Ace :: #authz_ace{}) -> #authz_ace{}. remove_actor_from_ace(ActorId, #authz_ace{actors=Actors, groups=Groups}) -> #authz_ace{actors=[A || A <- Actors, A /= ActorId], groups=Groups}. --spec pluralize_resource(resource_type()) -> <<_:48,_:_*8>>. +-spec pluralize_resource(resource_type()) -> <<_:48, _:_*8>>. pluralize_resource(actor) -> <<"actors">>; pluralize_resource(container) -> <<"containers">>; pluralize_resource(group) -> <<"groups">>; pluralize_resource(object) -> <<"objects">>. --spec object_type_to_container_name(chef_wm:container_name()) -> <<_:32,_:_*8>>. +-spec object_type_to_container_name(chef_wm:container_name()) -> <<_:32, _:_*8>>. object_type_to_container_name(client) -> <<"clients">>; object_type_to_container_name(container) -> <<"containers">>; object_type_to_container_name(cookbook) -> <<"cookbooks">>; @@ -613,9 +613,9 @@ to_text(E) when is_atom(E) -> to_text(E) when is_list(E) -> E. --spec make_url([string()|binary()|atom(),...]) -> string(). +-spec make_url([string() | binary() | atom(), ...]) -> string(). make_url(Components) -> - string:join([to_text(E) || E <- Components],"/"). + string:join([to_text(E) || E <- Components], "/"). % Extract actors and groups from the % TODO refine spec @@ -639,7 +639,7 @@ extract_ace(JsonBlob) -> -spec extract_acl(jiffy:json_value()) -> authz_acl(). 
extract_acl(JsonBlob) -> - [ {PAtom, extract_ace(ej:get({PBin},JsonBlob))} || {PAtom, PBin} <- ?ATOM_BIN_PERMS ]. + [ {PAtom, extract_ace(ej:get({PBin}, JsonBlob))} || {PAtom, PBin} <- ?ATOM_BIN_PERMS ]. % % This is needed by the container permission inheritance @@ -654,7 +654,7 @@ merge_ace(#authz_ace{actors=Ace1Actors, groups=Ace1Groups}, -spec merge_acl(authz_acl(), authz_acl()) -> authz_acl(). merge_acl(Acl1, Acl2) -> - [{K, merge_ace(A,B)} || {{K, A}, {K, B}} <- lists:zip(Acl1, Acl2)]. + [{K, merge_ace(A, B)} || {{K, A}, {K, B}} <- lists:zip(Acl1, Acl2)]. %% unit tests for internal functions diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl.erl index 181da6e93f..006462921c 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl.erl @@ -89,7 +89,7 @@ validate_actors_clients_users(Part, FullACL) -> end end. --spec update_part(string(), ejson_term(), chef_type() | chef_authz_type(), id(), id())-> +-spec update_part(string(), ejson_term(), chef_type() | chef_authz_type(), id(), id()) -> {ok, ejson_term()}. update_part(Part, AceRecord, Type, AuthzId, OrgId) -> Ids = names_to_ids(ej:get({Part}, AceRecord), OrgId), @@ -215,7 +215,7 @@ fetch_cookbook_id(DbContext, Name, OrgId) -> fetch(Type, OrgId, AuthzId) -> fetch(Type, OrgId, AuthzId, undefined). --spec fetch(chef_type(), binary(), id(), granular|undefined) -> ejson_term() | {error, term()} | forbidden. +-spec fetch(chef_type(), binary(), id(), granular | undefined) -> ejson_term() | {error, term()} | forbidden. fetch(Type, OrgId, AuthzId, Granular) -> Path = acl_path(Type, AuthzId), Result = oc_chef_authz_http:request(Path, get, ?DEFAULT_HEADERS, [], superuser_id()), @@ -311,7 +311,7 @@ convert_all_ids_to_names(OrgId, Record, Granular) -> <<"delete">>, <<"grant">>], OrgId, Record, Granular). 
--spec convert_ids_to_names_in_part(list(binary()), binary(), ejson_term(), granular|undefined) -> ejson_term(). +-spec convert_ids_to_names_in_part(list(binary()), binary(), ejson_term(), granular | undefined) -> ejson_term(). convert_ids_to_names_in_part([], _OrgId, Record, _Granular) -> Record; convert_ids_to_names_in_part([Part | Rest], OrgId, Record, Granular) -> @@ -368,7 +368,7 @@ acl_path(Type, AuthzId) -> -spec acl_path(chef_type() | chef_authz_type(), id(), string() ) -> string(). acl_path(Type, AuthzId, Part) -> - acl_path(Type,AuthzId) ++ "/" ++ Part. + acl_path(Type, AuthzId) ++ "/" ++ Part. -spec acl_auth_path(chef_type() | chef_authz_type(), id(), binary() ) -> string(). acl_auth_path(Type, AuthzId, RequestorId) -> diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl_constraints.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl_constraints.erl index 1e2d724946..9bcc7a819f 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl_constraints.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_acl_constraints.erl @@ -28,12 +28,12 @@ -export([check_acl_constraints/5]). --spec check_acl_constraints(binary(), binary(), atom(), binary(), tuple()) -> ok | [atom(),...]. +-spec check_acl_constraints(binary(), binary(), atom(), binary(), tuple()) -> ok | [atom(), ...]. check_acl_constraints(OrgId, AuthzId, Type, AclPerm, Ace) -> check_acl_constraints(OrgId, AuthzId, Type, AclPerm, Ace, acl_checks()). --spec check_acl_constraints(binary(), binary(), atom(), binary(), tuple(), [fun()]) -> ok | [atom(),...]. +-spec check_acl_constraints(binary(), binary(), atom(), binary(), tuple(), [fun()]) -> ok | [atom(), ...]. 
check_acl_constraints(OrgId, AuthzId, Type, AclPerm, Ace, AclChecks) -> case lists:filtermap(fun(Check) -> Check(OrgId, AuthzId, Type, AclPerm, Ace) end, AclChecks) of [] -> diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_app.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_app.erl index d1410fc216..9da8fd37b1 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_app.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_app.erl @@ -7,10 +7,10 @@ %% Application callbacks -export([start/2, stop/1]). -start()-> - start(type,args). +start() -> + start(type, args). -stop()-> +stop() -> stop(state). start(_StartType, _StartArgs) -> diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_cleanup.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_cleanup.erl index c9ecfb7ae9..7e8765f156 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_cleanup.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_cleanup.erl @@ -187,7 +187,7 @@ handle_sync_event(_Event, _From, StateName, State) -> %% message other than a synchronous or asynchronous event %% (or a system message). 
%% -%% @spec handle_info(Info,StateName,State)-> +%% @spec handle_info(Info,StateName,State) -> %% {next_state, NextStateName, NextState} | %% {next_state, NextStateName, NextState, Timeout} | %% {stop, Reason, NewState} @@ -236,7 +236,7 @@ process_batch(State = #state{authz_ids = {ActorSet, GroupSet}}) -> length(GroupAuthzIdsToRemove), length(RemainingGroups) } of - {0,_,0,_} -> + {0, _, 0, _} -> ok; {LengthActors, LengthRemainingActors, LengthGroups, LengthRemainingGroups} -> error_logger:info_msg( diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_org_creator.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_org_creator.erl index 2e8a9e1eae..09335a2f41 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_org_creator.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_org_creator.erl @@ -77,7 +77,7 @@ {acls, [ %% Billing admins is very restrictive. - {add_acl, [{group, 'billing-admins'}], [read,update], [{user, creator},{group, 'billing-admins'}]}, + {add_acl, [{group, 'billing-admins'}], [read, update], [{user, creator}, {group, 'billing-admins'}]}, %% Creator (superuser normally) goes everywhere {add_acl, @@ -135,11 +135,11 @@ process_policy(#oc_chef_organization{} = Org, process_policy([], _, _, _Cache) -> %% This is where we might extract some stuff from cache to descibe the created org ok; -process_policy([PolicyEntry|Policy], Org, User, Cache) -> +process_policy([PolicyEntry | Policy], Org, User, Cache) -> case process_policy_step(PolicyEntry, Org, User, Cache) of {error, _} = Error -> Error; {Cache1, Steps} -> - process_policy(Steps++Policy, Org, User, Cache1) + process_policy(Steps ++ Policy, Org, User, Cache1) end. 
%% Returns a tuple of updated cache, and expanded steps to process @@ -167,7 +167,7 @@ process_policy_step({add_to_groups, ActorType, Members, Groups}, %% TODO capture error return [oc_chef_authz:add_to_group(GroupId, Type, MemberId, superuser) || {_, GroupId} <- GroupIds, - {Type,MemberId} <- MemberIds], + {Type, MemberId} <- MemberIds], {Cache, []}; process_policy_step({create_org_read_access_group}, #oc_chef_organization{name=OrgName, server_api_version=ApiVersion}, @@ -189,10 +189,10 @@ process_policy_step({acls, Steps}, _Org, _User, Cache) -> %% create_object(_, _, _, _, [], Cache) -> Cache; -create_object(ApiVersion, OrgId, RequestorId, Type, [Name|Remaining], Cache) -> +create_object(ApiVersion, OrgId, RequestorId, Type, [Name | Remaining], Cache) -> case create_helper(ApiVersion, OrgId, RequestorId, Type, Name) of AuthzId when is_binary(AuthzId) -> - NewCache = add_cache(Cache,{Type, Name}, AuthzId), + NewCache = add_cache(Cache, {Type, Name}, AuthzId), create_object(ApiVersion, OrgId, RequestorId, Type, Remaining, NewCache); Error -> %% Do we clean up created authz stuff here, or save it for @@ -257,11 +257,11 @@ update_acl_step({add_acl, Objects, Actions, Members}, Acls) -> fun(M, {C, U, G}) -> case M of {user, N} -> - {C, [N|U],G}; + {C, [N | U], G}; {client, N} -> - {[N|C], U, G}; + {[N | C], U, G}; {group, N} -> - {C, U, [N|G]} + {C, U, [N | G]} end end, {[], [], []}, lists:flatten(Members)), AceToAdd = #hr_ace{clients=Clients, users=Users, groups=Groups}, @@ -323,7 +323,7 @@ init_cache(#oc_chef_organization{authz_id=OrgAuthzId}, %% Notes: we assume the creator is a superuser; Elements = [ { {user, creator}, CreatorAuthzId }, { {organization}, OrgAuthzId } ], - InsertFun = fun({Item,AuthzId}, Acc) -> + InsertFun = fun({Item, AuthzId}, Acc) -> add_cache(Acc, Item, AuthzId) end, lists:foldl(InsertFun, dict:new(), Elements). @@ -339,20 +339,20 @@ add_cache(C, {Type}, AuthzId) -> set({Type}, {Resource, AuthzId}, C). 
objectlist_to_authz(C, Type, BareObjectList) -> - [find({Type,O},C) || O <- lists:flatten(BareObjectList)]. + [find({Type, O}, C) || O <- lists:flatten(BareObjectList)]. ace_to_authz(C, #hr_ace{clients=Clients, users=Users, groups=Groups}) -> {_, ClientIds} = lists:unzip(objectlist_to_authz(C, client, Clients)), {_, UserIds} = lists:unzip(objectlist_to_authz(C, user, Users)), {_, GroupIds} = lists:unzip(objectlist_to_authz(C, group, Groups)), ActorIds = lists:flatten([ClientIds, UserIds]), - #authz_ace{actors=ActorIds,groups=GroupIds}. + #authz_ace{actors=ActorIds, groups=GroupIds}. set(Key, Value, C) -> - dict:store(Key,Value, C). + dict:store(Key, Value, C). find(Key, C) -> - case dict:find(Key,C) of + case dict:find(Key, C) of {ok, Value} -> Value; error -> lager:error("Error processing org creation policy, no definition found for ~p", [Key]), diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_scoped_name.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_scoped_name.erl index 95e0782e7c..348674078b 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_scoped_name.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_authz_scoped_name.erl @@ -97,7 +97,7 @@ initialize_context(OrgId, DbContext, CallBackFun) -> %% Output: %% { [{Name, AuthzId}, [{Name, ErrorType}] } %% --spec names_to_authz_id(lookup_type() ,[binary()], #context{}) -> { [binary()],[{atom(),binary()}] }. +-spec names_to_authz_id(lookup_type() , [binary()], #context{}) -> { [binary()], [{atom(), binary()}] }. names_to_authz_id(Type, Names, MapperContext) -> %% Lower to fully qualified orgname, name ScopedNames = parse_scoped_names(Names, is_scoped_type(Type), MapperContext), @@ -136,17 +136,17 @@ convert_ids_to_names(ActorAuthzIds, GroupAuthzIds, Context) -> %% %% No error handling; we probably should generate an error when we have missing %% --spec find_client_authz_ids([binary()],#context{org_id::binary(), db_callback_fun::db_callback()}) -> [binary()]. 
+-spec find_client_authz_ids([binary()], #context{org_id :: binary(), db_callback_fun :: db_callback()}) -> [binary()]. find_client_authz_ids(ClientNames, Context) -> {AuthzIds, _Missing} = names_to_authz_id(client, ClientNames, Context), AuthzIds. --spec find_user_authz_ids([binary()],#context{org_id::binary(), db_callback_fun::db_callback()}) -> [binary()]. +-spec find_user_authz_ids([binary()], #context{org_id :: binary(), db_callback_fun :: db_callback()}) -> [binary()]. find_user_authz_ids(UserNames, Context) -> {AuthzIds, _Missing} = names_to_authz_id(user, UserNames, Context), AuthzIds. --spec find_group_authz_ids([binary()],#context{org_id::binary(), db_callback_fun::db_callback()}) -> [binary()]. +-spec find_group_authz_ids([binary()], #context{org_id :: binary(), db_callback_fun :: db_callback()}) -> [binary()]. find_group_authz_ids(GroupNames, Context) -> {AuthzIds, _Missing} = names_to_authz_id(group, GroupNames, Context), AuthzIds. @@ -158,7 +158,7 @@ find_group_authz_ids(GroupNames, Context) -> org_id_to_name(OrgId) -> %% TODO maybe rework this; it bypasses a bunch of our statistics gathering code. case chef_sql:select_rows({find_organization_by_id, [OrgId]}) of - [Org|_Others] -> proplists:get_value(<<"name">>, Org); + [Org | _Others] -> proplists:get_value(<<"name">>, Org); _ -> not_found end. @@ -251,21 +251,21 @@ authz_records_by_name(Type, OrgId, Names) -> {AuthzIds, Remaining, ordsets:new()}. %% Helper functions for oc_chef_authz_db:authz_records_by_name and oc_chef_authz_db:find_org_actors_by_name --spec names_from_records([{binary(),_} | {binary(),_,_}]) -> [binary()]. +-spec names_from_records([{binary(), _} | {binary(), _, _}]) -> [binary()]. names_from_records(Records) -> [ name_from_record(R) || R <- Records]. --spec name_from_record({binary(),_} | {binary(),_,_}) -> binary(). +-spec name_from_record({binary(), _} | {binary(), _, _}) -> binary(). name_from_record({Name, _, _}) -> Name; name_from_record({Name, _}) -> Name. 
--spec ids_from_records([{_,binary()} | {_,'null' | binary(),'null' | binary()}]) -> [binary()]. +-spec ids_from_records([{_, binary()} | {_, 'null' | binary(), 'null' | binary()}]) -> [binary()]. ids_from_records(Records) -> [ id_from_record(R) || R <- Records ]. --spec id_from_record({_, binary()} | {_, binary()|null, binary()|null}) -> binary(). +-spec id_from_record({_, binary()} | {_, binary() | null, binary() | null}) -> binary(). id_from_record({_, AuthzId}) -> AuthzId; id_from_record({_, UserAuthzId, null}) -> @@ -293,8 +293,8 @@ is_ambiguous_actor({_, _, _}) -> %% Each type of object has different restrictions on its scope. %% -spec authz_id_to_names('client' | 'group' | 'user', [binary()], - #context{org_id::binary(), db_callback_fun::db_callback()}) -> - {[binary()],[binary()]}. + #context{org_id :: binary(), db_callback_fun :: db_callback()}) -> + {[binary()], [binary()]}. authz_id_to_names(group, AuthzIds, #context{org_id = OrgId, db_callback_fun = CallbackFun}) -> {ScopedNames, DiffedList} = query_and_diff_authz_ids(find_scoped_group_name_in_authz_ids, AuthzIds, CallbackFun), {render_names_from_org_id(OrgId, ScopedNames), DiffedList}; @@ -310,9 +310,9 @@ query_and_diff_authz_ids(QueryName, AuthzIds, CallbackFun) -> case CallbackFun({QueryName, [AuthzIds]}) of not_found -> {[], AuthzIds}; - Results when is_list(Results)-> + Results when is_list(Results) -> {ResultNames, FoundAuthzIds} = lists:foldl(fun extract_maybe_scoped_name/2, - {[],[]}, Results), + {[], []}, Results), DiffedList = sets:to_list(sets:subtract(sets:from_list(AuthzIds), sets:from_list(FoundAuthzIds))), {lists:sort(ResultNames), DiffedList}; _Other -> @@ -368,7 +368,7 @@ parse_scoped_names(Names, ScopedOk, Context) -> [ maybe_parse_scoped_name(Name, Pattern, ScopedOk, Context) || Name <- Names ]. -spec maybe_parse_scoped_name(binary(), re:mp(), boolean(), #context{}) -> - #sname{} | {ill_formed_name|inappropriate_scoped_name, binary()}. 
+ #sname{} | {ill_formed_name | inappropriate_scoped_name, binary()}. maybe_parse_scoped_name(Name, Pattern, ScopedOk, Context) -> process_match(re:run(Name, Pattern, [{capture, all, binary}]), Name, Context, ScopedOk). @@ -413,7 +413,7 @@ group_by_key(L) -> %% Expansion of authz ids into scoped names %% Takes {OrgName, Name} pairs in ScopedNames and returns %% list of names with scoping metacharacter inserted --spec render_names_from_org_id(binary(),[{binary(), [binary()]}]) -> [binary()]. +-spec render_names_from_org_id(binary(), [{binary(), [binary()]}]) -> [binary()]. render_names_from_org_id(OrgId, ScopedNames) -> GroupedScopedNames = group_by_key(ScopedNames), Expanded = lists:foldl(fun(E, A) -> render_names_from_org_id_f(OrgId, E, A) end, @@ -439,7 +439,7 @@ render_names_from_org_id_f(_OrgId, {AnotherOrgId, Names}, Expanded) -> [ENames, Expanded] end. --spec make_name(binary(),binary()) -> <<_:16,_:_*8>>. +-spec make_name(binary(), binary()) -> <<_:16, _:_*8>>. make_name(OrgName, Name) -> <>. diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_cookbook_artifact.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_cookbook_artifact.erl index c3baf77ebc..b83a7afff9 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_cookbook_artifact.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_cookbook_artifact.erl @@ -26,7 +26,7 @@ -behaviour(chef_object). --mixin([{chef_object_default_callbacks,[{fetch/2, fetch}]}]). +-mixin([{chef_object_default_callbacks, [{fetch/2, fetch}]}]). 
%% chef_object behaviour callbacks -export([id/1, diff --git a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_org_user_invite.erl b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_org_user_invite.erl index a20da0616d..b93a8ad2d9 100644 --- a/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_org_user_invite.erl +++ b/src/oc_erchef/apps/oc_chef_authz/src/oc_chef_org_user_invite.erl @@ -126,7 +126,7 @@ validation_spec(create) -> {[ {<<"user">>, string} ]}; validation_spec(response) -> {[ - {<<"response">>,{fun_match, {fun valid_response/1, string, <<"Param response must be either 'accept' or 'reject'">>}}} + {<<"response">>, {fun_match, {fun valid_response/1, string, <<"Param response must be either 'accept' or 'reject'">>}}} ]}. fields_for_update(#oc_chef_org_user_invite{}) -> diff --git a/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_constraints_tests.erl b/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_constraints_tests.erl index dafbbd2ae0..4605c8e452 100644 --- a/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_constraints_tests.erl +++ b/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_constraints_tests.erl @@ -79,7 +79,7 @@ check_acl_constraints_no_failures() -> AuthzId = <<"10000000000000000000000000000000">>, Type = group, AclPerm = <<"grant">>, - Ace = {[{<<"grant">>,{[{<<"actors">>,[<<"pivotal">>]},{<<"groups">>,[]}]}}]}, + Ace = {[{<<"grant">>, {[{<<"actors">>, [<<"pivotal">>]}, {<<"groups">>, []}]}}]}, AclChecks = [ fun(_OrgId, _AuthzId, _Type, _AclPerm, _Ace) -> false end ], [ ?_assertEqual(ok, oc_chef_authz_acl_constraints:check_acl_constraints(?ORGID, AuthzId, Type, AclPerm, Ace, AclChecks)) @@ -92,7 +92,7 @@ check_acl_constraints_failures() -> AuthzId = <<"10000000000000000000000000000000">>, Type = group, AclPerm = <<"grant">>, - Ace = {[{<<"grant">>,{[{<<"actors">>,[<<"pivotal">>]},{<<"groups">>,[]}]}}]}, + Ace = {[{<<"grant">>, {[{<<"actors">>, [<<"pivotal">>]}, {<<"groups">>, []}]}}]}, AclChecks = [ fun(_OrgId, _AuthzId, 
_Type, _AclPerm, _Ace) -> {true, failure_message_here} end ], Test1 = ?_assertEqual([failure_message_here], oc_chef_authz_acl_constraints:check_acl_constraints(?ORGID, AuthzId, Type, AclPerm, Ace, AclChecks)), AclChecks2 = [ @@ -116,7 +116,7 @@ check_acl_constraints_not_grant_ace() -> AuthzId = <<"10000000000000000000000000000000">>, Type = organization, AclPerm = <<"create">>, - Ace = {[{<<"create">>,{[{<<"actors">>,[<<"pivotal">>]},{<<"groups">>,[]}]}}]}, + Ace = {[{<<"create">>, {[{<<"actors">>, [<<"pivotal">>]}, {<<"groups">>, []}]}}]}, [ ?_assertEqual(ok, oc_chef_authz_acl_constraints:check_acl_constraints(?ORGID, AuthzId, Type, AclPerm, Ace, oc_chef_authz_acl_constraints:acl_checks())) ]. diff --git a/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_tests.erl b/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_tests.erl index e6a3542507..97110862b4 100644 --- a/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_tests.erl +++ b/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_acl_tests.erl @@ -33,12 +33,12 @@ safe_fetch_ids_test_() -> fun(_) -> meck:unload(oc_chef_authz_db) end, [ {"valid: the list of IDs are returned", - ?_assertEqual([<<"id1">>,<<"id2">>], - Subject(client, Context, [<<"name1">>,<<"name2">>])) + ?_assertEqual([<<"id1">>, <<"id2">>], + Subject(client, Context, [<<"name1">>, <<"name2">>])) }, {"invalid: an error is raised because a name is missing from the response", ?_assertThrow({invalid, user, [<<"name3">>]}, - Subject(user, Context, [<<"name1">>,<<"name2">>, <<"name3">>])) + Subject(user, Context, [<<"name1">>, <<"name2">>, <<"name3">>])) } ] }. @@ -150,8 +150,8 @@ valid_actors_only_ej() -> { [{<<"read">>, {[ - {<<"groups">>,[<<"x">>]}, - {<<"actors">>,[<<"a">>, <<"b">>, <<"c">>]} + {<<"groups">>, [<<"x">>]}, + {<<"actors">>, [<<"a">>, <<"b">>, <<"c">>]} ]} }] }. 
@@ -160,10 +160,10 @@ valid_actors_users_clients_ej() -> { [{<<"read">>, {[ - {<<"actors">>,[]}, - {<<"groups">>,[<<"x">>]}, - {<<"users">>,[<<"a">>,<<"b">>]}, - {<<"clients">>,[<<"c">>]} + {<<"actors">>, []}, + {<<"groups">>, [<<"x">>]}, + {<<"users">>, [<<"a">>, <<"b">>]}, + {<<"clients">>, [<<"c">>]} ]} }] }. @@ -171,10 +171,10 @@ invalid_actors_users_clients_ej() -> { [{<<"read">>, {[ - {<<"actors">>,[<<"a">>,<<"b">>, <<"c">>]}, - {<<"groups">>,[<<"x">>]}, - {<<"users">>,[<<"a">>,<<"b">>]}, - {<<"clients">>,[<<"c">>]} + {<<"actors">>, [<<"a">>, <<"b">>, <<"c">>]}, + {<<"groups">>, [<<"x">>]}, + {<<"users">>, [<<"a">>, <<"b">>]}, + {<<"clients">>, [<<"c">>]} ]} }] }. @@ -183,9 +183,9 @@ invalid_clients_only_ej() -> { [{<<"read">>, {[ - {<<"actors">>,[]}, - {<<"groups">>,[<<"x">>]}, - {<<"clients">>,[<<"c">>]} + {<<"actors">>, []}, + {<<"groups">>, [<<"x">>]}, + {<<"clients">>, [<<"c">>]} ]} }] }. @@ -194,9 +194,9 @@ invalid_users_only_ej() -> { [{<<"read">>, {[ - {<<"actors">>,[]}, - {<<"groups">>,[<<"x">>]}, - {<<"users">>,[<<"a">>,<<"b">>]} + {<<"actors">>, []}, + {<<"groups">>, [<<"x">>]}, + {<<"users">>, [<<"a">>, <<"b">>]} ]} }] }. diff --git a/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_test_utils.erl b/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_test_utils.erl index 29e89bd577..9e04f4f7ea 100644 --- a/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_test_utils.erl +++ b/src/oc_erchef/apps/oc_chef_authz/test/oc_chef_authz_test_utils.erl @@ -39,7 +39,7 @@ test_setup() -> application:set_env(oc_chef_authz, http_pool, [{oc_chef_authz_test_pool, ?pool_opts}]), - Server = {context,<<"test-req-id">>,{server,"localhost",5984,[],[]}}, + Server = {context,<<"test-req-id">>, {server,"localhost",5984,[],[]}}, Superuser = <<"cb4dcaabd91a87675a14ec4f4a00050d">>, {Server, Superuser}. 
diff --git a/src/oc_erchef/apps/oc_chef_wm/itest/oc_chef_wm_keys_SUITE.erl b/src/oc_erchef/apps/oc_chef_wm/itest/oc_chef_wm_keys_SUITE.erl index 1c59e9c364..57f3cdb24a 100644 --- a/src/oc_erchef/apps/oc_chef_wm/itest/oc_chef_wm_keys_SUITE.erl +++ b/src/oc_erchef/apps/oc_chef_wm/itest/oc_chef_wm_keys_SUITE.erl @@ -45,10 +45,10 @@ -define(ORG_KEYS_ACCESS_GROUP_AUTHZ_ID, <<"20000000000000000000000000000002">>). -define(KEY1NAME, <<"key1">>). --define(KEY1EXPIRE, {datetime, {{2099,12,31},{00,00,00}}}). +-define(KEY1EXPIRE, {datetime, {{2099,12,31}, {00,00,00}}}). -define(KEY1EXPIRESTRING, <<"2099-12-31T00:00:00Z">>). -define(KEY2NAME, <<"key2">>). --define(KEY2EXPIRE, {datetime, {{2010,12,31},{00,00,00}}}). +-define(KEY2EXPIRE, {datetime, {{2010,12,31}, {00,00,00}}}). -define(KEY2EXPIRESTRING, <<"2010-12-31T00:00:00Z">>). -define(DEFAULT_KEY_ENTRY, {<<"default">>, false}). @@ -56,7 +56,7 @@ -define(KEY_2_ENTRY, { ?KEY2NAME, true } ). init_per_suite(LastConfig) -> - Config = chef_test_db_helper:start_db([{app, oc_chef_wm}|LastConfig], "oc_chef_wm_itests"), + Config = chef_test_db_helper:start_db([{app, oc_chef_wm} | LastConfig], "oc_chef_wm_itests"), Config2 = setup_helper:start_server(Config), make_org(?ORG_NAME, ?ORG_AUTHZ_ID), {OrgId, _} = chef_db:fetch_org_metadata(context(), ?ORG_NAME), @@ -68,7 +68,7 @@ init_per_suite(LastConfig) -> {ok, AltPubKey} = file:read_file(AltPubKeyFile), {ok, PrivateKeyRE} = re:compile(".*BEGIN (RSA )?PRIVATE KEY.*"), {ok, PubKeyRE} = re:compile(".*BEGIN (RSA )?PUBLIC KEY.*"), - [{org_id, OrgId}, {pubkey, PubKey}, {alt_pubkey, AltPubKey},{pubkey_regex, PubKeyRE}, {privkey_regex, PrivateKeyRE}] ++ Config2. + [{org_id, OrgId}, {pubkey, PubKey}, {alt_pubkey, AltPubKey}, {pubkey_regex, PubKeyRE}, {privkey_regex, PrivateKeyRE}] ++ Config2. end_per_suite(Config) -> setup_helper:base_end_per_suite(Config). 
diff --git a/src/oc_erchef/apps/oc_chef_wm/itest/setup_helper.erl b/src/oc_erchef/apps/oc_chef_wm/itest/setup_helper.erl index 6afd35f7e0..74bab46e91 100644 --- a/src/oc_erchef/apps/oc_chef_wm/itest/setup_helper.erl +++ b/src/oc_erchef/apps/oc_chef_wm/itest/setup_helper.erl @@ -157,7 +157,7 @@ base_init_per_suite(Config0) -> AuthzId = get_config(authz_id, Config0), ClientName = get_config(client_name, Config0), - Config1 = chef_test_db_helper:start_db([{app, oc_chef_wm}|Config0], ?TEST_DB_NAME), + Config1 = chef_test_db_helper:start_db([{app, oc_chef_wm} | Config0], ?TEST_DB_NAME), Config2 = start_server(Config1), FakeContext = chef_db:make_context(?API_MIN_VER, <<"fake-req-id">>), diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_reindex.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_reindex.erl index b62f102e03..0034a5b8dc 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_reindex.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_reindex.erl @@ -152,7 +152,7 @@ reindex_by_name(Ctx, {OrgId, OrgName} = OrgInfo, Index, Names) -> %% The lager warning does not print anything on the console {Acc, [Name | Missing]} end - end, {[],[]}, Names), + end, {[], []}, Names), lager:debug("Ids that will be reindexed: ~p ~n", [Ids]), lager:debug("Ids that are missing: ~p ~n", [MissingList]), {ok, BatchSize} = application:get_env(oc_chef_wm, reindex_batch_size), @@ -166,7 +166,7 @@ reindex_by_name(Ctx, {OrgId, OrgName} = OrgInfo, Index, Names) -> end. all_ids_from_name_id_dict(NameIdDict) -> - dict:fold(fun(_K, V, Acc) -> [V|Acc] end, + dict:fold(fun(_K, V, Acc) -> [V | Acc] end, [], NameIdDict). 
@@ -262,7 +262,7 @@ add_batch(Batch, OrgName) -> log_failures(_OrgName, []) -> ok; -log_failures(OrgName, [Failure|Rest]) -> +log_failures(OrgName, [Failure | Rest]) -> case Failure of {{TypeName, Id, _DbName}, Reason} -> lager:error("reindexing[~s] item ~s[~s] failed to reindex: ~s", [OrgName, TypeName, Id, Reason]); @@ -274,13 +274,13 @@ log_failures(OrgName, [Failure|Rest]) -> -spec humanize_failures(list(), list()) -> list(). humanize_failures([], Acc) -> Acc; -humanize_failures([H|T], Acc) -> +humanize_failures([H | T], Acc) -> case H of {Id, Reason} -> humanize_failures(T, [{Id, pretty_reason(Reason)} | Acc]); Error -> humanize_failures(T, [{<<"unknown">>, pretty_reason(Error)} | Acc]) end. -pretty_reason({error,{error,no_members}}) -> +pretty_reason({error, {error, no_members}}) -> "no_members: Ran out of HTTP workers talking to search backend"; pretty_reason({exit, {timeout, _}}) -> "timeout"; @@ -298,14 +298,14 @@ stub_records_for_indexing(SerializedObjects, NameKey, NameIdDict, Index, OrgId) stub_records_for_indexing([], _NameKey, _NameIdDict, _Index, _OrgId, ExistingAcc, MissingAcc) -> {ExistingAcc, MissingAcc}; -stub_records_for_indexing([SO|Rest], NameKey, NameIdDict, Index, OrgId, ExistingAcc, MissingAcc) -> +stub_records_for_indexing([SO | Rest], NameKey, NameIdDict, Index, OrgId, ExistingAcc, MissingAcc) -> PreliminaryEJson = decompress_and_decode(SO), ItemName = ej:get({NameKey}, PreliminaryEJson), {NewEAcc, NewMAcc} = case dict:find(ItemName, NameIdDict) of {ok, ObjectId} -> StubRec = stub_record(Index, OrgId, ObjectId, ItemName, PreliminaryEJson), - {[{StubRec, PreliminaryEJson}|ExistingAcc], MissingAcc}; + {[{StubRec, PreliminaryEJson} | ExistingAcc], MissingAcc}; error -> lager:warning("skipping: no id found for name ~p", [ItemName]), {ExistingAcc, [{Index, ItemName} | MissingAcc]} diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm.erl index d9f7354723..d25cd7a6be 100644 --- 
a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm.erl @@ -23,7 +23,7 @@ -type http_verb() :: 'GET' | 'PUT' | 'POST' | 'DELETE' | 'HEAD' | 'OPTIONS'. -type base_state() :: #base_state{}. --export_type([http_verb/0, base_state/0, auth_info_return/0,container_name/0]). +-export_type([http_verb/0, base_state/0, auth_info_return/0, container_name/0]). -callback init(list()) -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_cookbooks.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_cookbooks.erl index e41911801a..569f4b216c 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_cookbooks.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_cookbooks.erl @@ -169,9 +169,9 @@ aggregate_versions(CookbookVersions) -> %% and a per-version map containing the version and the URL for the version %% %% @end --spec make_version_list(CookbookVersionFun::fun(), - Versions::[ {binary(), version()}], - NumVersions:: non_neg_integer() | all) -> +-spec make_version_list(CookbookVersionFun :: fun(), + Versions :: [ {binary(), version()}], + NumVersions :: non_neg_integer() | all) -> list(). make_version_list(CookbookVersionFun, Versions, NumVersions) -> TrimmedVersions = case NumVersions of @@ -184,9 +184,9 @@ make_version_list(CookbookVersionFun, Versions, NumVersions) -> {<<"url">>, CookbookVersionFun(VersionBinary) }]} end || Version <- TrimmedVersions]. --spec make_cookbook_list(Req::#wm_reqdata{}, - Cookbooks:: [ { binary(), [ {version()} ] } ], - NumVersions:: all | non_neg_integer()) -> +-spec make_cookbook_list(Req :: #wm_reqdata{}, + Cookbooks :: [ { binary(), [ {version()} ] } ], + NumVersions :: all | non_neg_integer()) -> list(). 
make_cookbook_list(Req, Cookbooks, NumVersions) -> [ begin diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_data.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_data.erl index 4f589562a8..7db669d3bb 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_data.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_data.erl @@ -65,7 +65,7 @@ request_type() -> "data". allowed_methods(Req, State) -> - {['GET','POST'], Req, State}. + {['GET', 'POST'], Req, State}. -spec validate_request(chef_wm:http_verb(), wm_req(), chef_wm:base_state()) -> {wm_req(), chef_wm:base_state()}. diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_depsolver.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_depsolver.erl index 99bf0d00fb..57699e29b8 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_depsolver.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_depsolver.erl @@ -138,12 +138,12 @@ process_post(Req, #base_state{reqid = ReqId, %% We expand the runlist to a set of cookbooks with dups removed. If %% a versioned recipe is provided in the runlist we return it as tuple %% of {cookbook_name, version} --spec cookbooks_for_runlist(Runlist::[binary()]) -> [cookbook_with_version()]. +-spec cookbooks_for_runlist(Runlist :: [binary()]) -> [cookbook_with_version()]. cookbooks_for_runlist(Runlist) -> Cookbooks = [ cookbook_for_recipe(split_version(Item)) || Item <- Runlist ], remove_dups(Cookbooks). --spec split_version(Recipe::binary()) -> cookbook_with_version(). +-spec split_version(Recipe :: binary()) -> cookbook_with_version(). split_version(Recipe) when is_binary(Recipe) -> case re:split(Recipe, <<"@">>) of [Name] -> @@ -184,7 +184,7 @@ remove_dups(L) -> %% better to loop over the list of Cookbooks instead -spec not_found_cookbooks(AllVersions :: [chef_depsolver:dependency_set()], Cookbooks :: [cookbook_with_version()]) -> - ok | {not_found, [binary(),...]}. + ok | {not_found, [binary(), ...]}. 
not_found_cookbooks(AllVersions, Cookbooks) -> NotFound = [ cookbook_name(Cookbook) || Cookbook <- Cookbooks, cookbook_missing(Cookbook, AllVersions)], case NotFound of @@ -207,8 +207,8 @@ cookbook_name({Name, _Version}) -> %% In order to work in the same manner as the ruby code it will only check for a %% cookbook name in the list of all cookbook version. This means if any version of a cookbook %% exists it returns false --spec cookbook_missing(CB::cookbook_with_version(), - AllVersions::[chef_depsolver:dependency_set()]) -> boolean(). +-spec cookbook_missing(CB :: cookbook_with_version(), + AllVersions :: [chef_depsolver:dependency_set()]) -> boolean(). cookbook_missing(CB, AllVersions) when is_binary(CB) -> not proplists:is_defined(CB, AllVersions); cookbook_missing({Name, _Version}, AllVersions) -> @@ -217,7 +217,7 @@ cookbook_missing({Name, _Version}, AllVersions) -> %% @doc Given the output from not_found_cookbooks/2 and %% chef_depsolver:solve_dependencies/3, format an appropriate response %% document -handle_depsolver_results({not_found, CookbookNames}, _Deps, Req, State) when is_list(CookbookNames)-> +handle_depsolver_results({not_found, CookbookNames}, _Deps, Req, State) when is_list(CookbookNames) -> precondition_failed(Req, State, not_found_message(cookbook_version, CookbookNames), cookbook_version_not_found); @@ -315,7 +315,7 @@ make_json_list(CookbookVersions, URI, ApiVersion, Key, NumAttempts) -> % It is not in the cache and nobody is working on it. Stake our claim and % do the work. case chef_cbv_cache:claim(Key) of - Response when Response =:= undefined orelse Response =:= ok-> + Response when Response =:= undefined orelse Response =:= ok -> %% We iterate over the list again since we only want to construct the s3urls %% if the authz check has succeeded (in caller). 
We respond with a minimal version of the %% cookbook which has just enough information for chef-client to run @@ -401,12 +401,12 @@ not_reachable_message(CookbookName) -> ", which does not exist."]), {[{<<"message">>, Reason}, {<<"non_existent_cookbooks">>, [ CookbookName ]}, - {<<"most_constrained_cookbooks">>,[]}]}. + {<<"most_constrained_cookbooks">>, []}]}. timeout_message() -> {[{<<"message">>, <<"unable to solve dependencies in alotted time">>}, {<<"non_existent_cookbooks">>, []}, - {<<"most_constrained_cookbooks">>,[]}]}. + {<<"most_constrained_cookbooks">>, []}]}. %%------------------------------------------------------------------------------ %% Miscellaneous Utilities @@ -414,17 +414,17 @@ timeout_message() -> %% Helpers to construct pieces of error messages from lists of %% cookbook names --spec bin_str_join(Names::[binary()], - Sep::<<_:8,_:_*8>>, - Acc::[binary()]) -> [binary()]. +-spec bin_str_join(Names :: [binary()], + Sep :: <<_:8, _:_*8>>, + Acc :: [binary()]) -> [binary()]. bin_str_join([], _Sep, Acc) -> Acc; bin_str_join([H], _Sep, Acc) -> [H | Acc]; -bin_str_join([Name| Rest], Sep, Acc) -> +bin_str_join([Name | Rest], Sep, Acc) -> bin_str_join(Rest, Sep, [Sep , Name | Acc]). --spec bin_str_join(Names::[binary()], Sep::<<_:8,_:_*8>>) -> binary(). +-spec bin_str_join(Names :: [binary()], Sep :: <<_:8, _:_*8>>) -> binary(). bin_str_join(Names, Sep) -> Reverse = lists:reverse(Names), list_to_binary(bin_str_join(Reverse, Sep, [])). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_enforce.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_enforce.erl index aa68677f89..d023ca810a 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_enforce.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_enforce.erl @@ -42,7 +42,7 @@ max_size(Req) -> disabled -> Req; TunedMaxSize -> - max_size(wrq:method(Req),Req, TunedMaxSize) + max_size(wrq:method(Req), Req, TunedMaxSize) end. 
max_size(Method, Req, MaxSize) when Method =:= 'POST'; diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_malformed.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_malformed.erl index 2837a39c61..fd44543af3 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_malformed.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_malformed.erl @@ -166,7 +166,7 @@ malformed_request_message(#ej_invalid{type=json_type, %% Entire run list is the wrong type malformed_request_message(#ej_invalid{type=json_type, key=Key}, _Req, _State) when Key =:= <<"run_list">> -> - error_envelope([<<"Field '", Key/binary,"' is not a valid run list">>]); + error_envelope([<<"Field '", Key/binary, "' is not a valid run list">>]); %% entire env_run_lists is the wrong type malformed_request_message(#ej_invalid{type=json_type, @@ -215,7 +215,7 @@ bin_str_join(L, Sep) -> bin_str_join(L, Sep, []). bin_str_join([H], _Sep, Acc) -> - lists:reverse([<<"'">>, H, <<"'">>|Acc]); + lists:reverse([<<"'">>, H, <<"'">> | Acc]); bin_str_join([H | T], Sep, Acc) -> bin_str_join(T, Sep, [Sep, <<"'">>, H, <<"'">> | Acc]). @@ -223,9 +223,9 @@ bin_str_join([H | T], Sep, Acc) -> -spec to_binary( any() ) -> binary(). to_binary(A) when is_atom(A) -> atom_to_binary(A, utf8); -to_binary(I) when is_integer(I)-> +to_binary(I) when is_integer(I) -> list_to_binary(integer_to_list(I)); -to_binary(B) when is_binary(B)-> +to_binary(B) when is_binary(B) -> B; to_binary(O) -> %% Catch-all case diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_data.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_data.erl index 98d5873b22..ddfb416b0d 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_data.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_data.erl @@ -134,11 +134,11 @@ create_path(Req, #base_state{resource_state = #data_state{ {binary_to_list(ItemName), Req, State}. --spec from_json(#wm_reqdata{}, #base_state{}) -> {boolean()|{halt,409|500}, #wm_reqdata{}, #base_state{}}. 
+-spec from_json(#wm_reqdata{}, #base_state{}) -> {boolean() | {halt, 409 | 500}, #wm_reqdata{}, #base_state{}}. from_json(Req, #base_state{resource_state = #data_state{data_bag_name = DataBagName, data_bag_item_ejson = ItemData} } = State) -> oc_chef_wm_base:create_from_json(Req, State, chef_data_bag_item, - {authz_id,undefined}, {DataBagName, ItemData}). + {authz_id, undefined}, {DataBagName, ItemData}). % Callback from create_from_json, which allows us to customize our body response. finalize_create_body(_Req, #base_state{ resource_state = #data_state{data_bag_name = DataBagName, diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_principal.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_principal.erl index b9fe0fd047..448ced5b96 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_principal.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_principal.erl @@ -85,7 +85,7 @@ resource_exists(Req, #base_state{server_api_version = ?API_v0, {not_found, client} -> Req1 = chef_wm_util:set_json_body(Req, not_found_ejson(<<"principal">>, Name)), {false, Req1, State#base_state{log_msg = client_not_found}}; - Response-> + Response -> PrincipalState = make_principal_state(Response), {true, Req, State#base_state{resource_state = PrincipalState}} end; diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_sandbox.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_sandbox.erl index 8bc561db7f..817914af05 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_sandbox.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_named_sandbox.erl @@ -191,7 +191,7 @@ as_binary(B) when is_binary(B) -> timestamp(now) -> timestamp(os:timestamp()); -timestamp({_,_,_} = TS) -> - {{Year,Month,Day},{Hour,Minute,Second}} = calendar:now_to_universal_time(TS), +timestamp({_, _, _} = TS) -> + {{Year, Month, Day}, {Hour, Minute, Second}} = calendar:now_to_universal_time(TS), iolist_to_binary(io_lib:format("~4w-~2..0w-~2..0wT~2..0w:~2..0w:~2..0w+00:00", 
[Year, Month, Day, Hour, Minute, Second])). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_roles.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_roles.erl index e5c23770e3..059b0b5299 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_roles.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_roles.erl @@ -62,7 +62,7 @@ request_type() -> "roles". allowed_methods(Req, State) -> - {['GET','POST'], Req, State}. + {['GET', 'POST'], Req, State}. -spec validate_request(chef_wm:http_verb(), wm_req(), chef_wm:base_state()) -> {wm_req(), chef_wm:base_state()}. diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_sandboxes.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_sandboxes.erl index 649f678866..6eae979315 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_sandboxes.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_sandboxes.erl @@ -75,8 +75,8 @@ create_path(Req, #base_state{organization_guid = OrgId, resource_state = SandboxState}=State) -> %% there is no name so to help with uniqueness, we take a digest of %% the content - Name_ish = erlang:md5(wrq:req_body(Req)), - Id = chef_object_base:make_org_prefix_id(OrgId, Name_ish), + NameIsh = erlang:md5(wrq:req_body(Req)), + Id = chef_object_base:make_org_prefix_id(OrgId, NameIsh), SandboxState1 = SandboxState#sandbox_state{id = Id}, {binary_to_list(Id), Req, State#base_state{resource_state = SandboxState1}}. diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search.erl index bcd0125087..2d5e0fac2b 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search.erl @@ -68,7 +68,7 @@ request_type() -> "search". allowed_methods(Req, State) -> - {['GET','POST'], Req, State}. + {['GET', 'POST'], Req, State}. -spec validate_request(chef_wm:http_verb(), wm_req(), chef_wm:base_state()) -> {wm_req(), chef_wm:base_state()}. 
@@ -418,7 +418,7 @@ fetch_result_rows({Ids, Rest}, BatchSize, BulkGetFun, {N, Acc}) -> %% a dangling comma and strip it out. Completely ugly since %% we're assuming the separator is a comma. %% FIXME Refactor into a more readable/understandable design -encode_results([], [<<",">>|Acc]) -> +encode_results([], [<<",">> | Acc]) -> Acc; encode_results([], Acc) -> Acc; @@ -437,7 +437,7 @@ encode_results(Results, Prefix, Acc) -> %% This function knows how to deal with gzip binary from SQL and with EJSON data coming %% straight from couch. If the data has come from couch, this is where couch cruft keys _id %% and _rev are removed. -encode_result_rows([Item|_Rest]=Items) when is_binary(Item) -> +encode_result_rows([Item | _Rest]=Items) when is_binary(Item) -> ItemList = << <<(chef_db_compression:decompress(Bin))/binary, ",">> || Bin <- Items >>, %% remove trailing "," from binary binary:part(ItemList, {0, size(ItemList) - 1}); @@ -456,13 +456,13 @@ remove_couchdb_keys([]) -> remove_couchdb_keys(L) -> remove_couchdb_keys(L, 0). -remove_couchdb_keys([{Key, _}|T], N) when Key =:= <<"_rev">>; - Key =:= <<"_id">> -> - remove_couchdb_keys(T, N+1); +remove_couchdb_keys([{Key, _} | T], N) when Key =:= <<"_rev">>; + Key =:= <<"_id">> -> + remove_couchdb_keys(T, N + 1); remove_couchdb_keys(L, N) when N > 1 -> L; -remove_couchdb_keys([H|T], N) -> - [H|remove_couchdb_keys(T, N)]; +remove_couchdb_keys([H | T], N) -> + [H | remove_couchdb_keys(T, N)]; remove_couchdb_keys([], _) -> []. @@ -484,7 +484,7 @@ search_result_start(Start, Total) -> search_result_finish(Result) -> %% Note that all we need here is an iolist not a flat binary. - lists:reverse([<<"]}">>|Result]). + lists:reverse([<<"]}">> | Result]). 
malformed_request_message(#ej_invalid{}, _Req, _State) -> Msg = <<"invalid partial search request body">>, diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search_index.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search_index.erl index 0a0d9da779..7e6e2b5537 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search_index.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_search_index.erl @@ -67,7 +67,7 @@ auth_info(Req, State) -> %% @doc Create a list of the various search indexes that the given organization has access %% to. This includes the four standard indexes (client, environment, node, and role), as %% well as an index for each of the organization's data bags. --spec generate_index_list(DBContext::tuple(), OrgId::binary()) -> IndexNames::list(binary()). +-spec generate_index_list(DBContext :: tuple(), OrgId :: binary()) -> IndexNames :: list(binary()). generate_index_list(DBContext, OrgId) -> %% The order of these lists doesn't really matter, given that they are ultimately %% destined to be keys in a map structure. In any event, the web UI currently orders @@ -87,8 +87,8 @@ generate_index_list(DBContext, OrgId) -> %% [{"nodes", "http://server.com/search/nodes"}, %% {"clients", "http://server.com/search/clients"}] %% --spec index_map(SearchIndexes::list(binary()), Req::tuple()) -> - list({IndexName::binary(), URL::binary()}). +-spec index_map(SearchIndexes :: list(binary()), Req :: tuple()) -> + list({IndexName :: binary(), URL :: binary()}). index_map(SearchIndexes, Req) -> [{Index, oc_chef_wm_routes:route(organization_search, Req, [{search_index, Index}])} || Index <- SearchIndexes]. 
diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_stats.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_stats.erl index aff3321f27..edc9d1977c 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_stats.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_stats.erl @@ -47,7 +47,7 @@ content_types_provided(Req, State) -> % I think there is a bug in webmachine where it wont allow us to use % 'text/plain; version=0.0.4'. % TODO: Understand https://github.com/basho/webmachine/blob/develop/src/webmachine_util.erl#L140-L158 - {{"text/plain",[{"version","0.0.4"}]}, to_text}, + {{"text/plain", [{"version", "0.0.4"}]}, to_text}, {"text/plain", to_text}], case wrq:get_qs_value("format", Req) of undefined -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_status.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_status.erl index 454274be19..719727becd 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_status.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_status.erl @@ -93,7 +93,7 @@ log_failure(fail, Pings) -> FailureData = {{status, fail}, {upstreams, {Pings}}}, lager:error("/_status~n~p~n", [FailureData]), ok; -log_failure(_,_) -> +log_failure(_, _) -> ok. %% Execute health checks in parallel such that no check will exceed `ping_timeout()' @@ -183,4 +183,4 @@ ping_timeout() -> envy:get(oc_chef_wm, health_ping_timeout, pos_integer). ping_modules() -> - envy:get(oc_chef_wm, health_ping_modules,list). + envy:get(oc_chef_wm, health_ping_modules, list). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_universe.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_universe.erl index 7b955afe0f..c04a8727f2 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_universe.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_universe.erl @@ -69,15 +69,15 @@ to_json(Req, #base_state{chef_db_context = DbContext, Universe = make_universe(Req, DependencyList), {chef_json:encode({Universe}), Req, State}. 
--spec make_dependencies(Dependencies::[ {binary(), version(), binary()}]) -> list(). +-spec make_dependencies(Dependencies :: [ {binary(), version(), binary()}]) -> list(). make_dependencies(Dependencies) -> [ begin Spec = iolist_to_binary(io_lib:format("~s ~s", [Match, Version])), {Name, Spec} end || {Name, Version, Match} <- Dependencies]. --spec make_version_list(CookbookUrlFun::fun(), - Versions::[ {version(), [ {binary(), version(), binary()} ] } ]) -> +-spec make_version_list(CookbookUrlFun :: fun(), + Versions :: [ {version(), [ {binary(), version(), binary()} ] } ]) -> list(). make_version_list(CookbookUrlFun, Versions) -> [ begin @@ -90,7 +90,7 @@ make_version_list(CookbookUrlFun, Versions) -> } end || { Version, Dependencies} <- Versions]. --spec make_universe(Req::#wm_reqdata{}, DependencyList::[depsolver:dependency_set()]) -> +-spec make_universe(Req :: #wm_reqdata{}, DependencyList :: [depsolver:dependency_set()]) -> list(). make_universe(Req, DependencyList) -> [ begin diff --git a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_util.erl b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_util.erl index d9847c3242..d2eb81cb59 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_util.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_util.erl @@ -330,5 +330,5 @@ scheme(Req) -> port_string(Default) when Default =:= 80; Default =:= 443 -> ""; port_string(Port) -> - [$:|erlang:integer_to_list(Port)]. + [$: | erlang:integer_to_list(Port)]. 
diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_action.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_action.erl index 593f416a4b..5cf0e26b6f 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_action.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_action.erl @@ -41,11 +41,11 @@ create_message(Req, #base_state{resource_state = ResourceState} = State) -> %% %% Internal functions %% --spec construct_payload(FullActionPayload :: [{binary(), binary()},...], +-spec construct_payload(FullActionPayload :: [{binary(), binary()}, ...], Task :: binary(), Req :: wm_req(), State :: #base_state{}, - EntitySpecificPayload :: [{binary(), binary()},...]) -> binary(). + EntitySpecificPayload :: [{binary(), binary()}, ...]) -> binary(). construct_payload(FullActionPayload, Task, Req, #base_state{ requestor = #chef_client{name = Name} } = State, EntitySpecificPayload) -> @@ -260,10 +260,10 @@ task(Req, #base_state{resource_state=#association_state{}, log_msg = LogMsg}) -> invite_deleted -> <<"reject">> end end; -task(Req, _State)-> +task(Req, _State) -> key_for_method(wrq:method(Req)). --spec key_for_method('POST'|'PUT'|'DELETE') -> <<_:48>>. +-spec key_for_method('POST' | 'PUT' | 'DELETE') -> <<_:48>>. key_for_method('DELETE') -> <<"delete">>; key_for_method('POST') -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_associations.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_associations.erl index 2806d15766..087a8633b7 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_associations.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_associations.erl @@ -21,12 +21,12 @@ error_removing_from_org_user_group. -type deprovision_warning() :: usag_record_delete_failed | org_admin_group_fetch_failed | org_admin_ace_removal_failed. --type deprovision_error_tuple() :: {error, { deprovision_error(),term()}}. +-type deprovision_error_tuple() :: {error, { deprovision_error(), term()}}. -type deprovision_warning_msg() :: { deprovision_warning(), term() }. 
-type deprovision_warning_tuple() :: {warning, [ deprovision_warning_msg(), ... ] }. -type deprovision_response() :: ok | deprovision_warning_tuple() | deprovision_error_tuple(). --type provision_error() :: usag_authz_creation_failed | usag_creation_failed |fetch_org_users_group_failed | +-type provision_error() :: usag_authz_creation_failed | usag_creation_failed | fetch_org_users_group_failed | usag_update_failed | add_usag_to_org_users_group_failed | fetch_org_users_group_by_name_failed. -type provision_warning() :: fetch_org_admins_failed | add_read_ace_for_admins_failed. -type provision_error_tuple() :: {error, { provision_error(), term()}}. @@ -56,7 +56,7 @@ wm_associate_user(Req, #base_state{organization_guid = OrgId, username = UserName}, data = ReqData}} = State, RequestorId) -> - Data = ej:set({<<"user">>},ReqData,UserId), + Data = ej:set({<<"user">>}, ReqData, UserId), ObjectRec = chef_object:new_record(oc_chef_org_user_association, ApiVersion, OrgId, unset, Data), case chef_db:create(ObjectRec, DbContext, RequestorId) of {conflict, _} -> @@ -130,7 +130,7 @@ deprovision_fetch_users_group(#oc_chef_group{} = USAG, #context{ db_context = Db for_requestor_id = RequestorAuthzId }, DbContext), deprovision_remove_usag_from_users(Result, Context#context{usag = USAG}); deprovision_fetch_users_group(Error, _Context) -> - {error, {error_fetching_usag,Error}}. + {error, {error_fetching_usag, Error}}. deprovision_remove_usag_from_users(#oc_chef_group{} = OrgUsersGroup, #context{usag = USAG, diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_data_collector.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_data_collector.erl index 069a36559c..c83e8c1a45 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_data_collector.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_data_collector.erl @@ -57,7 +57,7 @@ notify(_Req, _State) -> -spec req_success(Req :: wm_req()) -> boolean(). 
req_success(Req) -> case Code = integer_to_list(wrq:response_code(Req)) of - Code = [$2|_] -> + Code = [$2 | _] -> true; _ -> false diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_object_db.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_object_db.erl index 616038aa35..8084103193 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_object_db.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_object_db.erl @@ -21,7 +21,7 @@ reqid :: binary(), darklaunch = undefined}). --type delete_type() ::chef_object() | +-type delete_type() :: chef_object() | #oc_chef_container{} | #oc_chef_group{} | #chef_cookbook_version{} | @@ -67,7 +67,7 @@ delete(DbContext, #chef_data_bag{org_id = OrgId, %% can remove them from Solr as well; a cascade-on-delete foreign key takes care of the %% data bag item deletion from the database DataBagItemIds = case chef_db:fetch_data_bag_item_ids(DbContext, OrgId, DataBagName) of - Items when is_list(Items)-> + Items when is_list(Items) -> Items; _Other -> [] @@ -190,6 +190,6 @@ bulk_delete_from_solr(Type, Ids, OrgId, ReqId) -> [ chef_index:delete(Type, Id, OrgId, ReqId) || Id <- Ids ], ok. --spec dbname(binary()) -> <<_:40,_:_*8>>. +-spec dbname(binary()) -> <<_:40, _:_*8>>. dbname(OrgId) -> <<"chef_", OrgId/binary>>. 
diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_authn_ldap.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_authn_ldap.erl index 52a8e3b4b5..b6709d931b 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_authn_ldap.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_authn_ldap.erl @@ -96,7 +96,7 @@ find_and_authenticate_user(Session, User, Password, Config) -> GroupDN = proplists:get_value(group_dn, Config, ""), Filter = case GroupDN of "" -> {filter, eldap:equalityMatch(LoginAttr, User)}; - _ -> {filter,eldap:'and'([eldap:equalityMatch(LoginAttr, User), eldap:equalityMatch("memberOf",GroupDN)])} + _ -> {filter, eldap:'and'([eldap:equalityMatch(LoginAttr, User), eldap:equalityMatch("memberOf", GroupDN)])} end, % Auth so we can search for the user @@ -181,7 +181,7 @@ canonical_username(Username) -> result_to_user_ejson(_, UserName, []) -> lager:info("User ~p not found in LDAP", [UserName]), {error, unauthorized}; -result_to_user_ejson(LoginAttr, UserName, [{eldap_entry, CN, DataIn}|_]) -> +result_to_user_ejson(LoginAttr, UserName, [{eldap_entry, CN, DataIn} | _]) -> % No guarantees on casing, so let's not make assumptions: Data = [ { string:to_lower(Key), Value} || {Key, Value} <- DataIn ], @@ -192,7 +192,7 @@ result_to_user_ejson(LoginAttr, UserName, [{eldap_entry, CN, DataIn}|_]) -> % loginattr was used to find this record, so we know it must exist; % however, multiple LoginAttr fields may exist in the LDAP record, take % the first - [CanonicalUserName|_] = [ canonical_username(U) || U <- proplists:get_value(LCLoginAttr, Data) ], + [CanonicalUserName | _] = [ canonical_username(U) || U <- proplists:get_value(LCLoginAttr, Data) ], % If you are debugging an issue where a new user has authenticated successfully % via opscode-manage , but received an odd 400 message when trying to create a @@ -231,7 +231,7 @@ close(_) -> ok. 
value_of(Key, Data, Default) -> - [R|_] = proplists:get_value(Key, Data, [Default]), + [R | _] = proplists:get_value(Key, Data, [Default]), characters_to_binary(R). characters_to_binary(Characters) when is_list(Characters) -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_base.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_base.erl index 46301e8b87..fad092c2b8 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_base.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_base.erl @@ -89,15 +89,15 @@ service_available(Req, #base_state{reqid_header_name = HeaderName} = State) -> {true, Req, State3}; _ -> case chef_license_worker:get_license() of - {valid_license, _, _, _, _, _,_} -> + {valid_license, _, _, _, _, _, _} -> {true, Req, State3}; - {commercial_grace_period, _, _, ExpDate, LicWarnMsg, _,_} -> - XOps = binary_to_list(chef_json:encode({[{<<"licenseType">>, <<"commercial">>},{<<"expirationDateTime">>, list_to_binary(ExpDate)}, + {commercial_grace_period, _, _, ExpDate, LicWarnMsg, _, _} -> + XOps = binary_to_list(chef_json:encode({[{<<"licenseType">>, <<"commercial">>}, {<<"expirationDateTime">>, list_to_binary(ExpDate)}, {<<"warningMessage">>, list_to_binary(LicWarnMsg)}, {<<"gracePeriod">>, true}]})), Req1 = wrq:set_resp_header("X-Ops-License", XOps, Req), {true, Req1, State3}; - {_, Type, _, ExpDate, LicWarnMsg, _,_} -> - XOps = binary_to_list(chef_json:encode({[{<<"licenseType">>, Type},{<<"expirationDateTime">>, list_to_binary(ExpDate)}, + {_, Type, _, ExpDate, LicWarnMsg, _, _} -> + XOps = binary_to_list(chef_json:encode({[{<<"licenseType">>, Type}, {<<"expirationDateTime">>, list_to_binary(ExpDate)}, {<<"warningMessage">>, list_to_binary(LicWarnMsg)}, {<<"gracePeriod">>, false}]})), Req1 = wrq:set_resp_header("X-Ops-License", XOps, Req), Req2 = chef_wm_util:set_json_body(Req1, {[{<<"error">>, list_to_binary(LicWarnMsg)}, @@ -113,7 +113,7 @@ service_available(Req, #base_state{reqid_header_name = HeaderName} = State) -> %% and reply 
with it as or reply with error if it's not valid. %% %% If X-Ops-Server-API-Version is not sent, the server assumes an API version of 0. --spec server_api_version(undefined|string()) -> api_version() | {error, string()}. +-spec server_api_version(undefined | string()) -> api_version() | {error, string()}. server_api_version(undefined) -> 0; server_api_version(RequestedVersion) -> @@ -209,7 +209,7 @@ forbidden(Req, #base_state{resource_mod = Mod} = State) -> {Req2, State2} = set_forbidden_msg(Req1, State1), {true, Req2, State2} end; - {AuthTuples, Req1, State1} when is_list(AuthTuples)-> + {AuthTuples, Req1, State1} when is_list(AuthTuples) -> MultiAuthResult = multi_auth_check(AuthTuples, Req1, State1), multi_auth_check_to_wm_response(MultiAuthResult); {authorized, Req1, State1} -> @@ -264,7 +264,7 @@ multi_auth_check_to_wm_response({Error, {AuthzObjectType, AuthzId, Permission}, multi_auth_check([], Req, State) -> %% Nothing left to check, must be OK {true, Req, State}; -multi_auth_check([CurrentTuple|Rest], Req, State) -> +multi_auth_check([CurrentTuple | Rest], Req, State) -> case auth_check(CurrentTuple, Req, State) of {true, UpdatedReq, UpdatedState} -> %% That one checked out; check the rest @@ -314,7 +314,7 @@ auth_check({actor, ObjectId, Permission}, Req, State) -> %% resource_state record using set_authz_id/3 (which knows how to deal %% with the different resource_state records). -spec create_in_container(container_name(), wm_req(), chef_wm:base_state()) -> - {true|false, wm_req(), chef_wm:base_state()}. + {true | false, wm_req(), chef_wm:base_state()}. create_in_container(client, Req, #base_state{chef_db_context = Ctx, organization_guid = OrgId, requestor = #chef_requestor{name = Name, type = <<"client">>}} = State) -> @@ -378,7 +378,7 @@ create_in_container(Container, Req, #base_state{requestor_id = RequestorId} = St wm_req(), chef_wm:base_state(), superuser | object_id()) -> - {true|false, + {true | false, wm_req(), chef_wm:base_state()}. 
do_create_in_container(Container, Req, @@ -455,7 +455,7 @@ is_authorized(Req, State) -> is_authorized(Req, State, Extractor) -> case verify_request_signature(Req, State, Extractor) of {true, Req1, State1} -> - case authorized_by_org_membership_check(Req1,State1) of + case authorized_by_org_membership_check(Req1, State1) of {false, Req2, State2} -> {{halt, 403}, Req2, State2}; {true, Req2, State2} -> @@ -502,7 +502,7 @@ authorized_by_org_membership_check(Req, #base_state{organization_name = OrgName, -spec set_forbidden_msg(atom(), wm_req(), chef_wm:base_state()) -> {wm_req(), chef_wm:base_state()}. -set_forbidden_msg(Perm, Req, State) when is_atom(Perm)-> +set_forbidden_msg(Perm, Req, State) when is_atom(Perm) -> Msg = iolist_to_binary(["missing ", atom_to_binary(Perm, utf8), " permission"]), set_custom_forbidden_msg(Msg, Req, State). @@ -529,7 +529,7 @@ delete_object(DbContext, Object, RequestorId) -> read_req_id(ReqHeaderName, Req) -> case wrq:get_req_header(ReqHeaderName, Req) of undefined -> - base64:encode(term_to_binary(make_ref())); + base64:encode(term_to_binary(make_ref(), [{minor_version, 1}])); HV -> iolist_to_binary(HV) end. @@ -642,7 +642,7 @@ set_authz_id(Id, #user_state{} = U, user) -> -spec check_cookbook_authz(Cookbooks :: [#chef_cookbook_version{}], Req :: wm_req(), State :: #base_state{}) -> - ok | {error, {[any(),...]}}. + ok | {error, {[any(), ...]}}. check_cookbook_authz(Cookbooks, _Req, #base_state{reqid = ReqId, requestor_id = RequestorId}) -> Resources = [{AuthzId, Name} || #chef_cookbook_version{name = Name, authz_id = AuthzId} <- Cookbooks], @@ -683,7 +683,7 @@ is_user_in_org(Type, DbContext, Name, OrgName) -> %% @doc Given a `{Mod, Fun}' tuple, generate a stats hero metric with a prefix appropriate %% for stats_hero aggregation. An error is thrown if `Mod' is unknown. This is where we %% encode the mapping of module to upstream label. --spec stats_hero_label({Mod::metric_module(), Fun::atom()}) -> <<_:16,_:_*8>>. 
+-spec stats_hero_label({Mod :: metric_module(), Fun :: atom()}) -> <<_:16, _:_*8>>. stats_hero_label({chef_sql, Fun}) -> chef_metrics:label(rdbms, {chef_sql, Fun}); stats_hero_label({oc_chef_authz, Fun}) -> @@ -1045,8 +1045,8 @@ verify_request_signature(Req, -spec create_from_json(Req :: #wm_reqdata{}, State :: #base_state{}, - RecType :: chef_object_name()| chef_cookbook_version | oc_chef_cookbook_artifact_version, - ContainerId :: object_id() | {authz_id, AuthzId::object_id() | undefined}, + RecType :: chef_object_name() | chef_cookbook_version | oc_chef_cookbook_artifact_version, + ContainerId :: object_id() | {authz_id, AuthzId :: object_id() | undefined}, ObjectEjson :: ejson_term() | binary() | {binary(), ejson_term()} | @@ -1099,7 +1099,7 @@ create_from_json(#wm_reqdata{} = Req, Uri = oc_chef_wm_routes:route(TypeName, Req, Args), BodyEJ0 = {[{<<"uri">>, Uri}]}, BodyEJ1 = call_if_exported(ResourceMod, finalize_create_body, [Req, State, ObjectRec, BodyEJ0], - fun(_,_,_,EJ) -> EJ end), + fun(_, _, _, EJ) -> EJ end), Req1 = chef_wm_util:set_json_body(Req, BodyEJ1), {true, chef_wm_util:set_location_of_created_resource(Uri, Req1), State#base_state{log_msg = LogMsg}}; What -> @@ -1140,7 +1140,7 @@ update_from_json(#wm_reqdata{} = Req, #base_state{reqid=ReqId, true -> State1 = State#base_state{log_msg = ignore_update_for_duplicate}, Body = call_if_exported(ResourceMod, finalize_update_body, [Req, State, ObjectEjson], - fun(_,_,EJ) -> EJ end), + fun(_, _, EJ) -> EJ end), {true, chef_wm_util:set_json_body(Req, Body), State1}; false -> case chef_db:update(ObjectRec, DbContext, ActorId) of @@ -1148,7 +1148,7 @@ update_from_json(#wm_reqdata{} = Req, #base_state{reqid=ReqId, IsRename = chef_object:name(OrigObjectRec) =/= chef_object:name(ObjectRec), Req1 = handle_rename(ObjectRec, Req, State, IsRename), Body = call_if_exported(ResourceMod, finalize_update_body, [Req, State, ObjectEjson], - fun(_,_,EJ) -> EJ end), + fun(_, _, EJ) -> EJ end), {true, 
chef_wm_util:set_json_body(Req1, Body), State}; not_found -> %% We will get this if no rows were affected by the query. This could @@ -1162,7 +1162,7 @@ update_from_json(#wm_reqdata{} = Req, #base_state{reqid=ReqId, {{halt, 404}, Req1, State1}; {conflict, _} -> Name = chef_object:name(ObjectRec), - RecType = erlang:element(1,ObjectRec), + RecType = erlang:element(1, ObjectRec), LogMsg = {RecType, name_conflict, Name}, ConflictMsg = ResourceMod:conflict_message(Name), {{halt, 409}, chef_wm_util:set_json_body(Req, ConflictMsg), @@ -1204,7 +1204,7 @@ verify_request_message(error_finding_user_or_client, User, _Org) -> {[{<<"error">>, [Msg]}]}; verify_request_message(bad_sig, User, _Org) -> Msg = iolist_to_binary([<<"Invalid signature for user or client '">>, - User,<<"'">>]), + User, <<"'">>]), {[{<<"error">>, [Msg]}]}; verify_request_message(ErrorType, User, Org) when ErrorType =:= not_associated_with_org orelse @@ -1353,7 +1353,7 @@ call_if_exported(Mod, FunName, Args, DefaultFun) -> %% Default route_args consist of {TypeName, [{name, ObjectName}] % %unless overridden by a resource module. -route_args(ObjectRec,_State) -> +route_args(ObjectRec, _State) -> TypeName = chef_object:type_name(ObjectRec), {TypeName, [{name, chef_object:name(ObjectRec)}]}. diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_key_base.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_key_base.erl index 33734b68d6..58dd161926 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_key_base.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_key_base.erl @@ -33,7 +33,7 @@ update_object_embedded_key_data_v0(Req, State, ObjectRec, EJ) -> case chef_key_base:maybe_generate_key_pair(EJ) of keygen_timeout -> {{halt, 503}, Req, State#base_state{log_msg = keygen_timeout}}; - EJWithKeys-> + EJWithKeys -> oc_chef_wm_base:update_from_json(Req, State, ObjectRec, EJWithKeys) end. 
diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_container.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_container.erl index 28f503bc55..e7bdd6406a 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_container.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_container.erl @@ -94,5 +94,5 @@ delete_resource(Req, #base_state{chef_db_context = DbContext, Ejson = oc_chef_container:assemble_container_ejson(Container), {true, chef_wm_util:set_json_body(Req, Ejson), State}. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact.erl index ce7caa551d..4f5627c042 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact.erl @@ -97,5 +97,5 @@ to_json(Req, #base_state{resource_state = #cookbook_artifacts_state{ ), {jiffy:encode({ArtifactList}), Req, State}. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact_version.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact_version.erl index 0316769ebf..3c61ded454 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact_version.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact_version.erl @@ -147,7 +147,7 @@ delete_resource(Req, #base_state{ Ejson = oc_chef_cookbook_artifact_version:to_json(CAVRec, ExternalUrl), {true, chef_wm_util:set_json_body(Req, Ejson), State}. 
-malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). conflict_message(_Name) -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_group.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_group.erl index 829a4bdcfb..f4350369ae 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_group.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_group.erl @@ -147,5 +147,5 @@ delete_resource(Req, #base_state{ Ejson = oc_chef_group:assemble_group_ejson(Group, OrgName), {true, chef_wm_util:set_json_body(Req, Ejson), State}. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_organization.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_organization.erl index 15c01bb986..70be9d2009 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_organization.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_organization.erl @@ -164,5 +164,5 @@ delete_read_access_group(DbContext, AuthzContext, OrgName, RequestorId) -> ok end. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy.erl index 01265afc89..a4d7f23fdd 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy.erl @@ -111,7 +111,7 @@ to_json(Req, #base_state{chef_db_context = DbContext, RevisionsMap = build_revisions_map(Revisions, {[]}), {chef_json:encode(RevisionsMap), Req, State}. 
-build_revisions_map([Revision|Rest],EJSON) -> +build_revisions_map([Revision | Rest], EJSON) -> Updated = ej:set_p({"revisions", Revision}, EJSON, {[]}), build_revisions_map(Rest, Updated); build_revisions_map([], EJSON) -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy_group.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy_group.erl index f067da84ea..f2154cfdb4 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy_group.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_policy_group.erl @@ -122,12 +122,12 @@ build_policy_revisions_json(Req, ReqId, DbContext, OrgId, PolicyGroupName) -> PolicyRevisionIDs -> URI = oc_chef_wm_routes:route(policy_group, Req, [{name, PolicyGroupName}]), - BaseEJSON = ej:set_p({"uri"}, {[{<<"policies">>,{[]}}]}, URI), + BaseEJSON = ej:set_p({"uri"}, {[{<<"policies">>, {[]}}]}, URI), EJSONWithPolicies = build_nested_list_data(PolicyRevisionIDs, BaseEJSON), chef_json:encode(EJSONWithPolicies) end. -build_nested_list_data([Row|Rest], EJSON) -> +build_nested_list_data([Row | Rest], EJSON) -> {_PolicyGroupName, PolicyName, RevisionID} = Row, NewEJSON = ej:set_p({"policies", PolicyName, "revision_id"}, EJSON, RevisionID), build_nested_list_data(Rest, NewEJSON); diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_user.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_user.erl index ed2aa576bc..348c3a84b3 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_user.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_user.erl @@ -331,5 +331,5 @@ conflict_message(Name) -> email_update_error_message() -> {[{<<"error">>, <<"Use chef-manage/Chef Manage to change user email">>}]}. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). 
diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_associations.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_associations.erl index 00fff1b12d..b96bfc5706 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_associations.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_associations.erl @@ -224,5 +224,5 @@ deprovision_user(Req, #base_state{organization_name = OrgName, {{halt, 500}, Req, State#base_state{log_msg = {error_in_deprovision, Error}}} end. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_invites.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_invites.erl index bb6bba906b..4099b1bc2d 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_invites.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_org_invites.erl @@ -120,7 +120,7 @@ create_path(Req, State) -> to_json(Req, #base_state{ organization_guid = OrgId, chef_db_context = DbContext } = State) -> case chef_db:list(#oc_chef_org_user_invite{org_id = OrgId}, DbContext) of - Invitations when is_list(Invitations)-> + Invitations when is_list(Invitations) -> EJson = oc_chef_org_user_invite:ejson_from_list(Invitations, <<"username">>), {chef_json:encode(EJson), Req, State}; Error -> @@ -199,7 +199,7 @@ invitation_response(Req, #base_state{ organization_name = OrgName, Req2 = chef_wm_util:set_json_body(Req1, {[{<<"uri">>, Uri}, OrgUser, Org, UserOut]}), {true, Req2, State#base_state{log_msg = LogMsg}}. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). 
org_name_mismatch_message(OrgName, BadOrgName) -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_organizations.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_organizations.erl index 8b89e9e583..16b0385c8f 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_organizations.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_organizations.erl @@ -204,7 +204,7 @@ maybe_create_client(true, Req, id = OrgId, name = OrgName }} = ResourceState} = State) -> - ClientName = <<OrgName/binary,"-validator">>, + ClientName = <<OrgName/binary, "-validator">>, ClientEJson = {[{<<"name">>, ClientName}, {<<"create_key">>, true}, {<<"validator">>, true}]}, diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policies.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policies.erl index ffba8bfecf..64002b91c9 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policies.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policies.erl @@ -86,13 +86,13 @@ build_base_policy_list_ejson(Req, PolicyRevisions) -> UriMap= [{Name, RouteFun(Name)} || {Name, _Rev} <- PolicyRevisions], build_nested_base_ejson(UriMap, {[]}). -build_nested_base_ejson([{PolicyName, URI}|Rest], EJSON) -> +build_nested_base_ejson([{PolicyName, URI} | Rest], EJSON) -> NewEJSON = ej:set_p({PolicyName, "uri"}, EJSON, URI), build_nested_base_ejson(Rest, NewEJSON); build_nested_base_ejson([], EJSON) -> EJSON.
-build_policy_list_ejson([{PolicyName, RevisionID}|Rest], EJSON) -> +build_policy_list_ejson([{PolicyName, RevisionID} | Rest], EJSON) -> NewEJSON = ej:set_p({PolicyName, "revisions", RevisionID}, EJSON, {[]}), build_policy_list_ejson(Rest, NewEJSON); build_policy_list_ejson([], EJSON) -> diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_group_policy_rev.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_group_policy_rev.erl index db8d851470..a16d66067b 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_group_policy_rev.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_group_policy_rev.erl @@ -143,13 +143,13 @@ stash_permissions_objects_authz_ids(Halt, State) when is_tuple(Halt) -> State; stash_permissions_objects_authz_ids([], State) -> State; -stash_permissions_objects_authz_ids([{create_in_container, _C}|Rest], State) -> +stash_permissions_objects_authz_ids([{create_in_container, _C} | Rest], State) -> stash_permissions_objects_authz_ids(Rest, State); -stash_permissions_objects_authz_ids([{policy_group,AzID}|Rest], #base_state{resource_state = PolicyState} = State) -> +stash_permissions_objects_authz_ids([{policy_group, AzID} | Rest], #base_state{resource_state = PolicyState} = State) -> UpdatedPolicyState = PolicyState#policy_state{policy_group_authz_id = AzID}, UpdatedBaseState = State#base_state{resource_state = UpdatedPolicyState}, stash_permissions_objects_authz_ids(Rest, UpdatedBaseState); -stash_permissions_objects_authz_ids([{policy,AzID}|Rest], #base_state{resource_state = PolicyState} = State) -> +stash_permissions_objects_authz_ids([{policy, AzID} | Rest], #base_state{resource_state = PolicyState} = State) -> UpdatedPolicyState = PolicyState#policy_state{policy_authz_id = AzID}, UpdatedBaseState = State#base_state{resource_state = UpdatedPolicyState}, stash_permissions_objects_authz_ids(Rest, UpdatedBaseState). 
@@ -201,7 +201,7 @@ prereq_objects_to_permissions(PrereqObjects) -> prereq_objects_to_permissions([], PermissionsList) -> PermissionsList; -prereq_objects_to_permissions([PrereqObject|Rest], PermissionsList) -> +prereq_objects_to_permissions([PrereqObject | Rest], PermissionsList) -> RequiredPermission = prereq_object_permission(PrereqObject), UpdatedPermissionList = [RequiredPermission | PermissionsList ], prereq_objects_to_permissions(Rest, UpdatedPermissionList). @@ -293,5 +293,5 @@ delete_resource(Req, #base_state{chef_db_context = DbContext, {{halt, 500}, Req, State#base_state{log_msg = ERROR}} end. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_groups.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_groups.erl index bef3fed304..89e02530b6 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_groups.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_policy_groups.erl @@ -78,7 +78,7 @@ malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). -spec to_json(#wm_reqdata{}, #base_state{}) -> {binary(), #wm_reqdata{}, #base_state{}}. -to_json(Req, #base_state{chef_db_context = DbContext,organization_guid = OrgId, reqid = ReqId, resource_state = StubRec} = State) -> +to_json(Req, #base_state{chef_db_context = DbContext, organization_guid = OrgId, reqid = ReqId, resource_state = StubRec} = State) -> %% we query for just the names first so we get groups that are empty, %% then we get the list of all policy revisions associated to a group and %% fill this in to the data structure from the first step. @@ -99,13 +99,13 @@ make_base_ejson(Names, Req) -> UriMap= [{Name, RouteFun(Name)} || Name <- Names], build_nested_base_ejson(UriMap, {[]}). 
-build_nested_base_ejson([{[PolicyGroupName], URI}|Rest], EJSON) -> +build_nested_base_ejson([{[PolicyGroupName], URI} | Rest], EJSON) -> NewEJSON = ej:set_p({PolicyGroupName, "uri"}, EJSON, URI), build_nested_base_ejson(Rest, NewEJSON); build_nested_base_ejson([], EJSON) -> EJSON. -build_nested_list_data([Row|Rest], EJSON) -> +build_nested_list_data([Row | Rest], EJSON) -> {PolicyGroupName, PolicyName, RevisionID} = Row, NewEJSON = ej:set_p({PolicyGroupName, "policies", PolicyName, "revision_id"}, EJSON, RevisionID), build_nested_list_data(Rest, NewEJSON); diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_sup.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_sup.erl index b21f64f7fe..2f2c4d371c 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_sup.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_sup.erl @@ -64,7 +64,7 @@ init([]) -> Services1 = case chef_cbv_cache:enabled() of true -> [{chef_cbv_cache, {chef_cbv_cache, start_link, []}, - permanent, 5000, worker, [chef_cvb_cache]}| Services]; + permanent, 5000, worker, [chef_cvb_cache]} | Services]; false -> Services end, @@ -88,8 +88,8 @@ dispatch_table() -> maybe_add_default_org_routes(Dispatch) -> case oc_chef_wm_routes:default_orgname() of DefaultOrgName when is_binary(DefaultOrgName), - byte_size(DefaultOrgName) > 0-> - add_default_org_routes(Dispatch,DefaultOrgName); + byte_size(DefaultOrgName) > 0 -> + add_default_org_routes(Dispatch, DefaultOrgName); _ -> Dispatch end. @@ -184,7 +184,7 @@ default_resource_init() -> ], case envy:get(oc_chef_wm, request_tracing, undefined, boolean) of true -> - [{trace, true}|Defaults]; + [{trace, true} | Defaults]; _ -> Defaults end. 
diff --git a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_users.erl b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_users.erl index 67947f06b1..0bdee16266 100644 --- a/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_users.erl +++ b/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_users.erl @@ -142,5 +142,5 @@ verbose_user(#chef_user{username = UserName, email = EMail, serialized_object = conflict_message(_Name) -> {[{<<"error">>, [<<"Username or email address already in use.">>]}]}. -malformed_request_message(Any, _Req, _state) -> +malformed_request_message(Any, _Req, _State) -> error({unexpected_malformed_request_message, Any}). diff --git a/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_action_tests.erl b/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_action_tests.erl index c4a6a9fe54..bdf9c87bd1 100755 --- a/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_action_tests.erl +++ b/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_action_tests.erl @@ -54,7 +54,7 @@ msg_with_payload(Task) -> {<<"task">>, Task}, {<<"entity_type">>, <<"node">>}, {<<"entity_name">>, <<"db">>}, - {<<"data">>, {[{<<"name">>,<<"db">>}]}} + {<<"data">>, {[{<<"name">>, <<"db">>}]}} ]}. 
task_for_cookbooks_test_() -> @@ -99,7 +99,7 @@ extract_entity_info_test_() -> fun() -> oc_chef_wm_test_utils:setup(MockedModules) end, fun(_) -> oc_chef_wm_test_utils:cleanup(MockedModules) end, [{"client entity info", - fun() -> State = #client_state{client_data = {[{<<"name">>,<<"node-foo">>}]} }, + fun() -> State = #client_state{client_data = {[{<<"name">>, <<"node-foo">>}]} }, meck:expect(chef_wm_util,object_name, fun(client, req) -> undefined end), Ret = oc_chef_action:extract_entity_info(req, State), Expected = entity({[{<<"name">>, <<"node-foo">>}]}, <<"client">>, <<"node-foo">>), @@ -138,49 +138,49 @@ extract_entity_info_test_() -> ?assertEqual(Expected, Ret) end}, {"environment entity info", - fun() -> State = #environment_state{environment_data = {[{<<"name">>,<<"production">> }]} }, + fun() -> State = #environment_state{environment_data = {[{<<"name">>, <<"production">> }]} }, meck:expect(chef_wm_util,object_name, fun(environment, req) -> undefined end), Ret = oc_chef_action:extract_entity_info(req, State), Expected = entity({[{<<"name">>, <<"production">>}]}, <<"environment">>, <<"production">>), ?assertEqual(Expected, Ret) end}, {"group entity info", - fun() -> State = #group_state{group_data = {[{<<"name">>,<<"sysadmins">> }]} }, + fun() -> State = #group_state{group_data = {[{<<"name">>, <<"sysadmins">> }]} }, meck:expect(chef_wm_util,extract_from_path, fun(group_name, req) -> undefined end), Ret = oc_chef_action:extract_entity_info(req, State), Expected = entity({[{<<"name">>, <<"sysadmins">>}]}, <<"group">>, <<"sysadmins">>), ?assertEqual(Expected, Ret) end}, {"node entity info", - fun() -> State = #node_state{node_data = {[{<<"name">>,<<"node-foo">> }]} }, + fun() -> State = #node_state{node_data = {[{<<"name">>, <<"node-foo">> }]} }, meck:expect(chef_wm_util,object_name, fun(node, req) -> undefined end), Ret = oc_chef_action:extract_entity_info(req, State), Expected = entity({[{<<"name">>, <<"node-foo">>}]}, <<"node">>, <<"node-foo">>), 
?assertEqual(Expected, Ret) end}, {"role entity info", - fun() -> State = #role_state{role_data = {[{<<"name">>,<<"webserver">> }]} }, + fun() -> State = #role_state{role_data = {[{<<"name">>, <<"webserver">> }]} }, meck:expect(chef_wm_util,object_name, fun(role, req) -> undefined end), Ret = oc_chef_action:extract_entity_info(req, State), Expected = entity({[{<<"name">>, <<"webserver">>}]}, <<"role">>, <<"webserver">>), ?assertEqual(Expected, Ret) end}, {"user entity info", - fun() -> State = #user_state{user_data = {[{<<"name">>,<<"webserver">> }]} }, + fun() -> State = #user_state{user_data = {[{<<"name">>, <<"webserver">> }]} }, meck:expect(chef_wm_util,object_name, fun(user, req) -> undefined end), Ret = oc_chef_action:extract_entity_info(req, State), Expected = entity({[{<<"name">>, <<"webserver">>}]}, <<"user">>, <<"webserver">>), ?assertEqual(Expected, Ret) end}, {"keys entity info", - fun() -> State = #key_state{key_data= {[{<<"name">>,<<"new-key">> }]}, parent_name = <<"bob">>, type = client}, + fun() -> State = #key_state{key_data= {[{<<"name">>, <<"new-key">> }]}, parent_name = <<"bob">>, type = client}, meck:expect(chef_wm_util,object_name, fun(key, req) -> undefined end), Ret = oc_chef_action:extract_entity_info(req, State), Expected = parent_entity({[{<<"name">>, <<"new-key">>}]}, <<"client">>, <<"bob">>, <<"key">>, <<"new-key">>), ?assertEqual(Expected, Ret) end}, {"policy entity info", - fun() -> State = #policy_state{policy_data= {[{<<"name">>,<<"expected_policy_name">>}]}}, + fun() -> State = #policy_state{policy_data= {[{<<"name">>, <<"expected_policy_name">>}]}}, Stub = fun(policy, req) -> <<"expected_policy_name">>; (policy_group_asoc_name, req) -> @@ -210,7 +210,7 @@ hostname_test_() -> HostFQDN = <<"hostname.example.com">>, ok = application:set_env(oc_chef_wm, actions_fqdn, HostFQDN), [{"gets fqdn correctly", - fun()-> Ret = oc_chef_action:hostname(), + fun() -> Ret = oc_chef_action:hostname(), ?assertEqual(<<"hostname.example.com">>, Ret) 
end } @@ -259,7 +259,7 @@ create_message_test_() -> State = #base_state{requestor = #chef_requestor{name = <<"rob">>, type = <<"user">>}, reqid = <<"Xfh5mCQvjRgWDdlevrdyGt8M4lecXmN3gpGXrKKiUYqKdeD3">>, organization_name = <<"cmwest">>, - resource_state=#node_state{node_data = {[{<<"name">>,<<"db">>}]} }}, + resource_state=#node_state{node_data = {[{<<"name">>, <<"db">>}]} }}, ok = application:set_env(oc_chef_wm, actions_fqdn, HostFQDN), {foreach, fun() -> oc_chef_wm_test_utils:setup(MockedModules), diff --git a/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_data_collector_tests.erl b/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_data_collector_tests.erl index 74e325542e..11f3195c81 100755 --- a/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_data_collector_tests.erl +++ b/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_data_collector_tests.erl @@ -107,10 +107,10 @@ test_notify_association() -> {"POST", 201, 'invite'}]]. test_notify_client() -> - ResourceState = #client_state{client_data = {[{<<"name">>,<<"node-foo">>}]}}, + ResourceState = #client_state{client_data = {[{<<"name">>, <<"node-foo">>}]}}, ExpectedMsgData = {"client", "node-foo", - [{<<"data">>, {[{<<"name">>,<<"node-foo">>}]}}]}, + [{<<"data">>, {[{<<"name">>, <<"node-foo">>}]}}]}, [ test_notify("client", ReqMethod, ResponseCode, ResourceState, ExpectedMsgData, Task) || {ReqMethod, ResponseCode, Task} <- [ {"PUT", 200, 'update'}, {"PUT", 201, 'update'}, @@ -134,7 +134,7 @@ test_notify_cookbook() -> test_notify_environment() -> ResourceState = #environment_state{environment_data=entity_data()}, - ExpectedMsgData = {"environment", "db", [{<<"data">>, {[{<<"name">>,<<"db">>}]}}]}, + ExpectedMsgData = {"environment", "db", [{<<"data">>, {[{<<"name">>, <<"db">>}]}}]}, [ test_notify("environment", ReqMethod, ResponseCode, ResourceState, ExpectedMsgData, Task) || {ReqMethod, ResponseCode, Task} <- [ {"PUT", 200, 'update'}, {"PUT", 201, 'update'}, @@ -145,7 +145,7 @@ test_notify_group() -> ResourceState = 
#group_state{group_data=entity_data(), group_authz_id='11111', oc_chef_group = #oc_chef_group{}}, - ExpectedMsgData = {"group", "db", [{<<"data">>, {[{<<"name">>,<<"db">>}]}}]}, + ExpectedMsgData = {"group", "db", [{<<"data">>, {[{<<"name">>, <<"db">>}]}}]}, [ test_notify("group", ReqMethod, ResponseCode, ResourceState, ExpectedMsgData, Task) || {ReqMethod, ResponseCode, Task} <- [ {"PUT", 200, 'update'}, {"PUT", 201, 'update'}, @@ -154,7 +154,7 @@ test_notify_group() -> test_notify_node() -> ResourceState = #node_state{node_data=entity_data()}, - ExpectedMsgData = {"node", "db", [{<<"data">>, {[{<<"name">>,<<"db">>}]}}]}, + ExpectedMsgData = {"node", "db", [{<<"data">>, {[{<<"name">>, <<"db">>}]}}]}, [ test_notify("node", ReqMethod, ResponseCode, ResourceState, ExpectedMsgData, Task) || {ReqMethod, ResponseCode, Task} <- [ {"PUT", 200, 'update'}, {"PUT", 201, 'update'}, @@ -162,9 +162,9 @@ test_notify_node() -> {"DELETE", 200, 'delete'}]]. test_notify_organization() -> - ResourceState = #organization_state{organization_data = {[{<<"name">>,<<"cmwest">>}]}, + ResourceState = #organization_state{organization_data = {[{<<"name">>, <<"cmwest">>}]}, oc_chef_organization = #oc_chef_organization{name = <<"cmwest">>}}, - ExpectedMsgData = {"organization", "cmwest", [{<<"data">>, {[{<<"name">>,<<"cmwest">>}]}}]}, + ExpectedMsgData = {"organization", "cmwest", [{<<"data">>, {[{<<"name">>, <<"cmwest">>}]}}]}, [ test_notify("organization", ReqMethod, ResponseCode, ResourceState, ExpectedMsgData, Task) || {ReqMethod, ResponseCode, Task} <- [ {"PUT", 200, 'update'}, {"PUT", 201, 'update'}, @@ -173,7 +173,7 @@ test_notify_organization() -> test_notify_role() -> ResourceState = #role_state{role_data=entity_data()}, - ExpectedMsgData = {"role", "db", [{<<"data">>, {[{<<"name">>,<<"db">>}]}}]}, + ExpectedMsgData = {"role", "db", [{<<"data">>, {[{<<"name">>, <<"db">>}]}}]}, [ test_notify("role", ReqMethod, ResponseCode, ResourceState, ExpectedMsgData, Task) || {ReqMethod, 
ResponseCode, Task} <- [ {"PUT", 200, 'update'}, {"PUT", 201, 'update'}, @@ -182,7 +182,7 @@ test_notify_role() -> test_notify_user() -> ResourceState = #user_state{user_data=entity_data()}, - ExpectedMsgData = {"user", "db", [{<<"data">>, {[{<<"name">>,<<"db">>}]}}]}, + ExpectedMsgData = {"user", "db", [{<<"data">>, {[{<<"name">>, <<"db">>}]}}]}, [ test_notify("user", ReqMethod, ResponseCode, ResourceState, ExpectedMsgData, Task) || {ReqMethod, ResponseCode, Task} <- [ {"PUT", 200, 'update'}, {"PUT", 201, 'update'}, @@ -204,7 +204,7 @@ test_notify_key() -> {"POST", 201, 'create'}, {"DELETE", 200, 'delete'}]]. test_notify_policy() -> - ResourceState = #policy_state{policy_data= {[{<<"name">>,<<"expected_policy_name">>}]}}, + ResourceState = #policy_state{policy_data= {[{<<"name">>, <<"expected_policy_name">>}]}}, ExpectedMsgData = {"policy", "expected_policy_name", [{<<"parent_type">>, <<"policy_group">>}, diff --git a/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_wm_authn_ldap_tests.erl b/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_wm_authn_ldap_tests.erl index 6b8954ca43..23a86a0380 100644 --- a/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_wm_authn_ldap_tests.erl +++ b/src/oc_erchef/apps/oc_chef_wm/test/oc_chef_wm_authn_ldap_tests.erl @@ -68,17 +68,17 @@ value_of_test_() -> Data = [{"key1", ["a_value"]}, {"key2", ["first", "second"]}], [{"returns a scalar (binary) value for the given key in a proplist where the values are arrays", - fun()-> + fun() -> ?assertEqual(<<"a_value">>, oc_chef_wm_authn_ldap:value_of("key1", Data, "default")) end }, {"returns the first value when there are multiple items in the array", - fun()-> + fun() -> ?assertEqual(<<"first">>, oc_chef_wm_authn_ldap:value_of("key2", Data, "default")) end }, {"returns the default if the key is missing", - fun()-> + fun() -> ?assertEqual(<<"default">>, oc_chef_wm_authn_ldap:value_of("key3", Data, "default")) end } @@ -86,19 +86,19 @@ value_of_test_() -> canonical_username_test_() -> [{"returns a lowercased 
bindary", - fun()-> + fun() -> ?assertEqual(<<"foobar">>, oc_chef_wm_authn_ldap:canonical_username("FOOBAR")) end}, {"replaces special characters with _", - fun()-> + fun() -> ?assertEqual(<<"f_o_o_b_a_r">>, oc_chef_wm_authn_ldap:canonical_username("f^o&o)b@a$r")) end}, {"does not replace 0-9", - fun()-> + fun() -> ?assertEqual(<<"0123456789">>, oc_chef_wm_authn_ldap:canonical_username("0123456789")) end}, {"does not replace -", - fun()-> + fun() -> ?assertEqual(<<"foo-bar">>, oc_chef_wm_authn_ldap:canonical_username("foo-bar")) end} ]. diff --git a/src/oc_erchef/config/app.config b/src/oc_erchef/config/app.config index 5ab30c7a41..417af05d04 100644 --- a/src/oc_erchef/config/app.config +++ b/src/oc_erchef/config/app.config @@ -1,6 +1,6 @@ %% -*- mode: erlang -*- [ - {kernel, [{start_pg2, true}]}, + {kernel, [{start_pg, true}]}, %% SASL config {sasl, [ {sasl_error_logger, {file, "log/sasl-error.log"}}, diff --git a/src/oc_erchef/doc/rfc-caching-search-results.md b/src/oc_erchef/doc/rfc-caching-search-results.md index 460d5cb1fa..bca0958ed9 100644 --- a/src/oc_erchef/doc/rfc-caching-search-results.md +++ b/src/oc_erchef/doc/rfc-caching-search-results.md @@ -138,7 +138,7 @@ in the search resource. In `oc_chef_wm` add a dependency on eredis. The eredis client is a gen_server with async send/receive. Since exclusive access is not -needed, we could use pg2 to create a handful of clients to use. Write +needed, we could use pg to create a handful of clients to use. Write the cache logic. 
diff --git a/src/oc_erchef/elvis.config b/src/oc_erchef/elvis.config index 2dd1b191e0..9ce47d7f08 100644 --- a/src/oc_erchef/elvis.config +++ b/src/oc_erchef/elvis.config @@ -13,8 +13,8 @@ chef_test_suite_helper, oc_chef_wm_app]}}, {elvis_style, no_nested_try_catch}, - {elvis_style, no_tabs}, - {elvis_style, no_trailing_whitespace}, + {elvis_text_style, no_tabs}, + {elvis_text_style, no_trailing_whitespace}, {elvis_style, operator_spaces}, {elvis_style, used_ignored_variable}, {elvis_style, variable_naming_convention} diff --git a/src/oc_erchef/habitat/plan.sh b/src/oc_erchef/habitat/plan.sh index 521ddc905f..4b27ec1c64 100644 --- a/src/oc_erchef/habitat/plan.sh +++ b/src/oc_erchef/habitat/plan.sh @@ -3,15 +3,15 @@ pkg_origin=chef pkg_license=('Apache-2.0') pkg_maintainer="The Chef Server Maintainers " pkg_deps=( - core/erlang24 + core/erlang26 core/cacerts core/coreutils core/curl core/openssl core/gcc-libs - core/ruby30 - core/sqitch_pg - core/gecode + core/ruby3_4 + core/sqitch + core/gecode3 core/libffi core/glibc ) @@ -75,14 +75,14 @@ do_prepare() { do_build() { - _ruby_dir="$(pkg_path_for core/ruby30)" + _ruby_dir="$(pkg_path_for core/ruby3_4)" export REL_VERSION=$pkg_version export USE_SYSTEM_GECODE=1 export GEM_HOME="${pkg_path}/vendor/bundle" export GEM_PATH="${_ruby_dir}:${GEM_HOME}" - export LIBRARY_PATH="$(pkg_path_for core/gecode)/lib" - export LD_LIBRARY_PATH="$(pkg_path_for core/gecode)/lib" - export CPLUS_INCLUDE_PATH="$(pkg_path_for core/gecode)/include" + export LIBRARY_PATH="$(pkg_path_for core/gecode3)/lib" + export LD_LIBRARY_PATH="$(pkg_path_for core/gecode3)/lib" + export CPLUS_INCLUDE_PATH="$(pkg_path_for core/gecode3)/include" mkdir -p "$GEM_HOME" make omnibus diff --git a/src/oc_erchef/priv/reindex-opc-organization b/src/oc_erchef/priv/reindex-opc-organization index 536659d61b..e8fd8c734b 100755 --- a/src/oc_erchef/priv/reindex-opc-organization +++ b/src/oc_erchef/priv/reindex-opc-organization @@ -137,7 +137,7 @@ print_missing(Missing, 
OrgName) -> make_context(OrgName, IntLB) -> {ok, ServerAPIMinVersion} = rpc:call(?ERCHEF, oc_erchef_app, server_api_version, [min]), - ReqId = base64:encode(erlang:md5(term_to_binary(make_ref()))), + ReqId = base64:encode(erlang:md5(term_to_binary(make_ref(), [{minor_version, 1}]))), % TODO api versioning to be handled when we move this into an omnibus template rpc:call(?ERCHEF, chef_db, make_context, [ServerAPIMinVersion, ReqId, find_dl_headers(OrgName, IntLB)]). @@ -165,7 +165,8 @@ find_dl_headers(OrgName, IntLB) when is_list(OrgName) -> % to nginx in the request but skipping the verification as we are making the RPC call. % as chef-server-ctl reindex --all-org can be triggered only from inside the box and with root privileges. Ssl3 = proplists:delete(verify, proplists:get_value(ssl_options, Ssl2,[])), - {ok, "200", _Headers, Body} = rpc:call(?ERCHEF, ibrowse,send_req, [IntLB ++ "/_route/organizations/" ++ OrgName, [], get, [], [{ssl_options, Ssl3}]]), + Ssl4 = [{verify, verify_none} | Ssl3], + {ok, "200", _Headers, Body} = rpc:call(?ERCHEF, ibrowse,send_req, [IntLB ++ "/_route/organizations/" ++ OrgName, [], get, [], [{ssl_options, Ssl4}]]), Json = rpc:call(?ERCHEF, jiffy, decode, [Body]), SubJson = rpc:call(?ERCHEF, ej, get, [{<<"config">>, <<"merged">>}, Json]), {KVList} = SubJson, diff --git a/src/oc_erchef/priv/reindex-opc-piecewise b/src/oc_erchef/priv/reindex-opc-piecewise index 95b0d885cf..5640a881a7 100644 --- a/src/oc_erchef/priv/reindex-opc-piecewise +++ b/src/oc_erchef/priv/reindex-opc-piecewise @@ -164,7 +164,7 @@ print_missing(Missing) -> make_context(OrgName, IntLB) -> {ok, ServerAPIMinVersion} = rpc:call(?ERCHEF, oc_erchef_app, server_api_version, [min]), - ReqId = base64:encode(erlang:md5(term_to_binary(make_ref()))), + ReqId = base64:encode(erlang:md5(term_to_binary(make_ref(), [{minor_version, 1}]))), % TODO api versioning to be handled when we move this into an omnibus template rpc:call(?ERCHEF, chef_db, make_context, [ServerAPIMinVersion, 
ReqId, find_dl_headers(OrgName, IntLB)]). diff --git a/src/oc_erchef/rebar.config b/src/oc_erchef/rebar.config index ff9336f9a0..38b54e3729 100644 --- a/src/oc_erchef/rebar.config +++ b/src/oc_erchef/rebar.config @@ -2,7 +2,7 @@ %% -*- tab-width: 4;erlang-indent-level: 4;indent-tabs-mode: nil -*- %% ex: ts=4 sw=4 ft=erlang et -{require_otp_vsn, "24.3.2"}. +{require_otp_vsn, "26.2.5.2"}. {deps, [ %% lager has to come first since we use its parse transform @@ -26,10 +26,10 @@ {git, "https://github.com/chef/ej", {branch, "master"}}}, {envy, ".*", {git, "https://github.com/markan/envy", {branch, "master"}}}, - {eper, ".*", - {git, "https://github.com/massemanet/eper", {branch, "master"}}}, +% {eper, ".*", +% {git, "https://github.com/massemanet/eper", {branch, "master"}}}, {erlcloud, ".*", - {git, "https://github.com/chef/erlcloud", {branch, "lbaker/presigned-headers"}}}, + {git, "https://github.com/chef/erlcloud", {branch, "CHEF-11677/CHEF-12498/lbaker"}}}, {erlware_commons, ".*", {git, "https://github.com/chef/erlware_commons", {branch, "lbaker/fix_for_ftmap"}}}, {folsom, ".*", @@ -141,7 +141,8 @@ common_test, ssl, eunit - ]} + ]}, + {warnings, [no_unknown]} ]}. 
{relx, [ @@ -153,7 +154,7 @@ {mixer, load}, syntax_tools, compiler, - eper, + %eper, observer_cli, efast_xs ]}, diff --git a/src/oc_erchef/rebar.lock b/src/oc_erchef/rebar.lock index 89ca7dcf05..4696f14083 100644 --- a/src/oc_erchef/rebar.lock +++ b/src/oc_erchef/rebar.lock @@ -4,7 +4,7 @@ 1}, {<<"bcrypt">>, {git,"https://github.com/erlangpack/bcrypt", - {ref,"826291ebd232ebfc4fb69f1c27d35706eecb8fc2"}}, + {ref,"564b39e2f86a0e3d8ca08dcd1aae924acffe1408"}}, 0}, {<<"bear">>, {git,"https://github.com/boundary/bear.git", @@ -20,7 +20,7 @@ 0}, {<<"chef_secrets">>, {git,"https://github.com/chef/chef_secrets", - {ref,"6fa36689fd599602e5985587a1497282df2d907a"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"darklaunch">>, {git,"https://github.com/chef/opscode-darklaunch-erlang", @@ -40,23 +40,19 @@ 1}, {<<"ej">>, {git,"https://github.com/chef/ej", - {ref,"f843f4da1cb7d8d2414adccc37fe523e3f92d789"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"envy">>, {git,"https://github.com/markan/envy", {ref,"0148fb4b7ed0e188511578e98b42d6e7dde0ebd1"}}, 0}, - {<<"eper">>, - {git,"https://github.com/massemanet/eper", - {ref,"17b0f97ea8287b72e8ebbe7132214db182ff1a1d"}}, - 0}, {<<"epgsql">>, {git,"https://github.com/chef/epgsql-1.git", - {ref,"34b4182f0e21f9189ddd7b2e290f01a9e7d93bf1"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"erlcloud">>, {git,"https://github.com/chef/erlcloud", - {ref,"27724cc615bb71595e88665ffd3ea083bf51ecb3"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"erlware_commons">>, {git,"https://github.com/chef/erlware_commons", @@ -95,8 +91,8 @@ {ref,"a140ea935eae9149bb35234bb40f6acf1c69caa1"}}, 0}, {<<"lhttpc">>, - {git,"https://github.com/erlcloud/lhttpc", - {ref,"8e34985a3cd0ac2a7fc2a88a041554c64d33e74b"}}, + {git,"https://github.com/chef/lhttpc", + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}, {<<"meck">>, {git,"https://github.com/eproxus/meck", @@ -104,7 +100,7 @@ 1}, {<<"mini_s3">>, {git,"https://github.com/chef/mini_s3", - 
{ref,"4dd584fce031d35bbe5c4b72a04660b75673ca21"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"mixer">>, {git,"https://github.com/inaka/mixer", @@ -132,7 +128,7 @@ 0}, {<<"opscoderl_wm">>, {git,"https://github.com/chef/opscoderl_wm", - {ref,"5436cc600db462226a5d2f3ed585ab39eaf20ee5"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"poolboy">>, {git,"https://github.com/devinus/poolboy", @@ -140,7 +136,7 @@ 1}, {<<"pooler">>, {git,"https://github.com/chef/pooler", - {ref,"681c355abaacc5487ddf41a84b9ed53151a765fe"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"prometheus">>, {git,"https://github.com/deadtrickster/prometheus.erl", @@ -156,7 +152,7 @@ 1}, {<<"sqerl">>, {git,"https://github.com/chef/sqerl", - {ref,"ebbe4c20ab5cd21041229d22dd60a6b38aa2930c"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 0}, {<<"stats_hero">>, {git,"https://github.com/chef/stats_hero", @@ -172,5 +168,5 @@ 0}, {<<"webmachine">>, {git,"https://github.com/chef/webmachine", - {ref,"1389b01a9fbc25d36aad8956e08d2d0db242625f"}}, + {branch,"CHEF-11677/CHEF-12498/lbaker"}}, 1}]. diff --git a/src/oc_erchef/rebar3 b/src/oc_erchef/rebar3 index ed2a36d577..bf21708519 100755 Binary files a/src/oc_erchef/rebar3 and b/src/oc_erchef/rebar3 differ diff --git a/src/oc_erchef/src/oc_erchef_app.erl b/src/oc_erchef/src/oc_erchef_app.erl index 2de306c3e9..9862a90cfc 100644 --- a/src/oc_erchef/src/oc_erchef_app.erl +++ b/src/oc_erchef/src/oc_erchef_app.erl @@ -22,7 +22,7 @@ start(_StartType, _StartArgs) -> %% See comment in app.src for details. 
{ ok, AppList } = application:get_key(oc_erchef, included_applications), [ application:ensure_all_started(App, permanent) || App <- AppList ], - {error,{already_started,_}} = application:start(chef_telemetry), + {error, {already_started, _}} = application:start(chef_telemetry), %% If we're in a dev vm environment, start the code sync & compile tools case os:getenv("DEVVM") of "1" -> @@ -30,7 +30,7 @@ start(_StartType, _StartArgs) -> SrcDir = filename:join([Dir, "../../../../../..", "external-deps"]), EbinDir = filename:join([Dir, "../../../../../..", "external-deps/ebin"]), application:set_env(sync, src_dirs, {add, [{SrcDir, - [{outdir,EbinDir}]}]}), + [{outdir, EbinDir}]}]}), application:set_env(sync, sync_method, scanner), application:start(sync); _ -> ok @@ -41,7 +41,7 @@ start(_StartType, _StartArgs) -> stop(_State) -> ok. --spec server_api_version(min|max|deprecated) -> {ok, api_version()}. +-spec server_api_version(min | max | deprecated) -> {ok, api_version()}. server_api_version(min) -> {ok, ?API_MIN_VER}; server_api_version(max) ->