From 4a2f59913c46994fd872d9c3065ac1d46a1c3814 Mon Sep 17 00:00:00 2001
From: "alpha.ferry"
Date: Fri, 5 Feb 2021 11:49:54 +0700
Subject: [PATCH 1/9] cowboy 2.8.0

---
 rebar.config | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rebar.config b/rebar.config
index c240d9e..7d2a49a 100644
--- a/rebar.config
+++ b/rebar.config
@@ -11,5 +11,5 @@
 ]}.
 
 {deps, [
-    {cowboy, "1.0.3",{git, "https://github.com/ninenines/cowboy.git", {tag, "1.0.3"}}}
+    {cowboy, "2.8.0",{git, "https://github.com/ninenines/cowboy.git", {tag, "2.8.0"}}}
 ]}.

From fb8e70bd420f887e7ea4fdf056eb9ca7a9fba710 Mon Sep 17 00:00:00 2001
From: "alpha.ferry"
Date: Fri, 5 Feb 2021 17:51:42 +0700
Subject: [PATCH 2/9] Update some code

---
 src/sockjs_cowboy_handler.erl |  8 ++------
 src/sockjs_handler.erl        |  6 +++---
 src/sockjs_http.erl           | 16 ++++++++--------
 3 files changed, 13 insertions(+), 17 deletions(-)

diff --git a/src/sockjs_cowboy_handler.erl b/src/sockjs_cowboy_handler.erl
index c66c9d4..6a211c7 100644
--- a/src/sockjs_cowboy_handler.erl
+++ b/src/sockjs_cowboy_handler.erl
@@ -3,7 +3,7 @@
 -behaviour(cowboy_websocket_handler).
 
 %% Cowboy http callbacks
--export([init/3, handle/2, terminate/3]).
+-export([init/2, terminate/3]).
 
 %% Cowboy ws callbacks
 -export([websocket_init/3, websocket_handle/3,
@@ -13,7 +13,7 @@
 
 %% --------------------------------------------------------------------------
 
-init({_Any, http}, Req, Service) ->
+init(#{ref := http} = Req, Service) ->
     case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) of
         {true, {cowboy, _Req1}, _Reason} ->
             {upgrade, protocol, cowboy_websocket};
@@ -21,10 +21,6 @@ init({_Any, http}, Req, Service) ->
             {ok, Req1, Service}
     end.
 
-handle(Req, Service) ->
-    {cowboy, Req3} = sockjs_handler:handle_req(Service, {cowboy, Req}),
-    {ok, Req3, Service}.
-
 terminate(_Reason, _Req, _Service) ->
     ok.
 
diff --git a/src/sockjs_handler.erl b/src/sockjs_handler.erl
index 4e7db58..d7bada9 100644
--- a/src/sockjs_handler.erl
+++ b/src/sockjs_handler.erl
@@ -103,10 +103,10 @@ strip_prefix(LongPath, Prefix) ->
 
 -spec dispatch_req(service(), req()) -> {dispatch_result(), req()}.
 dispatch_req(#service{prefix = Prefix}, Req) ->
-    {Method, Req1} = sockjs_http:method(Req),
-    {LongPath, Req2} = sockjs_http:path(Req1),
+    Method = sockjs_http:method(Req),
+    LongPath = sockjs_http:path(Req),
     {ok, PathRemainder} = strip_prefix(LongPath, Prefix),
-    {dispatch(Method, PathRemainder), Req2}.
+    {dispatch(Method, PathRemainder), Req}.
 
 -spec dispatch(atom(), nonempty_string()) -> dispatch_result().
 dispatch(Method, Path) ->
diff --git a/src/sockjs_http.erl b/src/sockjs_http.erl
index 59074e0..93205f0 100644
--- a/src/sockjs_http.erl
+++ b/src/sockjs_http.erl
@@ -8,13 +8,13 @@
 
 %% --------------------------------------------------------------------------
 
--spec path(req()) -> {string(), req()}.
-path({cowboy, Req}) -> {Path, Req1} = cowboy_req:path(Req),
-                       {binary_to_list(Path), {cowboy, Req1}}.
+-spec path(req()) -> string().
+path({cowboy, Req}) -> Path = cowboy_req:path(Req),
+                       binary_to_list(Path).
 
--spec method(req()) -> {atom(), req()}.
-method({cowboy, Req}) -> {Method, Req1} = cowboy_req:method(Req),
-                         {method_atom(Method), {cowboy, Req1}}.
+-spec method(req()) -> atom().
+method({cowboy, Req}) -> Method = cowboy_req:method(Req),
+                         method_atom(Method).
 
 -spec method_atom(binary() | atom()) -> atom().
 method_atom(<<"GET">>) -> 'GET';
@@ -33,7 +33,7 @@ method_atom('PATCH') -> 'PATCH';
 method_atom('HEAD') -> 'HEAD'.
 
 -spec body(req()) -> {binary(), req()}.
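As background for the sockjs_http.erl changes above and below: in Cowboy 2.x the request is a plain map, `cowboy_req:method/1` and `cowboy_req:path/1` return binaries directly rather than `{Value, Req}` pairs, `cowboy_req:body/1` and `cowboy_req:body_qs/1` are replaced by `cowboy_req:read_body/1` and `cowboy_req:read_urlencoded_body/1`, and the `cowboy_http_handler`/`cowboy_websocket_handler` behaviours give way to a single `init/2` callback. A minimal sketch of a Cowboy 2 handler using these calls, for orientation only and not part of the patch (the module name is made up):

%% Illustrative sketch only -- not part of this patch series.
-module(example_h).

-export([init/2]).

%% Cowboy 2: one init/2 callback replaces init/3 + handle/2.
init(Req0, State) ->
    Method = cowboy_req:method(Req0),      %% binary, e.g. <<"GET">>
    Path   = cowboy_req:path(Req0),        %% binary, no {Value, Req} pair
    %% read_body/1 can also return {more, Data, Req} for large bodies.
    {ok, Body, Req1} = cowboy_req:read_body(Req0),
    Req2 = cowboy_req:reply(200,
                            #{<<"content-type">> => <<"text/plain">>},
                            [Method, $\s, Path, $\n, Body],
                            Req1),
    {ok, Req2, State}.

To upgrade a connection to websocket in Cowboy 2, init/2 returns {cowboy_websocket, Req, State} rather than the Cowboy 1-style {upgrade, protocol, cowboy_websocket} tuple.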
-body({cowboy, Req}) -> {ok, Body, Req1} = cowboy_req:body(Req), +body({cowboy, Req}) -> {ok, Body, Req1} = cowboy_req:read_body(Req), {Body, {cowboy, Req1}}. -spec body_qs(req()) -> {binary(), req()}. @@ -47,7 +47,7 @@ body_qs(Req) -> body_qs2(Req1) end. body_qs2({cowboy, Req}) -> - {ok, BodyQS, Req1} = cowboy_req:body_qs(Req), + {ok, BodyQS, Req1} = cowboy_req:read_urlencoded_body(Req), case proplists:get_value(<<"d">>, BodyQS) of undefined -> {<<>>, {cowboy, Req1}}; From 44d84079d943d22a68e050b1df0f38210c72800f Mon Sep 17 00:00:00 2001 From: "alpha.ferry" Date: Mon, 8 Feb 2021 16:50:06 +0700 Subject: [PATCH 3/9] tidy up --- .../rebar_compiler_erl/source_apps.dag | Bin 0 -> 439 bytes .../plugins/erl_tidy_prv_fmt/.gitignore | 18 + .../default/plugins/erl_tidy_prv_fmt/LICENSE | 29 + .../plugins/erl_tidy_prv_fmt/README.md | 27 + .../plugins/erl_tidy_prv_fmt/rebar.config | 2 + .../plugins/erl_tidy_prv_fmt/rebar.lock | 1 + .../erl_tidy_prv_fmt/src/erl_tidy.app.src | 9 + .../plugins/erl_tidy_prv_fmt/src/erl_tidy.erl | 1914 +++++++++++++++++ .../erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl | 73 + rebar.config | 2 + src/mochijson2_fork.erl | 1030 ++++----- src/mochinum_fork.erl | 341 +-- src/sockjs.erl | 48 +- src/sockjs_action.erl | 375 ++-- src/sockjs_app.erl | 8 +- src/sockjs_cowboy_handler.erl | 126 +- src/sockjs_filters.erl | 82 +- src/sockjs_handler.erl | 308 +-- src/sockjs_http.erl | 158 +- src/sockjs_json.erl | 11 +- src/sockjs_multiplex.erl | 201 +- src/sockjs_service.erl | 11 +- src/sockjs_session_sup.erl | 19 +- src/sockjs_util.erl | 33 +- src/sockjs_ws_handler.erl | 63 +- 25 files changed, 3346 insertions(+), 1543 deletions(-) create mode 100644 _build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/.gitignore create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/LICENSE create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/README.md create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/rebar.config create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/rebar.lock create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl create mode 100644 _build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl diff --git a/_build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag b/_build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag new file mode 100644 index 0000000000000000000000000000000000000000..88f76700b1dd642d382e634cf515847100f4d5bc GIT binary patch literal 439 zcmV;o0Z9IXPyhf6O?Y1IR=rNcFc5Z=wp6f`H(+3GELv0~M&5v-Ag1UNJB?#EiJYWT zpBM2SoRg+a2NQxRzw}`MjNLpl}Oo|!T3eq+8gGwky8IPb+CCyc}7HL88)-MIy z`MKWu(Waz45b5F63pi|Q4xmdBL0B5-D|0e!S@~OJAhNV&XD-KCRWgBu-!4Otk!!|- zYVC_O$_(H@NV4i5tI%O<{iA>DA`Ml=c#H7bI|6=$=4GYR#E2#6-9E+m9JPW{ik;XE zP#|C6z$A8I3U{zD*vk`aiBIYL!CO!z0*bqotJxB_`-sp8 literal 0 HcmV?d00001 diff --git a/_build/default/plugins/erl_tidy_prv_fmt/.gitignore b/_build/default/plugins/erl_tidy_prv_fmt/.gitignore new file mode 100644 index 0000000..40a1d4f --- /dev/null +++ b/_build/default/plugins/erl_tidy_prv_fmt/.gitignore @@ -0,0 +1,18 @@ +.rebar3 +_* +.eunit +*.o +*.beam +*.plt +*.swp +*.swo +.erlang.cookie +ebin +log +erl_crash.dump +.rebar +_rel +_deps +_plugins +_tdeps +logs diff --git a/_build/default/plugins/erl_tidy_prv_fmt/LICENSE b/_build/default/plugins/erl_tidy_prv_fmt/LICENSE new file mode 100644 index 0000000..f592795 --- /dev/null +++ 
b/_build/default/plugins/erl_tidy_prv_fmt/LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2015, Tristan Sloughter . +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +* The names of its contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/_build/default/plugins/erl_tidy_prv_fmt/README.md b/_build/default/plugins/erl_tidy_prv_fmt/README.md new file mode 100644 index 0000000..69ae99f --- /dev/null +++ b/_build/default/plugins/erl_tidy_prv_fmt/README.md @@ -0,0 +1,27 @@ +erl_tidy +===== + +Format your Erlang modules. + +Use +--- + +Add the following to your `rebar.config` + +```erlang +{plugins, [{erl_tidy_prv_fmt, ".*", {git, "git://github.com/tsloughter/erl_tidy.git", {branch, "master"}}}]}. +``` + +And run: + + $ rebar3 fmt + +Run `rebar3 help fmt` to learn about formatting options. + +You also may specify options in your `rebar.config` like so: +```erlang +{fmt_opts, [ + {auto_list_comp, false}, + {keep_unused, true} +]}. +``` \ No newline at end of file diff --git a/_build/default/plugins/erl_tidy_prv_fmt/rebar.config b/_build/default/plugins/erl_tidy_prv_fmt/rebar.config new file mode 100644 index 0000000..f618f3e --- /dev/null +++ b/_build/default/plugins/erl_tidy_prv_fmt/rebar.config @@ -0,0 +1,2 @@ +{erl_opts, [debug_info]}. +{deps, []}. \ No newline at end of file diff --git a/_build/default/plugins/erl_tidy_prv_fmt/rebar.lock b/_build/default/plugins/erl_tidy_prv_fmt/rebar.lock new file mode 100644 index 0000000..57afcca --- /dev/null +++ b/_build/default/plugins/erl_tidy_prv_fmt/rebar.lock @@ -0,0 +1 @@ +[]. diff --git a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src new file mode 100644 index 0000000..fdbafb6 --- /dev/null +++ b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src @@ -0,0 +1,9 @@ +{application, erl_tidy, + [{description, "An OTP library"} + ,{vsn, "0.1.0"} + ,{registered, []} + ,{applications, + [kernel,stdlib]} + ,{env,[]} + ,{modules, []} + ]}. 
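Following the plugin README above, wiring the formatter into the top-level project is a small rebar.config addition; a hypothetical sketch (the patch's actual rebar.config hunk falls outside this excerpt, so the entries below are illustrative):

%% Hypothetical rebar.config entries mirroring the plugin README above.
{plugins, [
    {erl_tidy_prv_fmt, ".*",
     {git, "git://github.com/tsloughter/erl_tidy.git", {branch, "master"}}}
]}.

{fmt_opts, [
    {auto_list_comp, false},
    {keep_unused, true}
]}.

Running `rebar3 fmt` then rewrites the sources in place, which is presumably how the src/*.erl changes listed in this commit were produced.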
diff --git a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl new file mode 100644 index 0000000..f2de12b --- /dev/null +++ b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl @@ -0,0 +1,1914 @@ +%% ===================================================================== +%% This library is free software; you can redistribute it and/or +%% modify it under the terms of the GNU Lesser General Public License +%% as published by the Free Software Foundation; either version 2 of +%% the License, or (at your option) any later version. +%% +%% This library is distributed in the hope that it will be useful, but +%% WITHOUT ANY WARRANTY; without even the implied warranty of +%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +%% Lesser General Public License for more details. +%% +%% You should have received a copy of the GNU Lesser General Public +%% License along with this library; if not, write to the Free Software +%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +%% USA +%% +%% @copyright 1999-2014 Richard Carlsson +%% @author Richard Carlsson +%% @end +%% ===================================================================== + +%% @doc Tidies and pretty-prints Erlang source code, removing unused +%% functions, updating obsolete constructs and function calls, etc. +%% +%% Caveats: It is possible that in some intricate uses of macros, +%% the automatic addition or removal of parentheses around uses or +%% arguments could cause the resulting program to be rejected by the +%% compiler; however, we have found no such case in existing +%% code. Programs defining strange macros can usually not be read by +%% this program, and in those cases, no changes will be made. +%% +%% If you really, really want to, you may call it "Inga". +%% +%% Disclaimer: The author accepts no responsibility for errors +%% introduced in code that has been processed by the program. It has +%% been reasonably well tested, but the possibility of errors remains. +%% Keep backups of your original code safely stored, until you feel +%% confident that the new, modified code can be trusted. + +-module(erl_tidy). + +-export([dir/0, dir/1, dir/2, file/1, file/2, module/1, module/2]). + +-include_lib("kernel/include/file.hrl"). + +-define(DEFAULT_BACKUP_SUFFIX, ".bak"). +-define(DEFAULT_DIR, ""). +-define(DEFAULT_REGEXP, ".*\\.erl$"). + +%% ===================================================================== + +-type options() :: [atom() | {atom(), any()}]. + +%% ===================================================================== + +dir__defaults() -> + [{follow_links, false}, + recursive, + {regexp, ?DEFAULT_REGEXP}, + verbose]. + +%% ===================================================================== +%% @spec dir() -> ok +%% @equiv dir("") + +-spec dir() -> 'ok'. + +dir() -> + dir(""). + +%% ===================================================================== +%% @spec dir(Dir) -> ok +%% @equiv dir(Dir, []) + +-spec dir(file:filename()) -> 'ok'. + +dir(Dir) -> + dir(Dir, []). + +%% ===================================================================== +%% @spec dir(Directory::filename(), Options::[term()]) -> ok +%% filename() = file:filename() +%% +%% @doc Tidies Erlang source files in a directory and its +%% subdirectories. +%% +%% Available options: +%%
+%%
{follow_links, boolean()}
+%% +%%
If the value is `true', symbolic directory +%% links will be followed. The default value is +%% `false'.
+%% +%%
{recursive, boolean()}
+%% +%%
If the value is `true', subdirectories will be +%% visited recursively. The default value is +%% `true'.
+%% +%%
{regexp, string()}
+%% +%%
The value denotes a regular expression (see module +%% `re'). Tidying will only be applied to those +%% regular files whose names match this pattern. The default +%% value is `".*\\.erl$"', which matches normal +%% Erlang source file names.
+%% +%%
{test, boolean()}
+%% +%%
If the value is `true', no files will be +%% modified. The default value is `false'.
+%% +%%
{verbose, boolean()}
+%% +%%
If the value is `true', progress messages will +%% be output while the program is running, unless the +%% `quiet' option is `true'. The default +%% value when calling {@link dir/2} is `true'.
+%% +%%
+%% +%% See the function {@link file/2} for further options. +%% +%% @see //stdlib/re +%% @see file/2 + +-record(dir, {follow_links = false :: boolean(), + recursive = true :: boolean(), + options :: options()}). + +-spec dir(file:filename(), options()) -> 'ok'. + +dir(Dir, Opts) -> + Opts1 = Opts ++ dir__defaults(), + Env = #dir{follow_links = proplists:get_bool(follow_links, Opts1), + recursive = proplists:get_bool(recursive, Opts1), + options = Opts1}, + Regexp = proplists:get_value(regexp, Opts1), + case filename(Dir) of + "" -> + Dir1 = "."; + Dir1 -> + ok + end, + dir_1(Dir1, Regexp, Env). + +dir_1(Dir, Regexp, Env) -> + case file:list_dir(Dir) of + {ok, Files} -> + lists:foreach(fun (X) -> dir_2(X, Regexp, Dir, Env) end, + Files); + {error, _} -> + report_error("error reading directory `~ts'", + [filename(Dir)]), + exit(error) + end. + +dir_2(Name, Regexp, Dir, Env) -> + File = if Dir =:= "" -> + Name; + true -> + filename:join(Dir, Name) + end, + case file_type(File) of + {value, regular} -> + dir_4(File, Regexp, Env); + {value, directory} when Env#dir.recursive =:= true -> + case is_symlink(Name) of + false -> + dir_3(Name, Dir, Regexp, Env); + true when Env#dir.follow_links =:= true -> + dir_3(Name, Dir, Regexp, Env); + _ -> + ok + end; + _ -> + ok + end. + +dir_3(Name, Dir, Regexp, Env) -> + Dir1 = filename:join(Dir, Name), + verbose("tidying directory `~ts'.", [Dir1], Env#dir.options), + dir_1(Dir1, Regexp, Env). + +dir_4(File, Regexp, Env) -> + case re:run(File, Regexp) of + {match, _} -> + Opts = [{outfile, File}, {dir, ""} | Env#dir.options], + case catch file(File, Opts) of + {'EXIT', Value} -> + warn("error tidying `~ts'.~n~p", [File,Value], Opts); + _ -> + ok + end; + nomatch -> + ok + end. + +file__defaults() -> + [{backup_suffix, ?DEFAULT_BACKUP_SUFFIX}, + backups, + {dir, ?DEFAULT_DIR}, + {printer, default_printer()}, + {quiet, false}, + {verbose, false}]. + +default_printer() -> + fun (Tree, Options) -> erl_prettypr:format(Tree, Options) end. + +%% ===================================================================== +%% @spec file(Name) -> ok +%% @equiv file(Name, []) + +-spec file(file:filename()) -> 'ok'. + +file(Name) -> + file(Name, []). + +%% ===================================================================== +%% @spec file(Name::filename(), Options::[term()]) -> ok +%% +%% @doc Tidies an Erlang source code file. +%% +%% Available options are: +%%
+%%
{backup_suffix, string()}
+%% +%%
Specifies the file name suffix to be used when a backup +%% file is created; the default value is `".bak"' +%% (cf. the `backups' option).
+%% +%%
{backups, boolean()}
+%% +%%
If the value is `true', existing files will be +%% renamed before new files are opened for writing. The new +%% names are formed by appending the string given by the +%% `backup_suffix' option to the original name. The +%% default value is `true'.
+%% +%%
{dir, filename()}
+%% +%%
Specifies the name of the directory in which the output +%% file is to be written. By default, the current directory is +%% used. If the value is an empty string, the current directory +%% is used.
+%% +%%
{outfile, filename()}
+%% +%%
Specifies the name of the file (without suffix) to which +%% the resulting source code is to be written. If this option is +%% not specified, the `Name' argument is used.
+%% +%%
{printer, Function}
+%%
+%%         `Function = (syntaxTree()) -> string()'
+%% +%% Specifies a function for prettyprinting Erlang syntax trees. +%% This is used for outputting the resulting module definition. +%% The function is assumed to return formatted text for the given +%% syntax tree, and should raise an exception if an error occurs. +%% The default formatting function calls +%% `erl_prettypr:format/2'.
+%% +%%
{test, boolean()}
+%% +%%
If the value is `true', no files will be modified; this +%% is typically most useful if the `verbose' flag is enabled, to +%% generate reports about the program files without affecting +%% them. The default value is `false'.
+%% +%%
{stdout, boolean()}
+%% +%%
If the value is `true', instead of the file being written +%% to disk it will be printed to stdout. The default value is +%% `false'.
+%% +%%
+%% +%% See the function `module/2' for further options. +%% +%% @see erl_prettypr:format/2 +%% @see module/2 + +-spec file(file:filename(), options()) -> 'ok'. + +file(Name, Opts) -> + Parent = self(), + Child = spawn_link(fun () -> file_1(Parent, Name, Opts) end), + receive + {Child, ok} -> + ok; + {Child, {error, Reason}} -> + exit(Reason) + end. + +file_1(Parent, Name, Opts) -> + try file_2(Name, Opts) of + _ -> + Parent ! {self(), ok} + catch + throw:syntax_error -> % ignore syntax errors + Parent ! {self(), ok}; + error:Reason -> + Parent ! {self(), {error, Reason}} + end. + +file_2(Name, Opts) -> + Opts1 = Opts ++ file__defaults(), + Forms = read_module(Name, Opts1), + Comments = erl_comment_scan:file(Name), + Forms1 = erl_recomment:recomment_forms(Forms, Comments), + Tree = module(Forms1, [{file, Name} | Opts1]), + case proplists:get_bool(test, Opts1) of + true -> + ok; + false -> + case proplists:get_bool(stdout, Opts1) of + true -> + print_module(Tree, Opts1), + ok; + false -> + write_module(Tree, Name, Opts1), + ok + end + end. + +read_module(Name, Opts) -> + verbose("reading module `~ts'.", [filename(Name)], Opts), + case epp_dodger:parse_file(Name, [no_fail]) of + {ok, Forms} -> + check_forms(Forms, Name), + Forms; + {error, R} -> + error_read_file(Name), + exit({error, R}) + end. + +check_forms(Fs, Name) -> + Fun = fun (F) -> + case erl_syntax:type(F) of + error_marker -> + S = case erl_syntax:error_marker_info(F) of + {_, M, D} -> + M:format_error(D); + _ -> + "unknown error" + end, + report_error({Name, erl_syntax:get_pos(F), + "\n ~ts"}, [S]), + exit(error); + _ -> + ok + end + end, + lists:foreach(Fun, Fs). + +%% Create the target directory and make a backup file if necessary, +%% then open the file, output the text and close the file +%% safely. Returns the file name. + +write_module(Tree, Name, Opts) -> + Name1 = proplists:get_value(outfile, Opts, filename(Name)), + Dir = filename(proplists:get_value(dir, Opts, "")), + File = if Dir =:= "" -> + Name1; + true -> + case file_type(Dir) of + {value, directory} -> + ok; + {value, _} -> + report_error("`~ts' is not a directory.", + [filename(Dir)]), + exit(error); + none -> + case file:make_dir(Dir) of + ok -> + verbose("created directory `~ts'.", + [filename(Dir)], Opts), + ok; + E -> + report_error("failed to create " + "directory `~ts'.", + [filename(Dir)]), + exit({make_dir, E}) + end + end, + filename(filename:join(Dir, Name1)) + end, + Encoding = [{encoding,Enc} || Enc <- [epp:read_encoding(Name)], + Enc =/= none], + case proplists:get_bool(backups, Opts) of + true -> + backup_file(File, Opts); + false -> + ok + end, + Printer = proplists:get_value(printer, Opts), + FD = open_output_file(File, Encoding), + verbose("writing to file `~ts'.", [File], Opts), + V = (catch {ok, output(FD, Printer, Tree, Opts++Encoding)}), + ok = file:close(FD), + case V of + {ok, _} -> + File; + {'EXIT', R} -> + error_write_file(File), + exit(R); + R -> + error_write_file(File), + throw(R) + end. + +print_module(Tree, Opts) -> + Printer = proplists:get_value(printer, Opts), + io:format(Printer(Tree, Opts)). + +output(FD, Printer, Tree, Opts) -> + io:put_chars(FD, Printer(Tree, Opts)), + io:nl(FD). + +%% file_type(file:filename()) -> {value, Type} | none + +file_type(Name) -> + file_type(Name, false). + +is_symlink(Name) -> + file_type(Name, true) =:= {value, symlink}. 
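A small usage sketch for the dir/2 and file/2 options documented above (illustrative only, not part of erl_tidy.erl; the paths are placeholders):

%% Print a tidied copy of one module to stdout without touching the file,
%% then tidy a whole source tree using the documented defaults.
ok = erl_tidy:file("src/example.erl", [{stdout, true}, {backups, false}]),
ok = erl_tidy:dir("src", [{regexp, ".*\\.erl$"}, {verbose, true}]).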
+ +file_type(Name, Links) -> + V = case Links of + true -> + catch file:read_link_info(Name); + false -> + catch file:read_file_info(Name) + end, + case V of + {ok, Env} -> + {value, Env#file_info.type}; + {error, enoent} -> + none; + {error, R} -> + error_read_file(Name), + exit({error, R}); + {'EXIT', R} -> + error_read_file(Name), + exit(R); + R -> + error_read_file(Name), + throw(R) + end. + +open_output_file(FName, Options) -> + case catch file:open(FName, [write]++Options) of + {ok, FD} -> + FD; + {error, R} -> + error_open_output(FName), + exit({error, R}); + {'EXIT', R} -> + error_open_output(FName), + exit(R); + R -> + error_open_output(FName), + exit(R) + end. + +%% If the file exists, rename it by appending the given suffix to the +%% file name. + +backup_file(Name, Opts) -> + case file_type(Name) of + {value, regular} -> + backup_file_1(Name, Opts); + {value, _} -> + error_backup_file(Name), + exit(error); + none -> + ok + end. + +%% The file should exist and be a regular file here. + +backup_file_1(Name, Opts) -> + Suffix = proplists:get_value(backup_suffix, Opts, ""), + Dest = filename:join(filename:dirname(Name), + filename:basename(Name) ++ Suffix), + case catch file:rename(Name, Dest) of + ok -> + verbose("made backup of file `~ts'.", [Name], Opts); + {error, R} -> + error_backup_file(Name), + exit({error, R}); + {'EXIT', R} -> + error_backup_file(Name), + exit(R); + R -> + error_backup_file(Name), + throw(R) + end. + +%% ===================================================================== +%% @spec module(Forms) -> syntaxTree() +%% @equiv module(Forms, []) + +-spec module(erl_syntax:forms()) -> erl_syntax:syntaxTree(). + +module(Forms) -> + module(Forms, []). + +%% ===================================================================== +%% @spec module(Forms, Options::[term()]) -> syntaxTree() +%% +%% Forms = syntaxTree() | [syntaxTree()] +%% syntaxTree() = erl_syntax:syntaxTree() +%% +%% @doc Tidies a syntax tree representation of a module +%% definition. The given `Forms' may be either a single +%% syntax tree of type `form_list', or a list of syntax +%% trees representing "program forms". In either case, +%% `Forms' must represent a single complete module +%% definition. The returned syntax tree has type +%% `form_list' and represents a tidied-up version of the +%% same source code. +%% +%% Available options are: +%%
+%%
{auto_export_vars, boolean()}
+%% +%%
If the value is `true', all matches +%% "`{V1, ..., Vn} = E'" where `E' is a +%% case-, if- or receive-expression whose branches all return +%% n-tuples (or explicitly throw exceptions) will be rewritten +%% to bind and export the variables `V1', ..., +%% `Vn' directly. The default value is `false'. +%% +%% For example: +%%
+%%                {X, Y} = case ... of
+%%                             ... -> {17, foo()};
+%%                             ... -> {42, bar()}
+%%                         end
+%%       
+%% will be rewritten to: +%%
+%%                case ... of
+%%                    ... -> X = 17, Y = foo(), {X, Y};
+%%                    ... -> X = 42, Y = bar(), {X, Y}
+%%                end
+%%       
+%% +%%
{auto_list_comp, boolean()}
+%% +%%
If the value is `true', calls to `lists:map/2' and +%% `lists:filter/2' will be rewritten using list comprehensions. +%% The default value is `true'.
+%% +%%
{file, string()}
+%% +%%
Specifies the name of the file from which the source code +%% was taken. This is only used for generation of error +%% reports. The default value is the empty string.
+%% +%%
{idem, boolean()}
+%% +%%
If the value is `true', all options that affect how the +%% code is modified are set to "no changes". For example, to +%% only update guard tests, and nothing else, use the options +%% `[new_guard_tests, idem]'. (Recall that options closer to the +%% beginning of the list have higher precedence.)
+%% +%%
{keep_unused, boolean()}
+%% +%%
If the value is `true', unused functions will +%% not be removed from the code. The default value is +%% `false'.
+%% +%%
{new_guard_tests, boolean()}
+%% +%%
If the value is `true', guard tests will be updated to +%% use the new names, e.g. "`is_integer(X)'" instead of +%% "`integer(X)'". The default value is `true'. See also +%% `old_guard_tests'.
+%% +%%
{no_imports, boolean()}
+%% +%%
If the value is `true', all import statements will be +%% removed and calls to imported functions will be expanded to +%% explicit remote calls. The default value is `false'.
+%% +%%
{old_guard_tests, boolean()}
+%% +%%
If the value is `true', guard tests will be changed to +%% use the old names instead of the new ones, e.g. +%% "`integer(X)'" instead of "`is_integer(X)'". The default +%% value is `false'. This option overrides the `new_guard_tests' +%% option.
+%% +%%
{quiet, boolean()}
+%% +%%
If the value is `true', all information +%% messages and warning messages will be suppressed. The default +%% value is `false'.
+%% +%%
{rename, [{{atom(), atom(), integer()}, +%% {atom(), atom()}}]}
+%% +%%
The value is a list of pairs, associating tuples +%% `{Module, Name, Arity}' with tuples `{NewModule, NewName}', +%% specifying renamings of calls to remote functions. By +%% default, the value is the empty list. +%% +%% The renaming affects only remote calls (also when +%% disguised by import declarations); local calls within a +%% module are not affected, and no function definitions are +%% renamed. Since the arity cannot change, the new name is +%% represented by `{NewModule, NewName}' only. Only +%% calls matching the specified arity will match; multiple +%% entries are necessary for renaming calls to functions that +%% have the same module and function name, but different +%% arities. +%% +%% This option can also be used to override the default +%% renaming of calls which use obsolete function names.
+%% +%%
{verbose, boolean()}
+%% +%%
If the value is `true', progress messages will be output +%% while the program is running, unless the `quiet' option is +%% `true'. The default value is `false'.
+%% +%%
+ +-spec module(erl_syntax:forms(), [term()]) -> erl_syntax:syntaxTree(). + +module(Forms, Opts) when is_list(Forms) -> + module(erl_syntax:form_list(Forms), Opts); +module(Forms, Opts) -> + Opts1 = proplists:expand(module__expansions(), Opts) + ++ module__defaults(), + File = proplists:get_value(file, Opts1, ""), + Forms1 = erl_syntax:flatten_form_list(Forms), + module_1(Forms1, File, Opts1). + +module__defaults() -> + [{auto_export_vars, false}, + {auto_list_comp, true}, + {keep_unused, false}, + {new_guard_tests, true}, + {no_imports, false}, + {old_guard_tests, false}, + {quiet, false}, + {verbose, false}]. + +module__expansions() -> + [{idem, [{auto_export_vars, false}, + {auto_list_comp, false}, + {keep_unused, true}, + {new_guard_tests, false}, + {no_imports, false}, + {old_guard_tests, false}]}]. + +module_1(Forms, File, Opts) -> + Info = analyze_forms(Forms, File), + Module = get_module_name(Info, File), + Attrs = get_module_attributes(Info), + Exports = get_module_exports(Info), + Imports = get_module_imports(Info), + Opts1 = check_imports(Imports, Opts, File), + Fs = erl_syntax:form_list_elements(Forms), + {Names, Defs} = collect_functions(Fs), + Exports1 = check_export_all(Attrs, Names, Exports), + Roots = ordsets:union(ordsets:from_list(Exports1), + hidden_uses(Fs, Imports)), + {Names1, Used, Imported, Defs1} = visit_used(Names, Defs, Roots, + Imports, Module, + Opts1), + Fs1 = update_forms(Fs, Defs1, Imported, Opts1), + Fs2 = filter_forms(Fs1, Names1, Used, Opts1), + rewrite(Forms, erl_syntax:form_list(Fs2)). + +analyze_forms(Forms, File) -> + case catch {ok, erl_syntax_lib:analyze_forms(Forms)} of + {ok, L1} -> + L1; + syntax_error -> + report_error({File, 0, "syntax error."}), + throw(syntax_error); + {'EXIT', R} -> + exit(R); + R -> + throw(R) + end. + +-spec get_module_name([erl_syntax_lib:info_pair()], string()) -> atom(). + +get_module_name(List, File) -> + case lists:keyfind(module, 1, List) of + {module, M} -> + M; + _ -> + report_error({File, 0, + "cannot determine module name."}), + exit(error) + end. + +get_module_attributes(List) -> + case lists:keyfind(attributes, 1, List) of + {attributes, As} -> + As; + _ -> + [] + end. + +-spec get_module_exports([erl_syntax_lib:info_pair()]) -> [{atom(), arity()}]. + +get_module_exports(List) -> + case lists:keyfind(exports, 1, List) of + {exports, Es} -> + Es; + _ -> + [] + end. + +-spec get_module_imports([erl_syntax_lib:info_pair()]) -> [{atom(), atom()}]. + +get_module_imports(List) -> + case lists:keyfind(imports, 1, List) of + {imports, Is} -> + flatten_imports(Is); + _ -> + [] + end. + +compile_attrs(As) -> + lists:append([if is_list(T) -> T; true -> [T] end + || {compile, T} <- As]). + +-spec flatten_imports([{atom(), [atom()]}]) -> [{atom(), atom()}]. + +flatten_imports(Is) -> + [{F, M} || {M, Fs} <- Is, F <- Fs]. + +check_imports(Is, Opts, File) -> + case check_imports_1(lists:sort(Is)) of + true -> + Opts; + false -> + case proplists:get_bool(no_imports, Opts) of + true -> + warn({File, 0, + "conflicting import declarations - " + "will not expand imports."}, + [], Opts), + %% prevent expansion of imports + [{no_imports, false} | Opts]; + false -> + Opts + end + end. + +-spec check_imports_1([{atom(), atom()}]) -> boolean(). + +check_imports_1([{F, M1}, {F, M2} | _Is]) when M1 =/= M2 -> + false; +check_imports_1([_ | Is]) -> + check_imports_1(Is); +check_imports_1([]) -> + true. 
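The module/2 options documented above can also be driven programmatically; a sketch (not part of erl_tidy.erl; the path and rename mapping are illustrative), using epp_dodger and erl_prettypr from the same syntax_tools application:

tidy_forms(Path) ->
    {ok, Forms} = epp_dodger:parse_file(Path),
    %% Rewrite remote calls per the documented `rename' option and keep
    %% functions that would otherwise be dropped as unused.
    Tree = erl_tidy:module(Forms, [{rename, [{{dict, dict_to_list, 1},
                                              {dict, to_list}}]},
                                   {keep_unused, true}]),
    io:put_chars(erl_prettypr:format(Tree)).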
+ +check_export_all(Attrs, Names, Exports) -> + case lists:member(export_all, compile_attrs(Attrs)) of + true -> + Exports ++ sets:to_list(Names); + false -> + Exports + end. + +filter_forms(Fs, Names, Used, Opts) -> + Keep = case proplists:get_bool(keep_unused, Opts) of + true -> + Names; + false -> + Used + end, + [F || F <- Fs, keep_form(F, Keep, Opts)]. + +keep_form(Form, Used, Opts) -> + case erl_syntax:type(Form) of + function -> + N = erl_syntax_lib:analyze_function(Form), + case sets:is_element(N, Used) of + false -> + {F, A} = N, + File = proplists:get_value(file, Opts, ""), + report({File, erl_syntax:get_pos(Form), + "removing unused function `~w/~w'."}, + [F, A], Opts), + false; + true -> + true + end; + attribute -> + case erl_syntax_lib:analyze_attribute(Form) of + {file, _} -> + false; + _ -> + true + end; + error_marker -> + false; + warning_marker -> + false; + eof_marker -> + false; + _ -> + true + end. + +collect_functions(Forms) -> + lists:foldl( + fun (F, {Names, Defs}) -> + case erl_syntax:type(F) of + function -> + N = erl_syntax_lib:analyze_function(F), + {sets:add_element(N, Names), + dict:store(N, {F, []}, Defs)}; + _ -> + {Names, Defs} + end + end, + {sets:new(), dict:new()}, + Forms). + +update_forms([F | Fs], Defs, Imports, Opts) -> + case erl_syntax:type(F) of + function -> + N = erl_syntax_lib:analyze_function(F), + {F1, Fs1} = dict:fetch(N, Defs), + [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports, + Opts); + attribute -> + [update_attribute(F, Imports, Opts) + | update_forms(Fs, Defs, Imports, Opts)]; + _ -> + [F | update_forms(Fs, Defs, Imports, Opts)] + end; +update_forms([], _, _, _) -> + []. + +update_attribute(F, Imports, Opts) -> + case erl_syntax_lib:analyze_attribute(F) of + {import, {M, Ns}} -> + Ns1 = ordsets:from_list([N || N <- Ns, + sets:is_element(N, Imports)]), + case ordsets:subtract(ordsets:from_list(Ns), Ns1) of + [] -> + ok; + Names -> + File = proplists:get_value(file, Opts, ""), + report({File, erl_syntax:get_pos(F), + "removing unused imports:~s"}, + [[io_lib:fwrite("\n\t`~w:~w/~w'", [M, N, A]) + || {N, A} <- Names]], Opts) + end, + Is = [make_fname(N) || N <- Ns1], + if Is =:= [] -> + %% This will be filtered out later. + erl_syntax:warning_marker(deleted); + true -> + F1 = erl_syntax:attribute(erl_syntax:atom(import), + [erl_syntax:atom(M), + erl_syntax:list(Is)]), + rewrite(F, F1) + end; + {export, Ns} -> + Es = [make_fname(N) || N <- ordsets:from_list(Ns)], + F1 = erl_syntax:attribute(erl_syntax:atom(export), + [erl_syntax:list(Es)]), + rewrite(F, F1); + _ -> + F + end. + +make_fname({F, A}) -> + erl_syntax:arity_qualifier(erl_syntax:atom(F), + erl_syntax:integer(A)). + +hidden_uses(Fs, Imports) -> + Used = lists:foldl(fun (F, S) -> + case erl_syntax:type(F) of + attribute -> + hidden_uses_1(F, S); + _ -> + S + end + end, + [], Fs), + ordsets:subtract(Used, ordsets:from_list([F || {F, _M} <- Imports])). + +hidden_uses_1(Tree, Used) -> + erl_syntax_lib:fold(fun hidden_uses_2/2, Used, Tree). 
+ +hidden_uses_2(Tree, Used) -> + case erl_syntax:type(Tree) of + application -> + F = erl_syntax:application_operator(Tree), + case erl_syntax:type(F) of + atom -> + As = erl_syntax:application_arguments(Tree), + N = {erl_syntax:atom_value(F), length(As)}, + case is_auto_imported(N) of + true -> + Used; + false -> + ordsets:add_element(N, Used) + end; + _ -> + Used + end; + implicit_fun -> + F = erl_syntax:implicit_fun_name(Tree), + case catch {ok, erl_syntax_lib:analyze_function_name(F)} of + {ok, {Name, Arity} = N} + when is_atom(Name), is_integer(Arity) -> + ordsets:add_element(N, Used); + _ -> + Used + end; + _ -> + Used + end. + +-type fa() :: {atom(), arity()}. +-type context() :: 'guard_expr' | 'guard_test' | 'normal'. + +-record(env, {file :: file:filename(), + module :: atom(), + current :: fa() | 'undefined', + imports = dict:new() :: dict:dict(atom(), atom()), + context = normal :: context(), + verbosity = 1 :: 0 | 1 | 2, + quiet = false :: boolean(), + no_imports = false :: boolean(), + spawn_funs = false :: boolean(), + auto_list_comp = true :: boolean(), + auto_export_vars = false :: boolean(), + new_guard_tests = true :: boolean(), + old_guard_tests = false :: boolean()}). + +-record(st, {varc :: non_neg_integer() | 'undefined', + used = sets:new() :: sets:set({atom(), arity()}), + imported :: sets:set({atom(), arity()}), + vars :: sets:set(atom()) | 'undefined', + functions :: sets:set({atom(), arity()}), + new_forms = [] :: [erl_syntax:syntaxTree()], + rename :: dict:dict(mfa(), {atom(), atom()})}). + +visit_used(Names, Defs, Roots, Imports, Module, Opts) -> + File = proplists:get_value(file, Opts, ""), + NoImports = proplists:get_bool(no_imports, Opts), + Rename = proplists:append_values(rename, Opts), + loop(Roots, sets:new(), Defs, + #env{file = File, + module = Module, + imports = dict:from_list(Imports), + verbosity = verbosity(Opts), + no_imports = NoImports, + spawn_funs = proplists:get_bool(spawn_funs, Opts), + auto_list_comp = proplists:get_bool(auto_list_comp, Opts), + auto_export_vars = proplists:get_bool(auto_export_vars, + Opts), + new_guard_tests = proplists:get_bool(new_guard_tests, + Opts), + old_guard_tests = proplists:get_bool(old_guard_tests, + Opts)}, + #st{used = sets:from_list(Roots), + imported = sets:new(), + functions = Names, + rename = dict:from_list([X || {F1, F2} = X <- Rename, + is_remote_name(F1), + is_atom_pair(F2)])}). + +loop([F | Work], Seen0, Defs0, Env, St0) -> + case sets:is_element(F, Seen0) of + true -> + loop(Work, Seen0, Defs0, Env, St0); + false -> + Seen1 = sets:add_element(F, Seen0), + case dict:find(F, Defs0) of + {ok, {Form, Fs}} -> + Vars = erl_syntax_lib:variables(Form), + Form1 = erl_syntax_lib:annotate_bindings(Form, []), + {Form2, St1} = visit(Form1, Env#env{current = F}, + St0#st{varc = 1, + used = sets:new(), + vars = Vars, + new_forms = []}), + Fs1 = St1#st.new_forms ++ Fs, + Defs1 = dict:store(F, {Form2, Fs1}, Defs0), + Used = St1#st.used, + Work1 = sets:to_list(Used) ++ Work, + St2 = St1#st{used = sets:union(Used, St0#st.used)}, + loop(Work1, Seen1, Defs1, Env, St2); + error -> + %% Quietly ignore any names that have no definition. + loop(Work, Seen1, Defs0, Env, St0) + end + end; +loop([], _, Defs, _, St) -> + {St#st.functions, St#st.used, St#st.imported, Defs}. 
+ +visit(Tree, Env, St0) -> + case erl_syntax:type(Tree) of + application -> + visit_application(Tree, Env, St0); + infix_expr -> + visit_infix_expr(Tree, Env, St0); + prefix_expr -> + visit_prefix_expr(Tree, Env, St0); + implicit_fun -> + visit_implicit_fun(Tree, Env, St0); + clause -> + visit_clause(Tree, Env, St0); + list_comp -> + visit_list_comp(Tree, Env, St0); + match_expr -> + visit_match_expr(Tree, Env, St0); + _ -> + visit_other(Tree, Env, St0) + end. + +visit_other(Tree, Env, St) -> + F = fun (T, S) -> visit(T, Env, S) end, + erl_syntax_lib:mapfold_subtrees(F, St, Tree). + +visit_list(Ts, Env, St0) -> + lists:mapfoldl(fun (T, S) -> visit(T, Env, S) end, St0, Ts). + +visit_implicit_fun(Tree, _Env, St0) -> + F = erl_syntax:implicit_fun_name(Tree), + case catch {ok, erl_syntax_lib:analyze_function_name(F)} of + {ok, {Name, Arity} = N} + when is_atom(Name), is_integer(Arity) -> + Used = sets:add_element(N, St0#st.used), + {Tree, St0#st{used = Used}}; + _ -> + %% symbolic funs do not count as uses of a function + {Tree, St0} + end. + +visit_clause(Tree, Env, St0) -> + %% We do not visit the patterns (for now, anyway). + Ps = erl_syntax:clause_patterns(Tree), + {G, St1} = case erl_syntax:clause_guard(Tree) of + none -> + {none, St0}; + G0 -> + visit(G0, Env#env{context = guard_test}, St0) + end, + {B, St2} = visit_list(erl_syntax:clause_body(Tree), Env, St1), + {rewrite(Tree, erl_syntax:clause(Ps, G, B)), St2}. + +visit_infix_expr(Tree, #env{context = guard_test}, St0) -> + %% Detect transition from guard test to guard expression. + visit_other(Tree, #env{context = guard_expr, file = ""}, St0); +visit_infix_expr(Tree, Env, St0) -> + visit_other(Tree, Env, St0). + +visit_prefix_expr(Tree, #env{context = guard_test}, St0) -> + %% Detect transition from guard test to guard expression. + visit_other(Tree, #env{context = guard_expr, file = ""}, St0); +visit_prefix_expr(Tree, Env, St0) -> + visit_other(Tree, Env, St0). + +visit_application(Tree, Env, St0) -> + Env1 = case Env of + #env{context = guard_test} -> + Env#env{context = guard_expr}; + _ -> + Env + end, + {F, St1} = visit(erl_syntax:application_operator(Tree), Env1, St0), + {As, St2} = visit_list(erl_syntax:application_arguments(Tree), Env1, + St1), + case erl_syntax:type(F) of + atom -> + visit_atom_application(F, As, Tree, Env, St2); + implicit_fun -> + visit_named_fun_application(F, As, Tree, Env, St2); + fun_expr -> + visit_lambda_application(F, As, Tree, Env, St2); + _ -> + visit_nonlocal_application(F, As, Tree, Env, St2) + end. + +visit_application_final(F, As, Tree, St0) -> + {rewrite(Tree, erl_syntax:application(F, As)), St0}. + +revisit_application(F, As, Tree, Env, St0) -> + visit(rewrite(Tree, erl_syntax:application(F, As)), Env, St0). + +visit_atom_application(F, As, Tree, #env{context = guard_test} = Env, + St0) -> + N = erl_syntax:atom_value(F), + A = length(As), + N1 = case Env#env.old_guard_tests of + true -> + reverse_guard_test(N, A); + false -> + case Env#env.new_guard_tests of + true -> + rewrite_guard_test(N, A); + false -> + N + end + end, + if N1 =/= N -> + report({Env#env.file, erl_syntax:get_pos(F), + "changing guard test `~w' to `~w'."}, + [N, N1], Env#env.verbosity); + true -> + ok + end, + %% No need to revisit here. + F1 = rewrite(F, erl_syntax:atom(N1)), + visit_application_final(F1, As, Tree, St0); +visit_atom_application(F, As, Tree, #env{context = guard_expr}, St0) -> + %% Atom applications in guard expressions are never local calls. 
+ visit_application_final(F, As, Tree, St0); +visit_atom_application(F, As, Tree, Env, St0) -> + N = {erl_syntax:atom_value(F), length(As)}, + case is_auto_imported(N) of + true -> + visit_bif_call(N, F, As, Tree, Env, St0); + false -> + case is_imported(N, Env) of + true -> + visit_import_application(N, F, As, Tree, Env, St0); + false -> + Used = sets:add_element(N, St0#st.used), + visit_application_final(F, As, Tree, + St0#st{used = Used}) + end + end. + +visit_import_application({N, A} = Name, F, As, Tree, Env, St0) -> + M = dict:fetch(Name, Env#env.imports), + Expand = case Env#env.no_imports of + true -> + true; + false -> + auto_expand_import({M, N, A}, St0) + end, + case Expand of + true -> + report({Env#env.file, erl_syntax:get_pos(F), + "expanding call to imported function `~w:~w/~w'."}, + [M, N, A], Env#env.verbosity), + F1 = erl_syntax:module_qualifier(erl_syntax:atom(M), + erl_syntax:atom(N)), + revisit_application(rewrite(F, F1), As, Tree, Env, St0); + false -> + Is = sets:add_element(Name, St0#st.imported), + visit_application_final(F, As, Tree, St0#st{imported = Is}) + end. + +visit_bif_call({apply, 2}, F, [E, Args] = As, Tree, Env, St0) -> + case erl_syntax:is_proper_list(Args) of + true -> + report({Env#env.file, erl_syntax:get_pos(F), + "changing use of `apply/2' " + "to direct function call."}, + [], Env#env.verbosity), + As1 = erl_syntax:list_elements(Args), + revisit_application(E, As1, Tree, Env, St0); + false -> + visit_application_final(F, As, Tree, St0) + end; +visit_bif_call({apply, 3}, F, [M, N, Args] = As, Tree, Env, St0) -> + case erl_syntax:is_proper_list(Args) of + true -> + report({Env#env.file, erl_syntax:get_pos(F), + "changing use of `apply/3' " + "to direct remote call."}, + [], Env#env.verbosity), + F1 = rewrite(F, erl_syntax:module_qualifier(M, N)), + As1 = erl_syntax:list_elements(Args), + visit_nonlocal_application(F1, As1, Tree, Env, St0); + false -> + visit_application_final(F, As, Tree, St0) + end; +visit_bif_call({spawn, 3} = N, F, [_, _, _] = As, Tree, Env, St0) -> + visit_spawn_call(N, F, [], As, Tree, Env, St0); +visit_bif_call({spawn_link, 3} = N, F, [_, _, _] = As, Tree, Env, + St0) -> + visit_spawn_call(N, F, [], As, Tree, Env, St0); +visit_bif_call({spawn, 4} = N, F, [A | [_, _, _] = As], Tree, Env, + St0) -> + visit_spawn_call(N, F, [A], As, Tree, Env, St0); +visit_bif_call({spawn_link, 4} = N, F, [A | [_, _, _] = As], Tree, Env, + St0) -> + visit_spawn_call(N, F, [A], As, Tree, Env, St0); +visit_bif_call(_, F, As, Tree, _Env, St0) -> + visit_application_final(F, As, Tree, St0). + +visit_spawn_call({N, A}, F, Ps, [A1, A2, A3] = As, Tree, + #env{spawn_funs = true} = Env, St0) -> + case erl_syntax:is_proper_list(A3) of + true -> + report({Env#env.file, erl_syntax:get_pos(F), + "changing use of `~w/~w' to `~w/~w' with a fun."}, + [N, A, N, 1 + length(Ps)], Env#env.verbosity), + F1 = case erl_syntax:is_atom(A1, Env#env.module) of + true -> + A2; % calling self + false -> + clone(A1, + erl_syntax:module_qualifier(A1, A2)) + end, + %% Need to do some scoping tricks here to make sure the + %% arguments are evaluated by the parent, not by the spawned + %% process. 
+ As1 = erl_syntax:list_elements(A3), + {Vs, St1} = new_variables(length(As1), St0), + E1 = clone(F1, erl_syntax:application(F1, Vs)), + C1 = clone(E1, erl_syntax:clause([], [E1])), + E2 = clone(C1, erl_syntax:fun_expr([C1])), + C2 = clone(E2, erl_syntax:clause(Vs, [], [E2])), + E3 = clone(C2, erl_syntax:fun_expr([C2])), + E4 = clone(E3, erl_syntax:application(E3, As1)), + E5 = erl_syntax_lib:annotate_bindings(E4, get_env(A1)), + {E6, St2} = visit(E5, Env, St1), + F2 = rewrite(F, erl_syntax:atom(N)), + visit_nonlocal_application(F2, Ps ++ [E6], Tree, Env, St2); + false -> + visit_application_final(F, Ps ++ As, Tree, St0) + end; +visit_spawn_call(_, F, Ps, As, Tree, _Env, St0) -> + visit_application_final(F, Ps ++ As, Tree, St0). + +visit_named_fun_application(F, As, Tree, Env, St0) -> + Name = erl_syntax:implicit_fun_name(F), + case catch {ok, erl_syntax_lib:analyze_function_name(Name)} of + {ok, {A, N}} when is_atom(A), is_integer(N), N =:= length(As) -> + case is_nonlocal({A, N}, Env) of + true -> + %% Making this a direct call would be an error. + visit_application_final(F, As, Tree, St0); + false -> + report({Env#env.file, erl_syntax:get_pos(F), + "changing application of implicit fun " + "to direct local call."}, + [], Env#env.verbosity), + Used = sets:add_element({A, N}, St0#st.used), + F1 = rewrite(F, erl_syntax:atom(A)), + revisit_application(F1, As, Tree, Env, + St0#st{used = Used}) + end; + _ -> + visit_application_final(F, As, Tree, St0) + end. + +visit_lambda_application(F, As, Tree, Env, St0) -> + A = erl_syntax:fun_expr_arity(F), + case A =:= length(As) of + true -> + report({Env#env.file, erl_syntax:get_pos(F), + "changing application of fun-expression " + "to local function call."}, + [], Env#env.verbosity), + {Base, _} = Env#env.current, + Free = [erl_syntax:variable(V) || V <- get_free_vars(F)], + N = length(Free), + A1 = A + N, + {Name, St1} = new_fname({Base, A1}, St0), + Cs = augment_clauses(erl_syntax:fun_expr_clauses(F), Free), + F1 = erl_syntax:atom(Name), + New = rewrite(F, erl_syntax:function(F1, Cs)), + Used = sets:add_element({Name, A1}, St1#st.used), + Forms = [New | St1#st.new_forms], + St2 = St1#st{new_forms = Forms, used = Used}, + visit_application_final(F1, As ++ Free, Tree, St2); + false -> + warn({Env#env.file, erl_syntax:get_pos(F), + "arity mismatch in fun-expression application."}, + [], Env#env.verbosity), + visit_application_final(F, As, Tree, St0) + end. + +augment_clauses(Cs, Vs) -> + [begin + Ps = erl_syntax:clause_patterns(C), + G = erl_syntax:clause_guard(C), + Es = erl_syntax:clause_body(C), + rewrite(C, erl_syntax:clause(Ps ++ Vs, G, Es)) + end + || C <- Cs]. + +visit_nonlocal_application(F, As, Tree, Env, St0) -> + case erl_syntax:type(F) of + tuple -> + case erl_syntax:tuple_elements(F) of + [X1, X2] -> + report({Env#env.file, erl_syntax:get_pos(F), + "changing application of 2-tuple " + "to direct remote call."}, + [], Env#env.verbosity), + F1 = erl_syntax:module_qualifier(X1, X2), + revisit_application(rewrite(F, F1), As, Tree, Env, + St0); + _ -> + visit_application_final(F, As, Tree, St0) + end; + module_qualifier -> + case catch {ok, erl_syntax_lib:analyze_function_name(F)} of + {ok, {M, N}} when is_atom(M), is_atom(N) -> + visit_remote_application({M, N, length(As)}, F, As, + Tree, Env, St0); + _ -> + visit_application_final(F, As, Tree, St0) + end; + _ -> + visit_application_final(F, As, Tree, St0) + end. 
+ +%% --- lists:append/2 and lists:subtract/2 --- +visit_remote_application({lists, append, 2}, F, [A1, A2], Tree, Env, + St0) -> + report({Env#env.file, erl_syntax:get_pos(F), + "replacing call to `lists:append/2' " + "with the `++' operator."}, + [], Env#env.verbosity), + Tree1 = erl_syntax:infix_expr(A1, erl_syntax:operator('++'), A2), + visit(rewrite(Tree, Tree1), Env, St0); +visit_remote_application({lists, subtract, 2}, F, [A1, A2], Tree, Env, + St0) -> + report({Env#env.file, erl_syntax:get_pos(F), + "replacing call to `lists:subtract/2' " + "with the `--' operator."}, + [], Env#env.verbosity), + Tree1 = erl_syntax:infix_expr(A1, erl_syntax:operator('--'), A2), + visit(rewrite(Tree, Tree1), Env, St0); +%% --- lists:map/2 and lists:filter/2 --- +visit_remote_application({lists, filter, 2}, F, [A1, A2] = As, Tree, + Env, St0) -> + case Env#env.auto_list_comp + and (erl_syntax:type(A1) =/= variable) + and (get_var_exports(A1) =:= []) + and (get_var_exports(A2) =:= []) of + true -> + report({Env#env.file, erl_syntax:get_pos(F), + "replacing call to `lists:filter/2' " + "with a list comprehension."}, + [], Env#env.verbosity), + {V, St1} = new_variable(St0), + G = clone(A2, erl_syntax:generator(V, A2)), + T = clone(A1, erl_syntax:application(A1, [V])), + L = erl_syntax:list_comp(V, [G, T]), + L1 = erl_syntax_lib:annotate_bindings(L, get_env(Tree)), + visit(rewrite(Tree, L1), Env, St1); + false -> + visit_application_final(F, As, Tree, St0) + end; +visit_remote_application({lists, map, 2}, F, [A1, A2] = As, Tree, Env, + St0) -> + case Env#env.auto_list_comp + and (erl_syntax:type(A1) =/= variable) + and (get_var_exports(A1) =:= []) + and (get_var_exports(A2) =:= []) of + true -> + report({Env#env.file, erl_syntax:get_pos(F), + "replacing call to `lists:map/2' " + "with a list comprehension."}, + [], Env#env.verbosity), + {V, St1} = new_variable(St0), + T = clone(A1, erl_syntax:application(A1, [V])), + G = clone(A2, erl_syntax:generator(V, A2)), + L = erl_syntax:list_comp(T, [G]), + L1 = erl_syntax_lib:annotate_bindings(L, get_env(Tree)), + visit(rewrite(Tree, L1), Env, St1); + false -> + visit_application_final(F, As, Tree, St0) + end; +%% --- all other functions --- +visit_remote_application({M, N, A} = Name, F, As, Tree, Env, St) -> + case is_auto_imported(Name) of + true -> + %% We don't remove the qualifier - it might be there for the + %% sake of clarity. + visit_bif_call({N, A}, F, As, Tree, Env, St); + false -> + case rename_remote_call(Name, St) of + {M1, N1} -> + report({Env#env.file, erl_syntax:get_pos(F), + "updating obsolete call to `~w:~w/~w' " + "to use `~w:~w/~w' instead."}, + [M, N, A, M1, N1, A], Env#env.verbosity), + M2 = erl_syntax:atom(M1), + N2 = erl_syntax:atom(N1), + F1 = erl_syntax:module_qualifier(M2, N2), + revisit_application(rewrite(F, F1), As, Tree, Env, + St); + false -> + visit_application_final(F, As, Tree, St) + end + end. + +-spec auto_expand_import(mfa(), #st{}) -> boolean(). + +auto_expand_import({lists, append, 2}, _St) -> true; +auto_expand_import({lists, subtract, 2}, _St) -> true; +auto_expand_import({lists, filter, 2}, _St) -> true; +auto_expand_import({lists, map, 2}, _St) -> true; +auto_expand_import(Name, St) -> + case is_auto_imported(Name) of + true -> + true; + false -> + rename_remote_call(Name, St) =/= false + end. 
+ +visit_list_comp(Tree, Env, St0) -> + Es = erl_syntax:list_comp_body(Tree), + {Es1, St1} = visit_list_comp_body(Es, Env, St0), + {T, St2} = visit(erl_syntax:list_comp_template(Tree), Env, St1), + {rewrite(Tree, erl_syntax:list_comp(T, Es1)), St2}. + +visit_list_comp_body_join(Env) -> + fun (E, St0) -> + case is_generator(E) of + true -> + visit_generator(E, Env, St0); + false -> + visit_filter(E, Env, St0) + end + end. + +visit_list_comp_body(Es, Env, St0) -> + lists:mapfoldl(visit_list_comp_body_join(Env), St0, Es). + +%% 'visit_filter' also handles uninteresting generators. + +visit_filter(E, Env, St0) -> + visit(E, Env, St0). + +%% "interesting" generators have the form V <- [V || ...]; this can be +%% unfolded as long as no bindings become erroneously shadowed. + +visit_generator(G, Env, St0) -> + P = erl_syntax:generator_pattern(G), + case erl_syntax:type(P) of + variable -> + B = erl_syntax:generator_body(G), + case erl_syntax:type(B) of + list_comp -> + T = erl_syntax:list_comp_template(B), + case erl_syntax:type(T) of + variable -> + visit_generator_1(G, Env, St0); + _ -> + visit_filter(G, Env, St0) + end; + _ -> + visit_filter(G, Env, St0) + end; + _ -> + visit_filter(G, Env, St0) + end. + +visit_generator_1(G, Env, St0) -> + recommend({Env#env.file, erl_syntax:get_pos(G), + "unfold that this nested list comprehension can be unfolded " + "by hand to get better efficiency."}, + [], Env#env.verbosity), + visit_filter(G, Env, St0). + +visit_match_expr(Tree, Env, St0) -> + %% We do not visit the pattern (for now, anyway). + P = erl_syntax:match_expr_pattern(Tree), + {B, St1} = visit(erl_syntax:match_expr_body(Tree), Env, St0), + case erl_syntax:type(P) of + tuple -> + Ps = erl_syntax:tuple_elements(P), + case lists:all(fun is_variable/1, Ps) of + true -> + Vs = lists:sort([erl_syntax:variable_name(X) + || X <- Ps]), + case ordsets:is_set(Vs) of + true -> + Xs = get_var_exports(B), + case ordsets:intersection(Vs, Xs) of + [] -> + visit_match_body(Ps, P, B, Tree, + Env, St1); + _ -> + visit_match_expr_final(P, B, Tree, + Env, St1) + end; + false -> + visit_match_expr_final(P, B, Tree, Env, St1) + end; + false -> + visit_match_expr_final(P, B, Tree, Env, St1) + end; + _ -> + visit_match_expr_final(P, B, Tree, Env, St1) + end. + +visit_match_expr_final(P, B, Tree, _Env, St0) -> + {rewrite(Tree, erl_syntax:match_expr(P, B)), St0}. 
+ +visit_match_body(_Ps, P, B, Tree, #env{auto_export_vars = false} = Env, + St0) -> + visit_match_expr_final(P, B, Tree, Env, St0); +visit_match_body(Ps, P, B, Tree, Env, St0) -> + case erl_syntax:type(B) of + case_expr -> + Cs = erl_syntax:case_expr_clauses(B), + case multival_clauses(Cs, length(Ps), Ps) of + {true, Cs1} -> + report_export_vars(Env#env.file, + erl_syntax:get_pos(B), + "case", Env#env.verbosity), + A = erl_syntax:case_expr_argument(B), + Tree1 = erl_syntax:case_expr(A, Cs1), + {rewrite(Tree, Tree1), St0}; + false -> + visit_match_expr_final(P, B, Tree, Env, St0) + end; + if_expr -> + Cs = erl_syntax:if_expr_clauses(B), + case multival_clauses(Cs, length(Ps), Ps) of + {true, Cs1} -> + report_export_vars(Env#env.file, + erl_syntax:get_pos(B), + "if", Env#env.verbosity), + Tree1 = erl_syntax:if_expr(Cs1), + {rewrite(Tree, Tree1), St0}; + false -> + visit_match_expr_final(P, B, Tree, Env, St0) + end; + cond_expr -> + Cs = erl_syntax:cond_expr_clauses(B), + case multival_clauses(Cs, length(Ps), Ps) of + {true, Cs1} -> + report_export_vars(Env#env.file, + erl_syntax:get_pos(B), + "cond", Env#env.verbosity), + Tree1 = erl_syntax:cond_expr(Cs1), + {rewrite(Tree, Tree1), St0}; + false -> + visit_match_expr_final(P, B, Tree, Env, St0) + end; + receive_expr -> + %% Handle the timeout case as an extra clause. + As = erl_syntax:receive_expr_action(B), + C = erl_syntax:clause([], As), + Cs = erl_syntax:receive_expr_clauses(B), + case multival_clauses([C | Cs], length(Ps), Ps) of + {true, [C1 | Cs1]} -> + report_export_vars(Env#env.file, + erl_syntax:get_pos(B), + "receive", Env#env.verbosity), + T = erl_syntax:receive_expr_timeout(B), + As1 = erl_syntax:clause_body(C1), + Tree1 = erl_syntax:receive_expr(Cs1, T, As1), + {rewrite(Tree, Tree1), St0}; + false -> + visit_match_expr_final(P, B, Tree, Env, St0) + end; + _ -> + visit_match_expr_final(P, B, Tree, Env, St0) + end. + +multival_clauses(Cs, N, Vs) -> + multival_clauses(Cs, N, Vs, []). + +multival_clauses([C | Cs], N, Vs, Cs1) -> + case erl_syntax:clause_body(C) of + [] -> + false; + Es -> + E = lists:last(Es), + case erl_syntax:type(E) of + tuple -> + Ts = erl_syntax:tuple_elements(E), + if length(Ts) =:= N -> + Bs = make_matches(E, Vs, Ts), + Es1 = replace_last(Es, Bs), + Ps = erl_syntax:clause_patterns(C), + G = erl_syntax:clause_guard(C), + C1 = erl_syntax:clause(Ps, G, Es1), + multival_clauses(Cs, N, Vs, + [rewrite(C, C1) | Cs1]); + true -> + false + end; + _ -> + case erl_syntax_lib:is_fail_expr(E) of + true -> + %% We must add dummy bindings here so we + %% don't introduce compilation errors due to + %% "unsafe" variable exports. + Bs = make_matches(Vs, + erl_syntax:atom(false)), + Es1 = replace_last(Es, Bs ++ [E]), + Ps = erl_syntax:clause_patterns(C), + G = erl_syntax:clause_guard(C), + C1 = erl_syntax:clause(Ps, G, Es1), + multival_clauses(Cs, N, Vs, + [rewrite(C, C1) | Cs1]); + false -> + false + end + end + end; +multival_clauses([], _N, _Vs, Cs) -> + {true, lists:reverse(Cs)}. + +make_matches(E, Vs, Ts) -> + case make_matches(Vs, Ts) of + [] -> + []; + [B | Bs] -> + [rewrite(E, B) | Bs] % preserve comments on E (but not B) + end. + +make_matches([V | Vs], [T | Ts]) -> + [erl_syntax:match_expr(V, T) | make_matches(Vs, Ts)]; +make_matches([V | Vs], T) when T =/= [] -> + [erl_syntax:match_expr(V, T) | make_matches(Vs, T)]; +make_matches([], _) -> + []. + +rename_remote_call(F, St) -> + case dict:find(F, St#st.rename) of + error -> + rename_remote_call_1(F); + {ok, F1} -> F1 + end. 
+ +-spec rename_remote_call_1(mfa()) -> {atom(), atom()} | 'false'. + +rename_remote_call_1({dict, dict_to_list, 1}) -> {dict, to_list}; +rename_remote_call_1({dict, list_to_dict, 1}) -> {dict, from_list}; +rename_remote_call_1({erl_eval, arg_list, 2}) -> {erl_eval, expr_list}; +rename_remote_call_1({erl_eval, arg_list, 3}) -> {erl_eval, expr_list}; +rename_remote_call_1({erl_eval, seq, 2}) -> {erl_eval, exprs}; +rename_remote_call_1({erl_eval, seq, 3}) -> {erl_eval, exprs}; +rename_remote_call_1({erl_pp, seq, 1}) -> {erl_eval, seq}; +rename_remote_call_1({erl_pp, seq, 2}) -> {erl_eval, seq}; +rename_remote_call_1({erlang, info, 1}) -> {erlang, system_info}; +rename_remote_call_1({io, parse_erl_seq, 1}) -> {io, parse_erl_exprs}; +rename_remote_call_1({io, parse_erl_seq, 2}) -> {io, parse_erl_exprs}; +rename_remote_call_1({io, parse_erl_seq, 3}) -> {io, parse_erl_exprs}; +rename_remote_call_1({io, scan_erl_seq, 1}) -> {io, scan_erl_exprs}; +rename_remote_call_1({io, scan_erl_seq, 2}) -> {io, scan_erl_exprs}; +rename_remote_call_1({io, scan_erl_seq, 3}) -> {io, scan_erl_exprs}; +rename_remote_call_1({io_lib, reserved_word, 1}) -> {erl_scan, reserved_word}; +rename_remote_call_1({io_lib, scan, 1}) -> {erl_scan, string}; +rename_remote_call_1({io_lib, scan, 2}) -> {erl_scan, string}; +rename_remote_call_1({io_lib, scan, 3}) -> {erl_scan, tokens}; +rename_remote_call_1({orddict, dict_to_list, 1}) -> {orddict, to_list}; +rename_remote_call_1({orddict, list_to_dict, 1}) -> {orddict, from_list}; +rename_remote_call_1({ordsets, list_to_set, 1}) -> {ordsets, from_list}; +rename_remote_call_1({ordsets, new_set, 0}) -> {ordsets, new}; +rename_remote_call_1({ordsets, set_to_list, 1}) -> {ordsets, to_list}; +rename_remote_call_1({ordsets, subset, 2}) -> {ordsets, is_subset}; +rename_remote_call_1({sets, list_to_set, 1}) -> {sets, from_list}; +rename_remote_call_1({sets, new_set, 0}) -> {sets, new}; +rename_remote_call_1({sets, set_to_list, 1}) -> {sets, to_list}; +rename_remote_call_1({sets, subset, 2}) -> {sets, is_subset}; +rename_remote_call_1({string, index, 2}) -> {string, str}; +rename_remote_call_1({unix, cmd, 1}) -> {os, cmd}; +rename_remote_call_1(_) -> false. + +-spec rewrite_guard_test(atom(), arity()) -> atom(). + +rewrite_guard_test(atom, 1) -> is_atom; +rewrite_guard_test(binary, 1) -> is_binary; +rewrite_guard_test(constant, 1) -> is_constant; +rewrite_guard_test(float, 1) -> is_float; +rewrite_guard_test(function, 1) -> is_function; +rewrite_guard_test(function, 2) -> is_function; +rewrite_guard_test(integer, 1) -> is_integer; +rewrite_guard_test(list, 1) -> is_list; +rewrite_guard_test(number, 1) -> is_number; +rewrite_guard_test(pid, 1) -> is_pid; +rewrite_guard_test(port, 1) -> is_port; +rewrite_guard_test(reference, 1) -> is_reference; +rewrite_guard_test(tuple, 1) -> is_tuple; +rewrite_guard_test(record, 2) -> is_record; +rewrite_guard_test(record, 3) -> is_record; +rewrite_guard_test(N, _A) -> N. + +-spec reverse_guard_test(atom(), arity()) -> atom(). 
+ +reverse_guard_test(is_atom, 1) -> atom; +reverse_guard_test(is_binary, 1) -> binary; +reverse_guard_test(is_constant, 1) -> constant; +reverse_guard_test(is_float, 1) -> float; +reverse_guard_test(is_function, 1) -> function; +reverse_guard_test(is_function, 2) -> function; +reverse_guard_test(is_integer, 1) -> integer; +reverse_guard_test(is_list, 1) -> list; +reverse_guard_test(is_number, 1) -> number; +reverse_guard_test(is_pid, 1) -> pid; +reverse_guard_test(is_port, 1) -> port; +reverse_guard_test(is_reference, 1) -> reference; +reverse_guard_test(is_tuple, 1) -> tuple; +reverse_guard_test(is_record, 2) -> record; +reverse_guard_test(is_record, 3) -> record; +reverse_guard_test(N, _A) -> N. + + +%% ===================================================================== +%% Utility functions + +is_remote_name({M,F,A}) when is_atom(M), is_atom(F), is_integer(A) -> true; +is_remote_name(_) -> false. + +is_atom_pair({M,F}) when is_atom(M), is_atom(F) -> true; +is_atom_pair(_) -> false. + +replace_last([_E], Xs) -> + Xs; +replace_last([E | Es], Xs) -> + [E | replace_last(Es, Xs)]. + +is_generator(E) -> + erl_syntax:type(E) =:= generator. + +is_variable(E) -> + erl_syntax:type(E) =:= variable. + +new_variables(N, St0) when N > 0 -> + {V, St1} = new_variable(St0), + {Vs, St2} = new_variables(N - 1, St1), + {[V | Vs], St2}; +new_variables(0, St) -> + {[], St}. + +new_variable(St0) -> + Fun = fun (N) -> + list_to_atom("V" ++ integer_to_list(N)) + end, + Vs = St0#st.vars, + {Name, N} = new_name(St0#st.varc, Fun, Vs), + St1 = St0#st{varc = N + 1, vars = sets:add_element(Name, Vs)}, + {erl_syntax:variable(Name), St1}. + +new_fname({F, A}, St0) -> + Base = atom_to_list(F), + Fun = fun (N) -> + {list_to_atom(Base ++ "_" ++ integer_to_list(N)), A} + end, + Fs = St0#st.functions, + {{F1, _A} = Name, _N} = new_name(1, Fun, Fs), + {F1, St0#st{functions = sets:add_element(Name, Fs)}}. + +new_name(N, F, Set) -> + Name = F(N), + case sets:is_element(Name, Set) of + true -> + new_name(N + 1, F, Set); + false -> + {Name, N} + end. + +is_imported(F, Env) -> + dict:is_key(F, Env#env.imports). + +is_auto_imported({erlang, N, A}) -> + is_auto_imported({N, A}); +is_auto_imported({_, _N, _A}) -> + false; +is_auto_imported({N, A}) -> + erl_internal:bif(N, A). + +is_nonlocal(N, Env) -> + case is_imported(N, Env) of + true -> + true; + false -> + is_auto_imported(N) + end. + +get_var_exports(Node) -> + get_var_exports_1(erl_syntax:get_ann(Node)). + +get_var_exports_1([{bound, B} | _Bs]) -> B; +get_var_exports_1([_ | Bs]) -> get_var_exports_1(Bs); +get_var_exports_1([]) -> []. + +get_free_vars(Node) -> + get_free_vars_1(erl_syntax:get_ann(Node)). + +get_free_vars_1([{free, B} | _Bs]) -> B; +get_free_vars_1([_ | Bs]) -> get_free_vars_1(Bs); +get_free_vars_1([]) -> []. + +filename([C | T]) when is_integer(C), C > 0 -> + [C | filename(T)]; +filename([H|T]) -> + filename(H) ++ filename(T); +filename([]) -> + []; +filename(N) when is_atom(N) -> + atom_to_list(N); +filename(N) -> + report_error("bad filename: `~P'.", [N, 25]), + exit(error). + +get_env(Tree) -> + case lists:keyfind(env, 1, erl_syntax:get_ann(Tree)) of + {env, Env} -> + Env; + _ -> + [] + end. + +rewrite(Source, Target) -> + erl_syntax:copy_attrs(Source, Target). + +clone(Source, Target) -> + erl_syntax:copy_pos(Source, Target). + + +%% ===================================================================== +%% Reporting + +report_export_vars(F, L, Type, Opts) -> + report({F, L, "rewrote ~s-expression to export variables."}, + [Type], Opts). 
+ +error_read_file(Name) -> + report_error("error reading file `~ts'.", [filename(Name)]). + +error_write_file(Name) -> + report_error("error writing to file `~ts'.", [filename(Name)]). + +error_backup_file(Name) -> + report_error("could not create backup of file `~ts'.", + [filename(Name)]). + +error_open_output(Name) -> + report_error("cannot open file `~ts' for output.", [filename(Name)]). + +verbosity(Opts) -> + case proplists:get_bool(quiet, Opts) of + true -> 0; + false -> + case proplists:get_value(verbose, Opts) of + true -> 2; + N when is_integer(N) -> N; + _ -> 1 + end + end. + +report_error(D) -> + report_error(D, []). + +report_error({F, L, D}, Vs) -> + report({F, L, {error, D}}, Vs); +report_error(D, Vs) -> + report({error, D}, Vs). + +%% warn(D, N) -> +%% warn(D, [], N). + +warn({F, L, D}, Vs, N) -> + report({F, L, {warning, D}}, Vs, N); +warn(D, Vs, N) -> + report({warning, D}, Vs, N). + +recommend(D, Vs, N) -> + report({recommend, D}, Vs, N). + +verbose(D, Vs, N) -> + report(2, D, Vs, N). + +report(D, Vs) -> + report(D, Vs, 1). + +report(D, Vs, N) -> + report(1, D, Vs, N). + +report(Level, _D, _Vs, N) when is_integer(N), N < Level -> + ok; +report(_Level, D, Vs, N) when is_integer(N) -> + io:put_chars(format(D, Vs)); +report(Level, D, Vs, Options) when is_list(Options) -> + report(Level, D, Vs, verbosity(Options)). + +format({error, D}, Vs) -> + ["error: ", format(D, Vs)]; +format({warning, D}, Vs) -> + ["warning: ", format(D, Vs)]; +format({recommend, D}, Vs) -> + ["recommendation: ", format(D, Vs)]; +format({"", L, D}, Vs) when is_integer(L), L > 0 -> + [io_lib:fwrite("~w: ", [L]), format(D, Vs)]; +format({"", _L, D}, Vs) -> + format(D, Vs); +format({F, L, D}, Vs) when is_integer(L), L > 0 -> + [io_lib:fwrite("~ts:~w: ", [filename(F), L]), format(D, Vs)]; +format({F, _L, D}, Vs) -> + [io_lib:fwrite("~ts: ", [filename(F)]), format(D, Vs)]; +format(S, Vs) when is_list(S) -> + [io_lib:fwrite(S, Vs), $\n]. + +%% ===================================================================== diff --git a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl new file mode 100644 index 0000000..afb3875 --- /dev/null +++ b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl @@ -0,0 +1,73 @@ +-module(erl_tidy_prv_fmt). + +-behaviour(provider). + +-export([init/1, + do/1, + format_error/1]). + +-define(PROVIDER, fmt). +-define(DEPS, [app_discovery]). + +%% =================================================================== +%% Public API +%% =================================================================== + +-spec init(rebar_state:state()) -> {ok, rebar_state:state()}. +init(State) -> + State1 = rebar_state:add_provider(State, providers:create([{name, ?PROVIDER}, + {module, ?MODULE}, + {bare, false}, + {deps, ?DEPS}, + {example, "rebar3 fmt"}, + {short_desc, "format modules."}, + {desc, ""}, + {opts, fmt_opts()}])), + {ok, State1}. + +-spec do(rebar_state:state()) -> {ok, rebar_state:state()}. +do(State) -> + ConfigFileOpts = rebar_state:get(State, fmt_opts, []), + {CliOpts, _} = rebar_state:command_parsed_args(State), + % CLI opts take precedence over config file + Opts = rebar_utils:tup_umerge(ConfigFileOpts, CliOpts), + + ProjectApps = rebar_state:project_apps(State), + format_apps(Opts, ProjectApps), + {ok, State}. + +-spec format_error(any()) -> iolist(). +format_error(Reason) -> + io_lib:format("~p", [Reason]). 
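+
+%% Editor's note (illustrative usage only; the option values shown are an
+%% assumption, the option names come from fmt_opts/0 below): the provider
+%% registered above is invoked as
+%%
+%%     $ rebar3 fmt
+%%
+%% and per-project defaults can be supplied from rebar.config, with
+%% command-line options taking precedence over the config file:
+%%
+%%     {fmt_opts, [{test, true}, {quiet, true}]}.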
+ +format_apps(Opts, Apps) -> + lists:foreach(fun(AppInfo) -> + SrcDir = filename:join(rebar_app_info:dir(AppInfo), "src"), + rebar_log:log(info, "Formating ~s...", [rebar_app_info:name(AppInfo)]), + erl_tidy:dir(SrcDir, Opts) + end, Apps). + +fmt_opts() -> + [{test, undefined, "test", {boolean, false}, + "do not modify files"}, + {verbose, undefined, "verbose", {boolean, true}, + "progress messages will be output while the program is running, " + "unless the `quiet' option is set"}, + {quiet, undefined, "quiet", {boolean, false}, + "all information messages and warning messages will be suppressed"}, + {auto_list_comp, undefined, "auto_list_comp", {boolean, false}, + "calls to `lists:map/2' and `lists:filter/2' will be rewritten " + "using list comprehensions"}, + {keep_unused, undefined, "keep_unused", {boolean, false}, + "unused functions will not be removed from the code"}, + {new_guard_tests, undefined, "new_guard_tests", {boolean, true}, + "guard tests will be updated to use the new names, " + "e.g. `is_integer(X)' instead of `integer(X)'"}, + {old_guard_tests, undefined, "old_guard_tests", {boolean, false}, + "guard tests will be changed to use the old names " + "instead of the new ones, e.g. `integer(X)' instead of `is_integer(X)'"}, + {no_imports, undefined, "no_imports", {boolean, false}, + "all import statements will be removed " + "and calls to imported functions will be expanded " + "to explicit remote calls"} + ]. diff --git a/rebar.config b/rebar.config index 7d2a49a..8c358a8 100644 --- a/rebar.config +++ b/rebar.config @@ -10,6 +10,8 @@ warn_export_all ]}. +{plugins, [{erl_tidy_prv_fmt, ".*", {git, "git://github.com/tsloughter/erl_tidy.git", {branch, "master"}}}]}. + {deps, [ {cowboy, "2.8.0",{git, "https://github.com/ninenines/cowboy.git", {tag, "2.8.0"}}} ]}. diff --git a/src/mochijson2_fork.erl b/src/mochijson2_fork.erl index a088d9d..50ad5c9 100644 --- a/src/mochijson2_fork.erl +++ b/src/mochijson2_fork.erl @@ -65,31 +65,41 @@ %% -module(mochijson2_fork). + -author('bob@mochimedia.com'). --export([encoder/1, encode/1]). --export([decoder/1, decode/1, decode/2]). + +-export([encode/1, encoder/1]). + +-export([decode/1, decode/2, decoder/1]). %% This is a macro to placate syntax highlighters.. --define(Q, $\"). --define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset, - column=N+S#decoder.column}). --define(INC_COL(S), S#decoder{offset=1+S#decoder.offset, - column=1+S#decoder.column}). --define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset, - column=1, - line=1+S#decoder.line}). +-define(Q, $"). + +-define(ADV_COL(S, N), + S#decoder{offset = N + S#decoder.offset, + column = N + S#decoder.column}). + +-define(INC_COL(S), + S#decoder{offset = 1 + S#decoder.offset, + column = 1 + S#decoder.column}). + +-define(INC_LINE(S), + S#decoder{offset = 1 + S#decoder.offset, column = 1, + line = 1 + S#decoder.line}). + -define(INC_CHAR(S, C), - case C of - $\n -> - S#decoder{column=1, - line=1+S#decoder.line, - offset=1+S#decoder.offset}; - _ -> - S#decoder{column=1+S#decoder.column, - offset=1+S#decoder.offset} - end). + case C of + $\n -> + S#decoder{column = 1, line = 1 + S#decoder.line, + offset = 1 + S#decoder.offset}; + _ -> + S#decoder{column = 1 + S#decoder.column, + offset = 1 + S#decoder.offset} + end). + -define(IS_WHITESPACE(C), - (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)). + C =:= $\s orelse + C =:= $\t orelse C =:= $\r orelse C =:= $\n). %% -type(decoder_option() :: any()). %% -type(handler_option() :: any()). 
@@ -103,14 +113,11 @@ %% -type(json_term() :: json_string() | json_number() | json_array() | %% json_object() | json_eep18_object() | json_iolist()). --record(encoder, {handler=null, - utf8=false}). +-record(encoder, {handler = null, utf8 = false}). --record(decoder, {object_hook=null, - offset=0, - line=1, - column=1, - state=null}). +-record(decoder, + {object_hook = null, offset = 0, line = 1, column = 1, + state = null}). %% -type(utf8_option() :: boolean()). %% -type(encoder_option() :: handler_option() | utf8_option()). @@ -123,8 +130,7 @@ encoder(Options) -> %% -spec encode(json_term()) -> iolist(). %% @doc Encode the given as JSON to an iolist. -encode(Any) -> - json_encode(Any, #encoder{}). +encode(Any) -> json_encode(Any, #encoder{}). %% -spec decoder([decoder_option()]) -> function(). %% @doc Create a decoder/1 with the given options. @@ -138,47 +144,50 @@ decoder(Options) -> %% proplists, eep18 returns JSON objects as {[binary(), json_term()]}, and struct %% returns them as-is. decode(S, Options) -> - json_decode(S, parse_decoder_options(Options, #decoder{})). + json_decode(S, + parse_decoder_options(Options, #decoder{})). %% -spec decode(iolist()) -> json_term(). %% @doc Decode the given iolist to Erlang terms. -decode(S) -> - json_decode(S, #decoder{}). +decode(S) -> json_decode(S, #decoder{}). %% Internal API -parse_encoder_options([], State) -> - State; -parse_encoder_options([{handler, Handler} | Rest], State) -> - parse_encoder_options(Rest, State#encoder{handler=Handler}); +parse_encoder_options([], State) -> State; +parse_encoder_options([{handler, Handler} | Rest], + State) -> + parse_encoder_options(Rest, + State#encoder{handler = Handler}); parse_encoder_options([{utf8, Switch} | Rest], State) -> - parse_encoder_options(Rest, State#encoder{utf8=Switch}). - -parse_decoder_options([], State) -> - State; -parse_decoder_options([{object_hook, Hook} | Rest], State) -> - parse_decoder_options(Rest, State#decoder{object_hook=Hook}); + parse_encoder_options(Rest, + State#encoder{utf8 = Switch}). + +parse_decoder_options([], State) -> State; +parse_decoder_options([{object_hook, Hook} | Rest], + State) -> + parse_decoder_options(Rest, + State#decoder{object_hook = Hook}); parse_decoder_options([{format, Format} | Rest], State) - when Format =:= struct orelse Format =:= eep18 orelse Format =:= proplist -> - parse_decoder_options(Rest, State#decoder{object_hook=Format}). - -json_encode(true, _State) -> - <<"true">>; -json_encode(false, _State) -> - <<"false">>; -json_encode(null, _State) -> - <<"null">>; + when Format =:= struct orelse + Format =:= eep18 orelse Format =:= proplist -> + parse_decoder_options(Rest, + State#decoder{object_hook = Format}). 
+ +json_encode(true, _State) -> <<"true">>; +json_encode(false, _State) -> <<"false">>; +json_encode(null, _State) -> <<"null">>; json_encode(I, _State) when is_integer(I) -> integer_to_list(I); json_encode(F, _State) when is_float(F) -> mochinum_fork:digits(F); json_encode(S, State) when is_binary(S); is_atom(S) -> json_encode_string(S, State); -json_encode([{K, _}|_] = Props, State) when (K =/= struct andalso - K =/= array andalso - K =/= json) -> +json_encode([{K, _} | _] = Props, State) + when K =/= struct andalso + K =/= array andalso K =/= json -> json_encode_proplist(Props, State); -json_encode({struct, Props}, State) when is_list(Props) -> +json_encode({struct, Props}, State) + when is_list(Props) -> json_encode_proplist(Props, State); json_encode({Props}, State) when is_list(Props) -> json_encode_proplist(Props, State); @@ -186,742 +195,421 @@ json_encode({}, State) -> json_encode_proplist([], State); json_encode(Array, State) when is_list(Array) -> json_encode_array(Array, State); -json_encode({array, Array}, State) when is_list(Array) -> +json_encode({array, Array}, State) + when is_list(Array) -> json_encode_array(Array, State); -json_encode({json, IoList}, _State) -> - IoList; -json_encode(Bad, #encoder{handler=null}) -> +json_encode({json, IoList}, _State) -> IoList; +json_encode(Bad, #encoder{handler = null}) -> exit({json_encode, {bad_term, Bad}}); -json_encode(Bad, State=#encoder{handler=Handler}) -> +json_encode(Bad, State = #encoder{handler = Handler}) -> json_encode(Handler(Bad), State). -json_encode_array([], _State) -> - <<"[]">>; +json_encode_array([], _State) -> <<"[]">>; json_encode_array(L, State) -> - F = fun (O, Acc) -> - [$,, json_encode(O, State) | Acc] - end, + F = fun (O, Acc) -> [$,, json_encode(O, State) | Acc] + end, [$, | Acc1] = lists:foldl(F, "[", L), - lists:reverse([$\] | Acc1]). + lists:reverse([$] | Acc1]). -json_encode_proplist([], _State) -> - <<"{}">>; +json_encode_proplist([], _State) -> <<"{}">>; json_encode_proplist(Props, State) -> F = fun ({K, V}, Acc) -> - KS = json_encode_string(K, State), - VS = json_encode(V, State), - [$,, VS, $:, KS | Acc] - end, + KS = json_encode_string(K, State), + VS = json_encode(V, State), + [$,, VS, $:, KS | Acc] + end, [$, | Acc1] = lists:foldl(F, "{", Props), - lists:reverse([$\} | Acc1]). + lists:reverse([$} | Acc1]). json_encode_string(A, State) when is_atom(A) -> L = atom_to_list(A), case json_string_is_safe(L) of - true -> - [?Q, L, ?Q]; - false -> - json_encode_string_unicode(xmerl_ucs:from_utf8(L), State, [?Q]) + true -> [?Q, L, ?Q]; + false -> + json_encode_string_unicode(xmerl_ucs:from_utf8(L), + State, [?Q]) end; json_encode_string(B, State) when is_binary(B) -> case json_bin_is_safe(B) of - true -> - [?Q, B, ?Q]; - false -> - json_encode_string_unicode(xmerl_ucs:from_utf8(B), State, [?Q]) + true -> [?Q, B, ?Q]; + false -> + json_encode_string_unicode(xmerl_ucs:from_utf8(B), + State, [?Q]) end; json_encode_string(I, _State) when is_integer(I) -> [?Q, integer_to_list(I), ?Q]; json_encode_string(L, State) when is_list(L) -> case json_string_is_safe(L) of - true -> - [?Q, L, ?Q]; - false -> - json_encode_string_unicode(L, State, [?Q]) + true -> [?Q, L, ?Q]; + false -> json_encode_string_unicode(L, State, [?Q]) end. 
-json_string_is_safe([]) -> - true; +json_string_is_safe([]) -> true; json_string_is_safe([C | Rest]) -> case C of - ?Q -> - false; - $\\ -> - false; - $\b -> - false; - $\f -> - false; - $\n -> - false; - $\r -> - false; - $\t -> - false; - C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF -> - false; - C when C < 16#7f -> - json_string_is_safe(Rest); - _ -> - false + ?Q -> false; + $\\ -> false; + $\b -> false; + $\f -> false; + $\n -> false; + $\r -> false; + $\t -> false; + C when C >= 0, C < $\s; C >= 127, C =< 1114111 -> false; + C when C < 127 -> json_string_is_safe(Rest); + _ -> false end. -json_bin_is_safe(<<>>) -> - true; +json_bin_is_safe(<<>>) -> true; json_bin_is_safe(<>) -> case C of - ?Q -> - false; - $\\ -> - false; - $\b -> - false; - $\f -> - false; - $\n -> - false; - $\r -> - false; - $\t -> - false; - C when C >= 0, C < $\s; C >= 16#7f -> - false; - C when C < 16#7f -> - json_bin_is_safe(Rest) + ?Q -> false; + $\\ -> false; + $\b -> false; + $\f -> false; + $\n -> false; + $\r -> false; + $\t -> false; + C when C >= 0, C < $\s; C >= 127 -> false; + C when C < 127 -> json_bin_is_safe(Rest) end. json_encode_string_unicode([], _State, Acc) -> - lists:reverse([$\" | Acc]); + lists:reverse([$" | Acc]); json_encode_string_unicode([C | Cs], State, Acc) -> Acc1 = case C of - ?Q -> - [?Q, $\\ | Acc]; - %% Escaping solidus is only useful when trying to protect - %% against "" injection attacks which are only - %% possible when JSON is inserted into a HTML document - %% in-line. mochijson2 does not protect you from this, so - %% if you do insert directly into HTML then you need to - %% uncomment the following case or escape the output of encode. - %% - %% $/ -> - %% [$/, $\\ | Acc]; - %% - $\\ -> - [$\\, $\\ | Acc]; - $\b -> - [$b, $\\ | Acc]; - $\f -> - [$f, $\\ | Acc]; - $\n -> - [$n, $\\ | Acc]; - $\r -> - [$r, $\\ | Acc]; - $\t -> - [$t, $\\ | Acc]; - C when C >= 0, C < $\s -> - [unihex(C) | Acc]; - C when C >= 16#7f, C =< 16#10FFFF, State#encoder.utf8 -> - [xmerl_ucs:to_utf8(C) | Acc]; - C when C >= 16#7f, C =< 16#10FFFF, not State#encoder.utf8 -> - [unihex(C) | Acc]; - C when C < 16#7f -> - [C | Acc]; - _ -> - exit({json_encode, {bad_char, C}}) - end, + ?Q -> [?Q, $\\ | Acc]; + %% Escaping solidus is only useful when trying to protect + %% against "" injection attacks which are only + %% possible when JSON is inserted into a HTML document + %% in-line. mochijson2 does not protect you from this, so + %% if you do insert directly into HTML then you need to + %% uncomment the following case or escape the output of encode. + %% + %% $/ -> + %% [$/, $\\ | Acc]; + %% + $\\ -> [$\\, $\\ | Acc]; + $\b -> [$b, $\\ | Acc]; + $\f -> [$f, $\\ | Acc]; + $\n -> [$n, $\\ | Acc]; + $\r -> [$r, $\\ | Acc]; + $\t -> [$t, $\\ | Acc]; + C when C >= 0, C < $\s -> [unihex(C) | Acc]; + C when C >= 127, C =< 1114111, State#encoder.utf8 -> + [xmerl_ucs:to_utf8(C) | Acc]; + C when C >= 127, C =< 1114111, not State#encoder.utf8 -> + [unihex(C) | Acc]; + C when C < 127 -> [C | Acc]; + _ -> exit({json_encode, {bad_char, C}}) + end, json_encode_string_unicode(Cs, State, Acc1). -hexdigit(C) when C >= 0, C =< 9 -> - C + $0; -hexdigit(C) when C =< 15 -> - C + $a - 10. +hexdigit(C) when C >= 0, C =< 9 -> C + $0; +hexdigit(C) when C =< 15 -> C + $a - 10. 
-unihex(C) when C < 16#10000 -> +unihex(C) when C < 65536 -> <> = <>, Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]], [$\\, $u | Digits]; -unihex(C) when C =< 16#10FFFF -> - N = C - 16#10000, - S1 = 16#d800 bor ((N bsr 10) band 16#3ff), - S2 = 16#dc00 bor (N band 16#3ff), +unihex(C) when C =< 1114111 -> + N = C - 65536, + S1 = 55296 bor (N bsr 10) band 1023, + S2 = 56320 bor N band 1023, [unihex(S1), unihex(S2)]. json_decode(L, S) when is_list(L) -> json_decode(iolist_to_binary(L), S); json_decode(B, S) -> {Res, S1} = decode1(B, S), - {eof, _} = tokenize(B, S1#decoder{state=trim}), + {eof, _} = tokenize(B, S1#decoder{state = trim}), Res. -decode1(B, S=#decoder{state=null}) -> - case tokenize(B, S#decoder{state=any}) of - {{const, C}, S1} -> - {C, S1}; - {start_array, S1} -> - decode_array(B, S1); - {start_object, S1} -> - decode_object(B, S1) +decode1(B, S = #decoder{state = null}) -> + case tokenize(B, S#decoder{state = any}) of + {{const, C}, S1} -> {C, S1}; + {start_array, S1} -> decode_array(B, S1); + {start_object, S1} -> decode_object(B, S1) end. -make_object(V, #decoder{object_hook=N}) when N =:= null orelse N =:= struct -> +make_object(V, #decoder{object_hook = N}) + when N =:= null orelse N =:= struct -> V; -make_object({struct, P}, #decoder{object_hook=eep18}) -> +make_object({struct, P}, + #decoder{object_hook = eep18}) -> {P}; -make_object({struct, P}, #decoder{object_hook=proplist}) -> +make_object({struct, P}, + #decoder{object_hook = proplist}) -> P; -make_object(V, #decoder{object_hook=Hook}) -> - Hook(V). +make_object(V, #decoder{object_hook = Hook}) -> Hook(V). decode_object(B, S) -> - decode_object(B, S#decoder{state=key}, []). + decode_object(B, S#decoder{state = key}, []). -decode_object(B, S=#decoder{state=key}, Acc) -> +decode_object(B, S = #decoder{state = key}, Acc) -> case tokenize(B, S) of - {end_object, S1} -> - V = make_object({struct, lists:reverse(Acc)}, S1), - {V, S1#decoder{state=null}}; - {{const, K}, S1} -> - {colon, S2} = tokenize(B, S1), - {V, S3} = decode1(B, S2#decoder{state=null}), - decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc]) + {end_object, S1} -> + V = make_object({struct, lists:reverse(Acc)}, S1), + {V, S1#decoder{state = null}}; + {{const, K}, S1} -> + {colon, S2} = tokenize(B, S1), + {V, S3} = decode1(B, S2#decoder{state = null}), + decode_object(B, S3#decoder{state = comma}, + [{K, V} | Acc]) end; -decode_object(B, S=#decoder{state=comma}, Acc) -> +decode_object(B, S = #decoder{state = comma}, Acc) -> case tokenize(B, S) of - {end_object, S1} -> - V = make_object({struct, lists:reverse(Acc)}, S1), - {V, S1#decoder{state=null}}; - {comma, S1} -> - decode_object(B, S1#decoder{state=key}, Acc) + {end_object, S1} -> + V = make_object({struct, lists:reverse(Acc)}, S1), + {V, S1#decoder{state = null}}; + {comma, S1} -> + decode_object(B, S1#decoder{state = key}, Acc) end. decode_array(B, S) -> - decode_array(B, S#decoder{state=any}, []). + decode_array(B, S#decoder{state = any}, []). 
-decode_array(B, S=#decoder{state=any}, Acc) -> +decode_array(B, S = #decoder{state = any}, Acc) -> case tokenize(B, S) of - {end_array, S1} -> - {lists:reverse(Acc), S1#decoder{state=null}}; - {start_array, S1} -> - {Array, S2} = decode_array(B, S1), - decode_array(B, S2#decoder{state=comma}, [Array | Acc]); - {start_object, S1} -> - {Array, S2} = decode_object(B, S1), - decode_array(B, S2#decoder{state=comma}, [Array | Acc]); - {{const, Const}, S1} -> - decode_array(B, S1#decoder{state=comma}, [Const | Acc]) + {end_array, S1} -> + {lists:reverse(Acc), S1#decoder{state = null}}; + {start_array, S1} -> + {Array, S2} = decode_array(B, S1), + decode_array(B, S2#decoder{state = comma}, + [Array | Acc]); + {start_object, S1} -> + {Array, S2} = decode_object(B, S1), + decode_array(B, S2#decoder{state = comma}, + [Array | Acc]); + {{const, Const}, S1} -> + decode_array(B, S1#decoder{state = comma}, + [Const | Acc]) end; -decode_array(B, S=#decoder{state=comma}, Acc) -> +decode_array(B, S = #decoder{state = comma}, Acc) -> case tokenize(B, S) of - {end_array, S1} -> - {lists:reverse(Acc), S1#decoder{state=null}}; - {comma, S1} -> - decode_array(B, S1#decoder{state=any}, Acc) + {end_array, S1} -> + {lists:reverse(Acc), S1#decoder{state = null}}; + {comma, S1} -> + decode_array(B, S1#decoder{state = any}, Acc) end. -tokenize_string(B, S=#decoder{offset=O}) -> +tokenize_string(B, S = #decoder{offset = O}) -> case tokenize_string_fast(B, O) of - {escape, O1} -> - Length = O1 - O, - S1 = ?ADV_COL(S, Length), - <<_:O/binary, Head:Length/binary, _/binary>> = B, - tokenize_string(B, S1, lists:reverse(binary_to_list(Head))); - O1 -> - Length = O1 - O, - <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B, - {{const, String}, ?ADV_COL(S, Length + 1)} + {escape, O1} -> + Length = O1 - O, + S1 = (?ADV_COL(S, Length)), + <<_:O/binary, Head:Length/binary, _/binary>> = B, + tokenize_string(B, S1, + lists:reverse(binary_to_list(Head))); + O1 -> + Length = O1 - O, + <<_:O/binary, String:Length/binary, (?Q), _/binary>> = + B, + {{const, String}, ?ADV_COL(S, (Length + 1))} end. tokenize_string_fast(B, O) -> case B of - <<_:O/binary, ?Q, _/binary>> -> - O; - <<_:O/binary, $\\, _/binary>> -> - {escape, O}; - <<_:O/binary, C1, _/binary>> when C1 < 128 -> - tokenize_string_fast(B, 1 + O); - <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223, - C2 >= 128, C2 =< 191 -> - tokenize_string_fast(B, 2 + O); - <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239, - C2 >= 128, C2 =< 191, - C3 >= 128, C3 =< 191 -> - tokenize_string_fast(B, 3 + O); - <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244, - C2 >= 128, C2 =< 191, - C3 >= 128, C3 =< 191, - C4 >= 128, C4 =< 191 -> - tokenize_string_fast(B, 4 + O); - _ -> - throw(invalid_utf8) + <<_:O/binary, (?Q), _/binary>> -> O; + <<_:O/binary, $\\, _/binary>> -> {escape, O}; + <<_:O/binary, C1, _/binary>> when C1 < 128 -> + tokenize_string_fast(B, 1 + O); + <<_:O/binary, C1, C2, _/binary>> + when C1 >= 194, C1 =< 223, C2 >= 128, C2 =< 191 -> + tokenize_string_fast(B, 2 + O); + <<_:O/binary, C1, C2, C3, _/binary>> + when C1 >= 224, C1 =< 239, C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191 -> + tokenize_string_fast(B, 3 + O); + <<_:O/binary, C1, C2, C3, C4, _/binary>> + when C1 >= 240, C1 =< 244, C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191, C4 >= 128, C4 =< 191 -> + tokenize_string_fast(B, 4 + O); + _ -> throw(invalid_utf8) end. 
-tokenize_string(B, S=#decoder{offset=O}, Acc) -> +tokenize_string(B, S = #decoder{offset = O}, Acc) -> case B of - <<_:O/binary, ?Q, _/binary>> -> - {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)}; - <<_:O/binary, "\\\"", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]); - <<_:O/binary, "\\\\", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]); - <<_:O/binary, "\\/", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]); - <<_:O/binary, "\\b", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]); - <<_:O/binary, "\\f", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]); - <<_:O/binary, "\\n", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]); - <<_:O/binary, "\\r", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]); - <<_:O/binary, "\\t", _/binary>> -> - tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]); - <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> -> - C = erlang:list_to_integer([C3, C2, C1, C0], 16), - if C > 16#D7FF, C < 16#DC00 -> - %% coalesce UTF-16 surrogate pair - <<"\\u", D3, D2, D1, D0, _/binary>> = Rest, - D = erlang:list_to_integer([D3,D2,D1,D0], 16), - [CodePoint] = xmerl_ucs:from_utf16be(<>), - Acc1 = lists:reverse(xmerl_ucs:to_utf8(CodePoint), Acc), - tokenize_string(B, ?ADV_COL(S, 12), Acc1); - true -> - R = if C < 16#FFFE -> - xmerl_ucs:to_utf8(C); - true -> - [16#E0 + (C bsr 12), - 128+((C bsr 6) band 16#3F), - 128+(C band 16#3F)] - end, - Acc1 = lists:reverse(R, Acc), - tokenize_string(B, ?ADV_COL(S, 6), Acc1) - end; - <<_:O/binary, C1, _/binary>> when C1 < 128 -> - tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]); - <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223, - C2 >= 128, C2 =< 191 -> - tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]); - <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239, - C2 >= 128, C2 =< 191, - C3 >= 128, C3 =< 191 -> - tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]); - <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244, - C2 >= 128, C2 =< 191, - C3 >= 128, C3 =< 191, - C4 >= 128, C4 =< 191 -> - tokenize_string(B, ?ADV_COL(S, 4), [C4, C3, C2, C1 | Acc]); - _ -> - throw(invalid_utf8) + <<_:O/binary, (?Q), _/binary>> -> + {{const, iolist_to_binary(lists:reverse(Acc))}, + ?INC_COL(S)}; + <<_:O/binary, "\\\"", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$" | Acc]); + <<_:O/binary, "\\\\", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]); + <<_:O/binary, "\\/", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]); + <<_:O/binary, "\\b", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]); + <<_:O/binary, "\\f", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]); + <<_:O/binary, "\\n", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]); + <<_:O/binary, "\\r", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]); + <<_:O/binary, "\\t", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]); + <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> -> + C = erlang:list_to_integer([C3, C2, C1, C0], 16), + if C > 55295, C < 56320 -> + %% coalesce UTF-16 surrogate pair + <<"\\u", D3, D2, D1, D0, _/binary>> = Rest, + D = erlang:list_to_integer([D3, D2, D1, D0], 16), + [CodePoint] = + xmerl_ucs:from_utf16be(<>), + Acc1 = lists:reverse(xmerl_ucs:to_utf8(CodePoint), Acc), + tokenize_string(B, ?ADV_COL(S, 12), Acc1); + true -> + R = if C < 65534 -> xmerl_ucs:to_utf8(C); + true -> + [224 + (C bsr 12), 128 + (C bsr 6) band 
63, + 128 + C band 63] + end, + Acc1 = lists:reverse(R, Acc), + tokenize_string(B, ?ADV_COL(S, 6), Acc1) + end; + <<_:O/binary, C1, _/binary>> when C1 < 128 -> + tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]); + <<_:O/binary, C1, C2, _/binary>> + when C1 >= 194, C1 =< 223, C2 >= 128, C2 =< 191 -> + tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]); + <<_:O/binary, C1, C2, C3, _/binary>> + when C1 >= 224, C1 =< 239, C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191 -> + tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]); + <<_:O/binary, C1, C2, C3, C4, _/binary>> + when C1 >= 240, C1 =< 244, C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191, C4 >= 128, C4 =< 191 -> + tokenize_string(B, ?ADV_COL(S, 4), + [C4, C3, C2, C1 | Acc]); + _ -> throw(invalid_utf8) end. tokenize_number(B, S) -> case tokenize_number(B, sign, S, []) of - {{int, Int}, S1} -> - {{const, list_to_integer(Int)}, S1}; - {{float, Float}, S1} -> - {{const, list_to_float(Float)}, S1} + {{int, Int}, S1} -> {{const, list_to_integer(Int)}, S1}; + {{float, Float}, S1} -> + {{const, list_to_float(Float)}, S1} end. -tokenize_number(B, sign, S=#decoder{offset=O}, []) -> +tokenize_number(B, sign, S = #decoder{offset = O}, + []) -> case B of - <<_:O/binary, $-, _/binary>> -> - tokenize_number(B, int, ?INC_COL(S), [$-]); - _ -> - tokenize_number(B, int, S, []) + <<_:O/binary, $-, _/binary>> -> + tokenize_number(B, int, ?INC_COL(S), [$-]); + _ -> tokenize_number(B, int, S, []) end; -tokenize_number(B, int, S=#decoder{offset=O}, Acc) -> +tokenize_number(B, int, S = #decoder{offset = O}, + Acc) -> case B of - <<_:O/binary, $0, _/binary>> -> - tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]); - <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 -> - tokenize_number(B, int1, ?INC_COL(S), [C | Acc]) + <<_:O/binary, $0, _/binary>> -> + tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]); + <<_:O/binary, C, _/binary>> + when C >= $1 andalso C =< $9 -> + tokenize_number(B, int1, ?INC_COL(S), [C | Acc]) end; -tokenize_number(B, int1, S=#decoder{offset=O}, Acc) -> +tokenize_number(B, int1, S = #decoder{offset = O}, + Acc) -> case B of - <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> - tokenize_number(B, int1, ?INC_COL(S), [C | Acc]); - _ -> - tokenize_number(B, frac, S, Acc) + <<_:O/binary, C, _/binary>> + when C >= $0 andalso C =< $9 -> + tokenize_number(B, int1, ?INC_COL(S), [C | Acc]); + _ -> tokenize_number(B, frac, S, Acc) end; -tokenize_number(B, frac, S=#decoder{offset=O}, Acc) -> +tokenize_number(B, frac, S = #decoder{offset = O}, + Acc) -> case B of - <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 -> - tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. | Acc]); - <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E -> - tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. | Acc]); - _ -> - {{int, lists:reverse(Acc)}, S} + <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 -> + tokenize_number(B, frac1, ?ADV_COL(S, 2), + [C, $. | Acc]); + <<_:O/binary, E, _/binary>> + when E =:= $e orelse E =:= $E -> + tokenize_number(B, esign, ?INC_COL(S), + [$e, $0, $. 
| Acc]); + _ -> {{int, lists:reverse(Acc)}, S} end; -tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) -> +tokenize_number(B, frac1, S = #decoder{offset = O}, + Acc) -> case B of - <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> - tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]); - <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E -> - tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]); - _ -> - {{float, lists:reverse(Acc)}, S} + <<_:O/binary, C, _/binary>> + when C >= $0 andalso C =< $9 -> + tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]); + <<_:O/binary, E, _/binary>> + when E =:= $e orelse E =:= $E -> + tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]); + _ -> {{float, lists:reverse(Acc)}, S} end; -tokenize_number(B, esign, S=#decoder{offset=O}, Acc) -> +tokenize_number(B, esign, S = #decoder{offset = O}, + Acc) -> case B of - <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ -> - tokenize_number(B, eint, ?INC_COL(S), [C | Acc]); - _ -> - tokenize_number(B, eint, S, Acc) + <<_:O/binary, C, _/binary>> + when C =:= $- orelse C =:= $+ -> + tokenize_number(B, eint, ?INC_COL(S), [C | Acc]); + _ -> tokenize_number(B, eint, S, Acc) end; -tokenize_number(B, eint, S=#decoder{offset=O}, Acc) -> +tokenize_number(B, eint, S = #decoder{offset = O}, + Acc) -> case B of - <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> - tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]) + <<_:O/binary, C, _/binary>> + when C >= $0 andalso C =< $9 -> + tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]) end; -tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) -> +tokenize_number(B, eint1, S = #decoder{offset = O}, + Acc) -> case B of - <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> - tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]); - _ -> - {{float, lists:reverse(Acc)}, S} + <<_:O/binary, C, _/binary>> + when C >= $0 andalso C =< $9 -> + tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]); + _ -> {{float, lists:reverse(Acc)}, S} end. 
-tokenize(B, S=#decoder{offset=O}) -> +tokenize(B, S = #decoder{offset = O}) -> case B of - <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) -> - tokenize(B, ?INC_CHAR(S, C)); - <<_:O/binary, "{", _/binary>> -> - {start_object, ?INC_COL(S)}; - <<_:O/binary, "}", _/binary>> -> - {end_object, ?INC_COL(S)}; - <<_:O/binary, "[", _/binary>> -> - {start_array, ?INC_COL(S)}; - <<_:O/binary, "]", _/binary>> -> - {end_array, ?INC_COL(S)}; - <<_:O/binary, ",", _/binary>> -> - {comma, ?INC_COL(S)}; - <<_:O/binary, ":", _/binary>> -> - {colon, ?INC_COL(S)}; - <<_:O/binary, "null", _/binary>> -> - {{const, null}, ?ADV_COL(S, 4)}; - <<_:O/binary, "true", _/binary>> -> - {{const, true}, ?ADV_COL(S, 4)}; - <<_:O/binary, "false", _/binary>> -> - {{const, false}, ?ADV_COL(S, 5)}; - <<_:O/binary, "\"", _/binary>> -> - tokenize_string(B, ?INC_COL(S)); - <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9) - orelse C =:= $- -> - tokenize_number(B, S); - <<_:O/binary>> -> - trim = S#decoder.state, - {eof, S} + <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) -> + tokenize(B, ?INC_CHAR(S, C)); + <<_:O/binary, "{", _/binary>> -> + {start_object, ?INC_COL(S)}; + <<_:O/binary, "}", _/binary>> -> + {end_object, ?INC_COL(S)}; + <<_:O/binary, "[", _/binary>> -> + {start_array, ?INC_COL(S)}; + <<_:O/binary, "]", _/binary>> -> + {end_array, ?INC_COL(S)}; + <<_:O/binary, ",", _/binary>> -> {comma, ?INC_COL(S)}; + <<_:O/binary, ":", _/binary>> -> {colon, ?INC_COL(S)}; + <<_:O/binary, "null", _/binary>> -> + {{const, null}, ?ADV_COL(S, 4)}; + <<_:O/binary, "true", _/binary>> -> + {{const, true}, ?ADV_COL(S, 4)}; + <<_:O/binary, "false", _/binary>> -> + {{const, false}, ?ADV_COL(S, 5)}; + <<_:O/binary, "\"", _/binary>> -> + tokenize_string(B, ?INC_COL(S)); + <<_:O/binary, C, _/binary>> + when C >= $0 andalso C =< $9 orelse C =:= $- -> + tokenize_number(B, S); + <<_:O/binary>> -> trim = S#decoder.state, {eof, S} end. + %% %% Tests %% -ifdef(TEST). --include_lib("eunit/include/eunit.hrl"). +-include_lib("eunit/include/eunit.hrl"). %% testing constructs borrowed from the Yaws JSON implementation. %% Create an object from a list of Key/Value pairs. -obj_new() -> - {struct, []}. - -is_obj({struct, Props}) -> - F = fun ({K, _}) when is_binary(K) -> true end, - lists:all(F, Props). - -obj_from_list(Props) -> - Obj = {struct, Props}, - ?assert(is_obj(Obj)), - Obj. - %% Test for equivalence of Erlang terms. %% Due to arbitrary order of construction, equivalent objects might %% compare unequal as erlang terms, so we need to carefully recurse %% through aggregates (tuples and objects). -equiv({struct, Props1}, {struct, Props2}) -> - equiv_object(Props1, Props2); -equiv(L1, L2) when is_list(L1), is_list(L2) -> - equiv_list(L1, L2); -equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2; -equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2; -equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true. - %% Object representation and traversal order is unknown. %% Use the sledgehammer and sort property lists. -equiv_object(Props1, Props2) -> - L1 = lists:keysort(1, Props1), - L2 = lists:keysort(1, Props2), - Pairs = lists:zip(L1, L2), - true = lists:all(fun({{K1, V1}, {K2, V2}}) -> - equiv(K1, K2) and equiv(V1, V2) - end, Pairs). - %% Recursively compare tuple elements for equivalence. -equiv_list([], []) -> - true; -equiv_list([V1 | L1], [V2 | L2]) -> - equiv(V1, V2) andalso equiv_list(L1, L2). 
- -decode_test() -> - [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>), - <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]). - -e2j_vec_test() -> - test_one(e2j_test_vec(utf8), 1). - -test_one([], _N) -> - %% io:format("~p tests passed~n", [N-1]), - ok; -test_one([{E, J} | Rest], N) -> - %% io:format("[~p] ~p ~p~n", [N, E, J]), - true = equiv(E, decode(J)), - true = equiv(E, decode(encode(E))), - test_one(Rest, 1+N). - -e2j_test_vec(utf8) -> - [ - {1, "1"}, - {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes - {-1, "-1"}, - {-3.1416, "-3.14160"}, - {12.0e10, "1.20000e+11"}, - {1.234E+10, "1.23400e+10"}, - {-1.234E-10, "-1.23400e-10"}, - {10.0, "1.0e+01"}, - {123.456, "1.23456E+2"}, - {10.0, "1e1"}, - {<<"foo">>, "\"foo\""}, - {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""}, - {<<"">>, "\"\""}, - {<<"\n\n\n">>, "\"\\n\\n\\n\""}, - {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""}, - {obj_new(), "{}"}, - {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"}, - {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]), - "{\"foo\":\"bar\",\"baz\":123}"}, - {[], "[]"}, - {[[]], "[[]]"}, - {[1, <<"foo">>], "[1,\"foo\"]"}, - - %% json array in a json object - {obj_from_list([{<<"foo">>, [123]}]), - "{\"foo\":[123]}"}, - - %% json object in a json object - {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]), - "{\"foo\":{\"bar\":true}}"}, - - %% fold evaluation order - {obj_from_list([{<<"foo">>, []}, - {<<"bar">>, obj_from_list([{<<"baz">>, true}])}, - {<<"alice">>, <<"bob">>}]), - "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"}, - - %% json object in a json array - {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null], - "[-123,\"foo\",{\"bar\":[]},null]"} - ]. - -%% test utf8 encoding -encoder_utf8_test() -> - %% safe conversion case (default) - [34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] = - encode(<<1,"\321\202\320\265\321\201\321\202">>), - - %% raw utf8 output (optional) - Enc = mochijson2:encoder([{utf8, true}]), - [34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] = - Enc(<<1,"\321\202\320\265\321\201\321\202">>). - -input_validation_test() -> - Good = [ - {16#00A3, <>}, %% pound - {16#20AC, <>}, %% euro - {16#10196, <>} %% denarius - ], - lists:foreach(fun({CodePoint, UTF8}) -> - Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)), - Expect = decode(UTF8) - end, Good), - - Bad = [ - %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte - <>, - %% missing continuations, last byte in each should be 80-BF - <>, - <>, - <>, - %% we don't support code points > 10FFFF per RFC 3629 - <>, - %% escape characters trigger a different code path - <> - ], - lists:foreach( - fun(X) -> - ok = try decode(X) catch invalid_utf8 -> ok end, - %% could be {ucs,{bad_utf8_character_code}} or - %% {json_encode,{bad_char,_}} - {'EXIT', _} = (catch encode(X)) - end, Bad). - -inline_json_test() -> - ?assertEqual(<<"\"iodata iodata\"">>, - iolist_to_binary( - encode({json, [<<"\"iodata">>, " iodata\""]}))), - ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]}, - decode( - encode({struct, - [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))), - ok. - -big_unicode_test() -> - UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)), - ?assertEqual( - <<"\"\\ud834\\udd20\"">>, - iolist_to_binary(encode(UTF8Seq))), - ?assertEqual( - UTF8Seq, - decode(iolist_to_binary(encode(UTF8Seq)))), - ok. 
- -custom_decoder_test() -> - ?assertEqual( - {struct, [{<<"key">>, <<"value">>}]}, - (decoder([]))("{\"key\": \"value\"}")), - F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end, - ?assertEqual( - win, - (decoder([{object_hook, F}]))("{\"key\": \"value\"}")), - ok. - -atom_test() -> - %% JSON native atoms - [begin - ?assertEqual(A, decode(atom_to_list(A))), - ?assertEqual(iolist_to_binary(atom_to_list(A)), - iolist_to_binary(encode(A))) - end || A <- [true, false, null]], - %% Atom to string - ?assertEqual( - <<"\"foo\"">>, - iolist_to_binary(encode(foo))), - ?assertEqual( - <<"\"\\ud834\\udd20\"">>, - iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))), - ok. - -key_encode_test() -> - %% Some forms are accepted as keys that would not be strings in other - %% cases - ?assertEqual( - <<"{\"foo\":1}">>, - iolist_to_binary(encode({struct, [{foo, 1}]}))), - ?assertEqual( - <<"{\"foo\":1}">>, - iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))), - ?assertEqual( - <<"{\"foo\":1}">>, - iolist_to_binary(encode({struct, [{"foo", 1}]}))), - ?assertEqual( - <<"{\"foo\":1}">>, - iolist_to_binary(encode([{foo, 1}]))), - ?assertEqual( - <<"{\"foo\":1}">>, - iolist_to_binary(encode([{<<"foo">>, 1}]))), - ?assertEqual( - <<"{\"foo\":1}">>, - iolist_to_binary(encode([{"foo", 1}]))), - ?assertEqual( - <<"{\"\\ud834\\udd20\":1}">>, - iolist_to_binary( - encode({struct, [{[16#0001d120], 1}]}))), - ?assertEqual( - <<"{\"1\":1}">>, - iolist_to_binary(encode({struct, [{1, 1}]}))), - ok. - -unsafe_chars_test() -> - Chars = "\"\\\b\f\n\r\t", - [begin - ?assertEqual(false, json_string_is_safe([C])), - ?assertEqual(false, json_bin_is_safe(<>)), - ?assertEqual(<>, decode(encode(<>))) - end || C <- Chars], - ?assertEqual( - false, - json_string_is_safe([16#0001d120])), - ?assertEqual( - false, - json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))), - ?assertEqual( - [16#0001d120], - xmerl_ucs:from_utf8( - binary_to_list( - decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))), - ?assertEqual( - false, - json_string_is_safe([16#110000])), - ?assertEqual( - false, - json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))), - %% solidus can be escaped but isn't unsafe by default - ?assertEqual( - <<"/">>, - decode(<<"\"\\/\"">>)), - ok. - -int_test() -> - ?assertEqual(0, decode("0")), - ?assertEqual(1, decode("1")), - ?assertEqual(11, decode("11")), - ok. - -large_int_test() -> - ?assertEqual(<<"-2147483649214748364921474836492147483649">>, - iolist_to_binary(encode(-2147483649214748364921474836492147483649))), - ?assertEqual(<<"2147483649214748364921474836492147483649">>, - iolist_to_binary(encode(2147483649214748364921474836492147483649))), - ok. - -float_test() -> - ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))), - ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))), - ok. - -handler_test() -> - ?assertEqual( - {'EXIT',{json_encode,{bad_term,{x,y}}}}, - catch encode({x,y})), - F = fun ({x,y}) -> [] end, - ?assertEqual( - <<"[]">>, - iolist_to_binary((encoder([{handler, F}]))({x, y}))), - ok. - -encode_empty_test_() -> - [{A, ?_assertEqual(<<"{}">>, iolist_to_binary(encode(B)))} - || {A, B} <- [{"eep18 {}", {}}, - {"eep18 {[]}", {[]}}, - {"{struct, []}", {struct, []}}]]. - -encode_test_() -> - P = [{<<"k">>, <<"v">>}], - JSON = iolist_to_binary(encode({struct, P})), - [{atom_to_list(F), - ?_assertEqual(JSON, iolist_to_binary(encode(decode(JSON, [{format, F}]))))} - || F <- [struct, eep18, proplist]]. 
- -format_test_() -> - P = [{<<"k">>, <<"v">>}], - JSON = iolist_to_binary(encode({struct, P})), - [{atom_to_list(F), - ?_assertEqual(A, decode(JSON, [{format, F}]))} - || {F, A} <- [{struct, {struct, P}}, - {eep18, {P}}, - {proplist, P}]]. - -endif. diff --git a/src/mochinum_fork.erl b/src/mochinum_fork.erl index ea22766..9ee7774 100644 --- a/src/mochinum_fork.erl +++ b/src/mochinum_fork.erl @@ -10,12 +10,16 @@ %% Design and Implementation. -module(mochinum_fork). + -author("Bob Ippolito "). --export([digits/1, frexp/1, int_pow/2, int_ceil/1]). + +-export([digits/1, frexp/1, int_ceil/1, int_pow/2]). %% IEEE 754 Float exponent bias -define(FLOAT_BIAS, 1022). + -define(MIN_EXP, -1074). + -define(BIG_POW, 4503599627370496). %% External API @@ -24,36 +28,29 @@ %% @doc Returns a string that accurately represents the given integer or float %% using a conservative amount of digits. Great for generating %% human-readable output, or compact ASCII serializations for floats. -digits(N) when is_integer(N) -> - integer_to_list(N); -digits(0.0) -> - "0.0"; +digits(N) when is_integer(N) -> integer_to_list(N); +digits(0.0) -> "0.0"; digits(Float) -> {Frac1, Exp1} = frexp_int(Float), [Place0 | Digits0] = digits1(Float, Exp1, Frac1), {Place, Digits} = transform_digits(Place0, Digits0), R = insert_decimal(Place, Digits), case Float < 0 of - true -> - [$- | R]; - _ -> - R + true -> [$- | R]; + _ -> R end. %% @spec frexp(F::float()) -> {Frac::float(), Exp::float()} %% @doc Return the fractional and exponent part of an IEEE 754 double, %% equivalent to the libc function of the same name. %% F = Frac * pow(2, Exp). -frexp(F) -> - frexp1(unpack(F)). +frexp(F) -> frexp1(unpack(F)). %% @spec int_pow(X::integer(), N::integer()) -> Y::integer() %% @doc Moderately efficient way to exponentiate integers. %% int_pow(10, 2) = 100. -int_pow(_X, 0) -> - 1; -int_pow(X, N) when N > 0 -> - int_pow(X, N, 1). +int_pow(_X, 0) -> 1; +int_pow(X, N) when N > 0 -> int_pow(X, N, 1). %% @spec int_ceil(F::float()) -> integer() %% @doc Return the ceiling of F as an integer. The ceiling is defined as @@ -62,34 +59,32 @@ int_pow(X, N) when N > 0 -> %% trunc(F) + 1 when F > 0. int_ceil(X) -> T = trunc(X), - case (X - T) of - Pos when Pos > 0 -> T + 1; - _ -> T + case X - T of + Pos when Pos > 0 -> T + 1; + _ -> T end. - %% Internal API -int_pow(X, N, R) when N < 2 -> - R * X; +int_pow(X, N, R) when N < 2 -> R * X; int_pow(X, N, R) -> - int_pow(X * X, N bsr 1, case N band 1 of 1 -> R * X; 0 -> R end). + int_pow(X * X, N bsr 1, + case N band 1 of + 1 -> R * X; + 0 -> R + end). -insert_decimal(0, S) -> - "0." ++ S; +insert_decimal(0, S) -> "0." ++ S; insert_decimal(Place, S) when Place > 0 -> L = length(S), case Place - L of - 0 -> - S ++ ".0"; - N when N < 0 -> - {S0, S1} = lists:split(L + N, S), - S0 ++ "." ++ S1; - N when N < 6 -> - %% More places than digits - S ++ lists:duplicate(N, $0) ++ ".0"; - _ -> - insert_decimal_exp(Place, S) + 0 -> S ++ ".0"; + N when N < 0 -> + {S0, S1} = lists:split(L + N, S), S0 ++ "." ++ S1; + N when N < 6 -> + %% More places than digits + S ++ lists:duplicate(N, $0) ++ ".0"; + _ -> insert_decimal_exp(Place, S) end; insert_decimal(Place, S) when Place > -6 -> "0." ++ lists:duplicate(abs(Place), $0) ++ S; @@ -99,42 +94,38 @@ insert_decimal(Place, S) -> insert_decimal_exp(Place, S) -> [C | S0] = S, S1 = case S0 of - [] -> - "0"; - _ -> - S0 - end, + [] -> "0"; + _ -> S0 + end, Exp = case Place < 0 of - true -> - "e-"; - false -> - "e+" - end, - [C] ++ "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)). 
- + true -> "e-"; + false -> "e+" + end, + [C] ++ + "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)). digits1(Float, Exp, Frac) -> - Round = ((Frac band 1) =:= 0), + Round = Frac band 1 =:= 0, case Exp >= 0 of - true -> - BExp = 1 bsl Exp, - case (Frac =/= ?BIG_POW) of - true -> - scale((Frac * BExp * 2), 2, BExp, BExp, - Round, Round, Float); - false -> - scale((Frac * BExp * 4), 4, (BExp * 2), BExp, - Round, Round, Float) - end; - false -> - case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of - true -> - scale((Frac * 2), 1 bsl (1 - Exp), 1, 1, - Round, Round, Float); - false -> - scale((Frac * 4), 1 bsl (2 - Exp), 2, 1, - Round, Round, Float) - end + true -> + BExp = 1 bsl Exp, + case Frac =/= (?BIG_POW) of + true -> + scale(Frac * BExp * 2, 2, BExp, BExp, Round, Round, + Float); + false -> + scale(Frac * BExp * 4, 4, BExp * 2, BExp, Round, Round, + Float) + end; + false -> + case Exp =:= (?MIN_EXP) orelse Frac =/= (?BIG_POW) of + true -> + scale(Frac * 2, 1 bsl (1 - Exp), 1, 1, Round, Round, + Float); + false -> + scale(Frac * 4, 1 bsl (2 - Exp), 2, 1, Round, Round, + Float) + end end. scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) -> @@ -142,213 +133,95 @@ scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) -> %% Note that the scheme implementation uses a 326 element look-up table %% for int_pow(10, N) where we do not. case Est >= 0 of - true -> - fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est, - LowOk, HighOk); - false -> - Scale = int_pow(10, -Est), - fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est, - LowOk, HighOk) + true -> + fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est, + LowOk, HighOk); + false -> + Scale = int_pow(10, -Est), + fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est, + LowOk, HighOk) end. fixup(R, S, MPlus, MMinus, K, LowOk, HighOk) -> TooLow = case HighOk of - true -> - (R + MPlus) >= S; - false -> - (R + MPlus) > S - end, + true -> R + MPlus >= S; + false -> R + MPlus > S + end, case TooLow of - true -> - [(K + 1) | generate(R, S, MPlus, MMinus, LowOk, HighOk)]; - false -> - [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, HighOk)] + true -> + [K + 1 | generate(R, S, MPlus, MMinus, LowOk, HighOk)]; + false -> + [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, + HighOk)] end. generate(R0, S, MPlus, MMinus, LowOk, HighOk) -> D = R0 div S, R = R0 rem S, TC1 = case LowOk of - true -> - R =< MMinus; - false -> - R < MMinus - end, + true -> R =< MMinus; + false -> R < MMinus + end, TC2 = case HighOk of - true -> - (R + MPlus) >= S; - false -> - (R + MPlus) > S - end, + true -> R + MPlus >= S; + false -> R + MPlus > S + end, case TC1 of - false -> - case TC2 of - false -> - [D | generate(R * 10, S, MPlus * 10, MMinus * 10, - LowOk, HighOk)]; - true -> - [D + 1] - end; - true -> - case TC2 of - false -> - [D]; - true -> - case R * 2 < S of - true -> - [D]; - false -> - [D + 1] - end - end + false -> + case TC2 of + false -> + [D | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, + HighOk)]; + true -> [D + 1] + end; + true -> + case TC2 of + false -> [D]; + true -> + case R * 2 < S of + true -> [D]; + false -> [D + 1] + end + end end. unpack(Float) -> <> = <>, {Sign, Exp, Frac}. -frexp1({_Sign, 0, 0}) -> - {0.0, 0}; +frexp1({_Sign, 0, 0}) -> {0.0, 0}; frexp1({Sign, 0, Frac}) -> Exp = log2floor(Frac), - <> = <>, + <> = <>, {Frac1, -(?FLOAT_BIAS) - 52 + Exp}; frexp1({Sign, Exp, Frac}) -> - <> = <>, - {Frac1, Exp - ?FLOAT_BIAS}. - -log2floor(Int) -> - log2floor(Int, 0). + <> = <>, + {Frac1, Exp - (?FLOAT_BIAS)}. 
-log2floor(0, N) -> - N; -log2floor(Int, N) -> - log2floor(Int bsr 1, 1 + N). +log2floor(Int) -> log2floor(Int, 0). +log2floor(0, N) -> N; +log2floor(Int, N) -> log2floor(Int bsr 1, 1 + N). transform_digits(Place, [0 | Rest]) -> transform_digits(Place, Rest); transform_digits(Place, Digits) -> {Place, [$0 + D || D <- Digits]}. - frexp_int(F) -> case unpack(F) of - {_Sign, 0, Frac} -> - {Frac, ?MIN_EXP}; - {_Sign, Exp, Frac} -> - {Frac + (1 bsl 52), Exp - 53 - ?FLOAT_BIAS} + {_Sign, 0, Frac} -> {Frac, ?MIN_EXP}; + {_Sign, Exp, Frac} -> + {Frac + (1 bsl 52), Exp - 53 - (?FLOAT_BIAS)} end. %% %% Tests %% -ifdef(TEST). --include_lib("eunit/include/eunit.hrl"). - -int_ceil_test() -> - ?assertEqual(1, int_ceil(0.0001)), - ?assertEqual(0, int_ceil(0.0)), - ?assertEqual(1, int_ceil(0.99)), - ?assertEqual(1, int_ceil(1.0)), - ?assertEqual(-1, int_ceil(-1.5)), - ?assertEqual(-2, int_ceil(-2.0)), - ok. -int_pow_test() -> - ?assertEqual(1, int_pow(1, 1)), - ?assertEqual(1, int_pow(1, 0)), - ?assertEqual(1, int_pow(10, 0)), - ?assertEqual(10, int_pow(10, 1)), - ?assertEqual(100, int_pow(10, 2)), - ?assertEqual(1000, int_pow(10, 3)), - ok. - -digits_test() -> - ?assertEqual("0", - digits(0)), - ?assertEqual("0.0", - digits(0.0)), - ?assertEqual("1.0", - digits(1.0)), - ?assertEqual("-1.0", - digits(-1.0)), - ?assertEqual("0.1", - digits(0.1)), - ?assertEqual("0.01", - digits(0.01)), - ?assertEqual("0.001", - digits(0.001)), - ?assertEqual("1.0e+6", - digits(1000000.0)), - ?assertEqual("0.5", - digits(0.5)), - ?assertEqual("4503599627370496.0", - digits(4503599627370496.0)), - %% small denormalized number - %% 4.94065645841246544177e-324 =:= 5.0e-324 - <> = <<0,0,0,0,0,0,0,1>>, - ?assertEqual("5.0e-324", - digits(SmallDenorm)), - ?assertEqual(SmallDenorm, - list_to_float(digits(SmallDenorm))), - %% large denormalized number - %% 2.22507385850720088902e-308 - <> = <<0,15,255,255,255,255,255,255>>, - ?assertEqual("2.225073858507201e-308", - digits(BigDenorm)), - ?assertEqual(BigDenorm, - list_to_float(digits(BigDenorm))), - %% small normalized number - %% 2.22507385850720138309e-308 - <> = <<0,16,0,0,0,0,0,0>>, - ?assertEqual("2.2250738585072014e-308", - digits(SmallNorm)), - ?assertEqual(SmallNorm, - list_to_float(digits(SmallNorm))), - %% large normalized number - %% 1.79769313486231570815e+308 - <> = <<127,239,255,255,255,255,255,255>>, - ?assertEqual("1.7976931348623157e+308", - digits(LargeNorm)), - ?assertEqual(LargeNorm, - list_to_float(digits(LargeNorm))), - %% issue #10 - mochinum:frexp(math:pow(2, -1074)). - ?assertEqual("5.0e-324", - digits(math:pow(2, -1074))), - ok. - -frexp_test() -> - %% zero - ?assertEqual({0.0, 0}, frexp(0.0)), - %% one - ?assertEqual({0.5, 1}, frexp(1.0)), - %% negative one - ?assertEqual({-0.5, 1}, frexp(-1.0)), - %% small denormalized number - %% 4.94065645841246544177e-324 - <> = <<0,0,0,0,0,0,0,1>>, - ?assertEqual({0.5, -1073}, frexp(SmallDenorm)), - %% large denormalized number - %% 2.22507385850720088902e-308 - <> = <<0,15,255,255,255,255,255,255>>, - ?assertEqual( - {0.99999999999999978, -1022}, - frexp(BigDenorm)), - %% small normalized number - %% 2.22507385850720138309e-308 - <> = <<0,16,0,0,0,0,0,0>>, - ?assertEqual({0.5, -1021}, frexp(SmallNorm)), - %% large normalized number - %% 1.79769313486231570815e+308 - <> = <<127,239,255,255,255,255,255,255>>, - ?assertEqual( - {0.99999999999999989, 1024}, - frexp(LargeNorm)), - %% issue #10 - mochinum:frexp(math:pow(2, -1074)). - ?assertEqual( - {0.5, -1073}, - frexp(math:pow(2, -1074))), - ok. 
+-include_lib("eunit/include/eunit.hrl"). -endif. diff --git a/src/sockjs.erl b/src/sockjs.erl index a8bf0e4..dd00b97 100644 --- a/src/sockjs.erl +++ b/src/sockjs.erl @@ -1,38 +1,54 @@ -module(sockjs). --export([send/2, close/1, close/3, info/1]). --export([to_session/1, to_channel/2]). +-export([close/1, close/3, info/1, send/2]). + +-export([to_channel/2, to_session/1]). %% Send data over a connection/channel. --spec send(iodata(), sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok. +-spec send(iodata(), + sockjs_session:conn() | + sockjs_multiplex_channel:channel()) -> ok. + send(Data, Conn = {sockjs_session, _}) -> sockjs_session:send(Data, Conn); -send(Data, Channel = {sockjs_multiplex_channel, _, _}) -> - sockjs_multiplex_channel:send(Data, Channel). +send(Data, + Channel = {sockjs_multiplex_channel, _, _}) -> + sockjs_multiplex_channel:send(Data, Channel). %% Initiate a close of a connection/channel. --spec close(sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok. -close(Conn) -> - close(1000, "Normal closure", Conn). +-spec close(sockjs_session:conn() | + sockjs_multiplex_channel:channel()) -> ok. + +close(Conn) -> close(1000, "Normal closure", Conn). + +-spec close(non_neg_integer(), string(), + sockjs_session:conn() | + sockjs_multiplex_channel:channel()) -> ok. --spec close(non_neg_integer(), string(), sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok. close(Code, Reason, Conn = {sockjs_session, _}) -> sockjs_session:close(Code, Reason, Conn); -close(Code, Reason, Channel = {sockjs_multiplex_channel, _, _}) -> +close(Code, Reason, + Channel = {sockjs_multiplex_channel, _, _}) -> sockjs_multiplex_channel:close(Code, Reason, Channel). --spec info(sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> [{atom(), any()}]. +-spec info(sockjs_session:conn() | + sockjs_multiplex_channel:channel()) -> [{atom(), + any()}]. + info(Conn = {sockjs_session, _}) -> sockjs_session:info(Conn); info(Channel = {sockjs_multiplex_channel, _, _}) -> sockjs_multiplex_channel:info(Channel). %% Get the backend connection of a channel. --spec to_session(sockjs_multiplex_channel:channel()) -> sockjs_session:conn(). -to_session({sockjs_multiplex_channel, Conn, _}) -> - Conn. +-spec + to_session(sockjs_multiplex_channel:channel()) -> sockjs_session:conn(). + +to_session({sockjs_multiplex_channel, Conn, _}) -> Conn. %% Create a channel from a connection. --spec to_channel(sockjs_session:conn(), sockjs_multiplex_channel:topic()) -> sockjs_multiplex_channel:channel(). +-spec to_channel(sockjs_session:conn(), + sockjs_multiplex_channel:topic()) -> sockjs_multiplex_channel:channel(). + to_channel(Conn = {sockjs_session, _}, Topic) -> - {sockjs_multiplex_channel, Conn, Topic}. \ No newline at end of file + {sockjs_multiplex_channel, Conn, Topic}. diff --git a/src/sockjs_action.erl b/src/sockjs_action.erl index c65b012..23f072d 100644 --- a/src/sockjs_action.erl +++ b/src/sockjs_action.erl @@ -1,279 +1,330 @@ -module(sockjs_action). % none --export([welcome_screen/3, options/3, iframe/3, info_test/3]). +-export([iframe/3, info_test/3, options/3, + welcome_screen/3]). + % send --export([xhr_polling/4, xhr_streaming/4, eventsource/4, htmlfile/4, jsonp/4]). +-export([eventsource/4, htmlfile/4, jsonp/4, + xhr_polling/4, xhr_streaming/4]). + % recv --export([xhr_send/4, jsonp_send/4]). +-export([jsonp_send/4, xhr_send/4]). + % misc --export([websocket/3, rawwebsocket/3]). +-export([rawwebsocket/3, websocket/3]). -include("sockjs_internal.hrl"). 
%% -------------------------------------------------------------------------- --define(IFRAME, " - - - - - - - - -

Don't panic!

-

This is a SockJS hidden iframe. It's used for cross domain magic.

- -"). - --define(IFRAME_HTMLFILE, " - - - -

Don't panic!

- "). +-define(IFRAME, + "\n\n\n \n \n \n \n<" + "/head>\n\n

Don't panic!

\n " + "

This is a SockJS hidden iframe. " + "It's used for cross domain magic.

\n\n"). + +-define(IFRAME_HTMLFILE, + "\n\n \n \n

Don't panic!

\n " + " "). %% -------------------------------------------------------------------------- --spec welcome_screen(req(), headers(), service()) -> req(). +-spec welcome_screen(req(), headers(), + service()) -> req(). + welcome_screen(Req, Headers, _Service) -> H = [{"Content-Type", "text/plain; charset=UTF-8"}], sockjs_http:reply(200, H ++ Headers, - "Welcome to SockJS!\n", Req). + "Welcome to SockJS!\n", Req). -spec options(req(), headers(), service()) -> req(). + options(Req, Headers, _Service) -> sockjs_http:reply(204, Headers, "", Req). -spec iframe(req(), headers(), service()) -> req(). -iframe(Req, Headers, #service{sockjs_url = SockjsUrl}) -> + +iframe(Req, Headers, + #service{sockjs_url = SockjsUrl}) -> IFrame = io_lib:format(?IFRAME, [SockjsUrl]), - MD5 = "\"" ++ binary_to_list(base64:encode(erlang:md5(IFrame))) ++ "\"", + MD5 = "\"" ++ + binary_to_list(base64:encode(erlang:md5(IFrame))) ++ + "\"", {H, Req2} = sockjs_http:header('if-none-match', Req), case H of - MD5 -> sockjs_http:reply(304, Headers, "", Req2); - _ -> sockjs_http:reply( - 200, [{"Content-Type", "text/html; charset=UTF-8"}, - {"ETag", MD5}] ++ Headers, IFrame, Req2) + MD5 -> sockjs_http:reply(304, Headers, "", Req2); + _ -> + sockjs_http:reply(200, + [{"Content-Type", "text/html; charset=UTF-8"}, + {"ETag", MD5}] + ++ Headers, + IFrame, Req2) end. - -spec info_test(req(), headers(), service()) -> req(). -info_test(Req, Headers, #service{websocket = Websocket, - cookie_needed = CookieNeeded}) -> + +info_test(Req, Headers, + #service{websocket = Websocket, + cookie_needed = CookieNeeded}) -> I = [{websocket, Websocket}, - {cookie_needed, CookieNeeded}, - {origins, [<<"*:*">>]}, - {entropy, sockjs_util:rand32()}], + {cookie_needed, CookieNeeded}, {origins, [<<"*:*">>]}, + {entropy, sockjs_util:rand32()}], D = sockjs_json:encode({I}), - H = [{"Content-Type", "application/json; charset=UTF-8"}], + H = [{"Content-Type", + "application/json; charset=UTF-8"}], sockjs_http:reply(200, H ++ Headers, D, Req). %% -------------------------------------------------------------------------- --spec xhr_polling(req(), headers(), service(), session()) -> req(). +-spec xhr_polling(req(), headers(), service(), + session()) -> req(). + xhr_polling(Req, Headers, Service, Session) -> Req1 = chunk_start(Req, Headers), reply_loop(Req1, Session, 1, fun fmt_xhr/1, Service). --spec xhr_streaming(req(), headers(), service(), session()) -> req(). -xhr_streaming(Req, Headers, Service = #service{response_limit = ResponseLimit}, - Session) -> +-spec xhr_streaming(req(), headers(), service(), + session()) -> req(). + +xhr_streaming(Req, Headers, + Service = #service{response_limit = ResponseLimit}, + Session) -> Req1 = chunk_start(Req, Headers), %% IE requires 2KB prefix: %% http://blogs.msdn.com/b/ieinternals/archive/2010/04/06/comet-streaming-in-internet-explorer-with-xmlhttprequest-and-xdomainrequest.aspx - Req2 = chunk(Req1, list_to_binary(string:copies("h", 2048)), - fun fmt_xhr/1), - reply_loop(Req2, Session, ResponseLimit, fun fmt_xhr/1, Service). - --spec eventsource(req(), headers(), service(), session()) -> req(). -eventsource(Req, Headers, Service = #service{response_limit = ResponseLimit}, - SessionId) -> - Req1 = chunk_start(Req, Headers, "text/event-stream; charset=UTF-8"), + Req2 = chunk(Req1, + list_to_binary(string:copies("h", 2048)), + fun fmt_xhr/1), + reply_loop(Req2, Session, ResponseLimit, fun fmt_xhr/1, + Service). + +-spec eventsource(req(), headers(), service(), + session()) -> req(). 
+ +eventsource(Req, Headers, + Service = #service{response_limit = ResponseLimit}, + SessionId) -> + Req1 = chunk_start(Req, Headers, + "text/event-stream; charset=UTF-8"), Req2 = chunk(Req1, <<$\r, $\n>>), - reply_loop(Req2, SessionId, ResponseLimit, fun fmt_eventsource/1, Service). + reply_loop(Req2, SessionId, ResponseLimit, + fun fmt_eventsource/1, Service). +-spec htmlfile(req(), headers(), service(), + session()) -> req(). --spec htmlfile(req(), headers(), service(), session()) -> req(). -htmlfile(Req, Headers, Service = #service{response_limit = ResponseLimit}, - SessionId) -> +htmlfile(Req, Headers, + Service = #service{response_limit = ResponseLimit}, + SessionId) -> S = fun (Req1, CB) -> - Req2 = chunk_start(Req1, Headers, "text/html; charset=UTF-8"), - IFrame = iolist_to_binary(io_lib:format(?IFRAME_HTMLFILE, [CB])), - %% Safari needs at least 1024 bytes to parse the - %% website. Relevant: - %% http://code.google.com/p/browsersec/wiki/Part2#Survey_of_content_sniffing_behaviors - Padding = string:copies(" ", 1024 - size(IFrame)), - Req3 = chunk(Req2, [IFrame, Padding, <<"\r\n\r\n">>]), - reply_loop(Req3, SessionId, ResponseLimit, fun fmt_htmlfile/1, Service) - end, + Req2 = chunk_start(Req1, Headers, + "text/html; charset=UTF-8"), + IFrame = + iolist_to_binary(io_lib:format(?IFRAME_HTMLFILE, [CB])), + %% Safari needs at least 1024 bytes to parse the + %% website. Relevant: + %% http://code.google.com/p/browsersec/wiki/Part2#Survey_of_content_sniffing_behaviors + Padding = string:copies(" ", 1024 - size(IFrame)), + Req3 = chunk(Req2, [IFrame, Padding, <<"\r\n\r\n">>]), + reply_loop(Req3, SessionId, ResponseLimit, + fun fmt_htmlfile/1, Service) + end, verify_callback(Req, S). --spec jsonp(req(), headers(), service(), session()) -> req(). +-spec jsonp(req(), headers(), service(), + session()) -> req(). + jsonp(Req, Headers, Service, SessionId) -> S = fun (Req1, CB) -> - Req2 = chunk_start(Req1, Headers), - reply_loop(Req2, SessionId, 1, - fun (Body) -> fmt_jsonp(Body, CB) end, Service) - end, + Req2 = chunk_start(Req1, Headers), + reply_loop(Req2, SessionId, 1, + fun (Body) -> fmt_jsonp(Body, CB) end, Service) + end, verify_callback(Req, S). verify_callback(Req, Success) -> {CB, Req1} = sockjs_http:callback(Req), case CB of - undefined -> - sockjs_http:reply(500, [], "\"callback\" parameter required", Req1); - _ -> - Success(Req1, CB) + undefined -> + sockjs_http:reply(500, [], + "\"callback\" parameter required", Req1); + _ -> Success(Req1, CB) end. %% -------------------------------------------------------------------------- --spec xhr_send(req(), headers(), service(), session()) -> req(). +-spec xhr_send(req(), headers(), service(), + session()) -> req(). + xhr_send(Req, Headers, _Service, Session) -> {Body, Req1} = sockjs_http:body(Req), case handle_recv(Req1, Body, Session) of - {error, Req2} -> - Req2; - ok -> - H = [{"content-type", "text/plain; charset=UTF-8"}], - sockjs_http:reply(204, H ++ Headers, "", Req1) + {error, Req2} -> Req2; + ok -> + H = [{"content-type", "text/plain; charset=UTF-8"}], + sockjs_http:reply(204, H ++ Headers, "", Req1) end. --spec jsonp_send(req(), headers(), service(), session()) -> req(). +-spec jsonp_send(req(), headers(), service(), + session()) -> req(). 
+ jsonp_send(Req, Headers, _Service, Session) -> {Body, Req1} = sockjs_http:body_qs(Req), case handle_recv(Req1, Body, Session) of - {error, Req2} -> - Req2; - ok -> - H = [{"content-type", "text/plain; charset=UTF-8"}], - sockjs_http:reply(200, H ++ Headers, "ok", Req1) + {error, Req2} -> Req2; + ok -> + H = [{"content-type", "text/plain; charset=UTF-8"}], + sockjs_http:reply(200, H ++ Headers, "ok", Req1) end. handle_recv(Req, Body, Session) -> case Body of - _Any when Body =:= <<>> -> - {error, sockjs_http:reply(500, [], "Payload expected.", Req)}; - _Any -> - case sockjs_json:decode(Body) of - {ok, Decoded} when is_list(Decoded)-> - sockjs_session:received(Decoded, Session), - ok; - {error, _} -> - {error, sockjs_http:reply(500, [], - "Broken JSON encoding.", Req)} - end + _Any when Body =:= <<>> -> + {error, + sockjs_http:reply(500, [], "Payload expected.", Req)}; + _Any -> + case sockjs_json:decode(Body) of + {ok, Decoded} when is_list(Decoded) -> + sockjs_session:received(Decoded, Session), ok; + {error, _} -> + {error, + sockjs_http:reply(500, [], "Broken JSON encoding.", + Req)} + end end. %% -------------------------------------------------------------------------- --define(STILL_OPEN, {2010, "Another connection still open"}). +-define(STILL_OPEN, + {2010, "Another connection still open"}). chunk_start(Req, Headers) -> - chunk_start(Req, Headers, "application/javascript; charset=UTF-8"). + chunk_start(Req, Headers, + "application/javascript; charset=UTF-8"). + chunk_start(Req, Headers, ContentType) -> - sockjs_http:chunk_start(200, [{"Content-Type", ContentType}] ++ Headers, - Req). + sockjs_http:chunk_start(200, + [{"Content-Type", ContentType}] ++ Headers, Req). -reply_loop(Req, SessionId, ResponseLimit, Fmt, Service) -> +reply_loop(Req, SessionId, ResponseLimit, Fmt, + Service) -> Req0 = sockjs_http:hook_tcp_close(Req), case sockjs_session:reply(SessionId) of - wait -> receive - %% In Cowboy we need to capture async - %% messages from the tcp connection - - %% ie: {active, once}. - {tcp_closed, _} -> - Req0; - %% In Cowboy we may in theory get real - %% http requests, this is bad. - {tcp, _S, Data} -> - error_logger:error_msg( - "Received unexpected data on a " - "long-polling http connection: ~p. " - "Connection aborted.~n", - [Data]), - Req1 = sockjs_http:abruptly_kill(Req), - Req1; - go -> - Req1 = sockjs_http:unhook_tcp_close(Req0), - reply_loop(Req1, SessionId, ResponseLimit, - Fmt, Service) - end; - session_in_use -> Frame = sockjs_util:encode_frame({close, ?STILL_OPEN}), - chunk_end(Req0, Frame, Fmt); - {close, Frame} -> Frame1 = sockjs_util:encode_frame(Frame), - chunk_end(Req0, Frame1, Fmt); - {ok, Frame} -> Frame1 = sockjs_util:encode_frame(Frame), - Frame2 = iolist_to_binary(Frame1), - Req2 = chunk(Req0, Frame2, Fmt), - reply_loop0(Req2, SessionId, - ResponseLimit - size(Frame2), - Fmt, Service) + wait -> + receive + %% In Cowboy we need to capture async + %% messages from the tcp connection - + %% ie: {active, once}. + {tcp_closed, _} -> Req0; + %% In Cowboy we may in theory get real + %% http requests, this is bad. + {tcp, _S, Data} -> + error_logger:error_msg("Received unexpected data on a long-polling " + "http connection: ~p. 
Connection aborted.~n", + [Data]), + Req1 = sockjs_http:abruptly_kill(Req), + Req1; + go -> + Req1 = sockjs_http:unhook_tcp_close(Req0), + reply_loop(Req1, SessionId, ResponseLimit, Fmt, Service) + end; + session_in_use -> + Frame = sockjs_util:encode_frame({close, ?STILL_OPEN}), + chunk_end(Req0, Frame, Fmt); + {close, Frame} -> + Frame1 = sockjs_util:encode_frame(Frame), + chunk_end(Req0, Frame1, Fmt); + {ok, Frame} -> + Frame1 = sockjs_util:encode_frame(Frame), + Frame2 = iolist_to_binary(Frame1), + Req2 = chunk(Req0, Frame2, Fmt), + reply_loop0(Req2, SessionId, + ResponseLimit - size(Frame2), Fmt, Service) end. -reply_loop0(Req, _SessionId, ResponseLimit, _Fmt, _Service) when ResponseLimit =< 0 -> +reply_loop0(Req, _SessionId, ResponseLimit, _Fmt, + _Service) + when ResponseLimit =< 0 -> chunk_end(Req); -reply_loop0(Req, SessionId, ResponseLimit, Fmt, Service) -> +reply_loop0(Req, SessionId, ResponseLimit, Fmt, + Service) -> reply_loop(Req, SessionId, ResponseLimit, Fmt, Service). -chunk(Req, Body) -> - {_, Req1} = sockjs_http:chunk(Body, Req), - Req1. +chunk(Req, Body) -> + {_, Req1} = sockjs_http:chunk(Body, Req), Req1. + chunk(Req, Body, Fmt) -> chunk(Req, Fmt(Body)). chunk_end(Req) -> sockjs_http:chunk_end(Req). -chunk_end(Req, Body, Fmt) -> Req1 = chunk(Req, Body, Fmt), - chunk_end(Req1). + +chunk_end(Req, Body, Fmt) -> + Req1 = chunk(Req, Body, Fmt), chunk_end(Req1). -spec fmt_xhr(iodata()) -> iodata(). + fmt_xhr(Body) -> [Body, "\n"]. -spec fmt_eventsource(iodata()) -> iodata(). + fmt_eventsource(Body) -> - Escaped = sockjs_util:url_escape(binary_to_list(iolist_to_binary(Body)), - "%\r\n\0"), %% $% must be first! + Escaped = + sockjs_util:url_escape(binary_to_list(iolist_to_binary(Body)), + "%\r\n\000"), %% $% must be first! [<<"data: ">>, Escaped, <<"\r\n\r\n">>]. -spec fmt_htmlfile(iodata()) -> iodata(). + fmt_htmlfile(Body) -> Double = sockjs_json:encode(iolist_to_binary(Body)), [<<"\r\n">>]. -spec fmt_jsonp(iodata(), iodata()) -> iodata(). + fmt_jsonp(Body, Callback) -> %% Yes, JSONed twice, there isn't a a better way, we must pass %% a string back, and the script, will be evaled() by the %% browser. - [Callback, "(", sockjs_json:encode(iolist_to_binary(Body)), ");\r\n"]. + [Callback, "(", + sockjs_json:encode(iolist_to_binary(Body)), ");\r\n"]. %% -------------------------------------------------------------------------- -spec websocket(req(), headers(), service()) -> req(). + websocket(Req, Headers, Service) -> - {_Any, Req1, {R1, R2}} = sockjs_handler:is_valid_ws(Service, Req), + {_Any, Req1, {R1, R2}} = + sockjs_handler:is_valid_ws(Service, Req), case {R1, R2} of - {false, _} -> - sockjs_http:reply(400, Headers, - "Can \"Upgrade\" only to \"WebSocket\".", Req1); - {_, false} -> - sockjs_http:reply(400, Headers, - "\"Connection\" must be \"Upgrade\"", Req1); - {true, true} -> - sockjs_http:reply(400, Headers, - "This WebSocket request can't be handled.", Req1) + {false, _} -> + sockjs_http:reply(400, Headers, + "Can \"Upgrade\" only to \"WebSocket\".", Req1); + {_, false} -> + sockjs_http:reply(400, Headers, + "\"Connection\" must be \"Upgrade\"", Req1); + {true, true} -> + sockjs_http:reply(400, Headers, + "This WebSocket request can't be handled.", Req1) end. --spec rawwebsocket(req(), headers(), service()) -> req(). +-spec rawwebsocket(req(), headers(), + service()) -> req(). + rawwebsocket(Req, Headers, Service) -> websocket(Req, Headers, Service). 
diff --git a/src/sockjs_app.erl b/src/sockjs_app.erl index 1b8e77c..76da3da 100644 --- a/src/sockjs_app.erl +++ b/src/sockjs_app.erl @@ -5,10 +5,10 @@ -export([start/2, stop/1]). -spec start(_, _) -> {ok, pid()}. + start(_StartType, _StartArgs) -> - sockjs_session:init(), - sockjs_session_sup:start_link(). + sockjs_session:init(), sockjs_session_sup:start_link(). -spec stop(_) -> ok. -stop(_State) -> - ok. + +stop(_State) -> ok. diff --git a/src/sockjs_cowboy_handler.erl b/src/sockjs_cowboy_handler.erl index 6a211c7..6e7a543 100644 --- a/src/sockjs_cowboy_handler.erl +++ b/src/sockjs_cowboy_handler.erl @@ -1,93 +1,101 @@ -module(sockjs_cowboy_handler). --behaviour(cowboy_http_handler). --behaviour(cowboy_websocket_handler). + +-behaviour(cowboy_handler). %% Cowboy http callbacks -export([init/2, terminate/3]). %% Cowboy ws callbacks --export([websocket_init/3, websocket_handle/3, - websocket_info/3, websocket_terminate/3]). +-export([websocket_handle/3, websocket_info/3, + websocket_init/3, websocket_terminate/3]). -include("sockjs_internal.hrl"). %% -------------------------------------------------------------------------- init(#{ref := http} = Req, Service) -> - case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) of - {true, {cowboy, _Req1}, _Reason} -> - {upgrade, protocol, cowboy_websocket}; - {false, {cowboy, Req1}, _Reason} -> - {ok, Req1, Service} + case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) + of + {true, _Reason} -> + {upgrade, protocol, cowboy_websocket}; + {false, _Reason} -> {ok, Req, Service} end. -terminate(_Reason, _Req, _Service) -> - ok. +terminate(_Reason, _Req, _Service) -> ok. %% -------------------------------------------------------------------------- websocket_init(_TransportName, Req, - Service = #service{logger = Logger, - subproto_pref = SubProtocolPref}) -> - Req3 = case cowboy_req:header(<<"Sec-Websocket-Protocol">>, Req) of - {undefined, Req1} -> - Req1; - {SubProtocols, Req1} -> - SelectedSubProtocol = - choose_subprotocol_bin(SubProtocols, SubProtocolPref), - {ok, Req2} = cowboy_req:set_resp_header( - <<"Sec-Websocket-Protocol">>, - SelectedSubProtocol, Req1), - Req2 - end, - - Req4 = Logger(Service, {cowboy, Req3}, websocket), - - Service1 = Service#service{disconnect_delay = 5*60*1000}, - - {Info, Req5} = sockjs_handler:extract_info(Req4), - SessionPid = sockjs_session:maybe_create(undefined, Service1, Info), - {RawWebsocket, {cowboy, Req7}} = - case sockjs_handler:get_action(Service, Req5) of - {{match, WS}, Req6} when WS =:= websocket orelse - WS =:= rawwebsocket -> - {WS, Req6} - end, + Service = #service{logger = Logger, + subproto_pref = SubProtocolPref}) -> + Req1 = case cowboy_req:header('Sec-Websocket-Protocol', + Req) + of + undefined -> Req; + SubProtocols -> + SelectedSubProtocol = + choose_subprotocol_bin(SubProtocols, SubProtocolPref), + cowboy_req:set_resp_header(#{<<"Sec-Websocket-Protocol">> + => SelectedSubProtocol}, + Req) + end, + Logger(Service, {cowboy, Req1}, websocket), + Service1 = Service#service{disconnect_delay = + 5 * 60 * 1000}, + Info = sockjs_handler:extract_info(Req1), + SessionPid = sockjs_session:maybe_create(undefined, + Service1, Info), + RawWebsocket = case sockjs_handler:get_action(Service, + Req1) + of + {match, WS} + when WS =:= websocket orelse WS =:= rawwebsocket -> + WS + end, self() ! go, - {ok, Req7, {RawWebsocket, SessionPid}}. 
- -websocket_handle({text, Data}, Req, {RawWebsocket, SessionPid} = S) -> - case sockjs_ws_handler:received(RawWebsocket, SessionPid, Data) of - ok -> {ok, Req, S}; - shutdown -> {shutdown, Req, S} + {ok, Req1, {RawWebsocket, SessionPid}}. + +websocket_handle({text, Data}, Req, + {RawWebsocket, SessionPid} = S) -> + case sockjs_ws_handler:received(RawWebsocket, + SessionPid, Data) + of + ok -> {ok, Req, S}; + shutdown -> {shutdown, Req, S} end; websocket_handle(_Unknown, Req, S) -> {shutdown, Req, S}. -websocket_info(go, Req, {RawWebsocket, SessionPid} = S) -> - case sockjs_ws_handler:reply(RawWebsocket, SessionPid) of - wait -> {ok, Req, S}; - {ok, Data} -> self() ! go, - {reply, {text, Data}, Req, S}; - {close, <<>>} -> {shutdown, Req, S}; - {close, Data} -> self() ! shutdown, - {reply, {text, Data}, Req, S} +websocket_info(go, Req, + {RawWebsocket, SessionPid} = S) -> + case sockjs_ws_handler:reply(RawWebsocket, SessionPid) + of + wait -> {ok, Req, S}; + {ok, Data} -> + self() ! go, {reply, {text, Data}, Req, S}; + {close, <<>>} -> {shutdown, Req, S}; + {close, Data} -> + self() ! shutdown, {reply, {text, Data}, Req, S} end; -websocket_info(shutdown, Req, S) -> - {shutdown, Req, S}. +websocket_info(shutdown, Req, S) -> {shutdown, Req, S}. -websocket_terminate(_Reason, _Req, {RawWebsocket, SessionPid}) -> - sockjs_ws_handler:close(RawWebsocket, SessionPid), - ok. +websocket_terminate(_Reason, _Req, + {RawWebsocket, SessionPid}) -> + sockjs_ws_handler:close(RawWebsocket, SessionPid), ok. %% -------------------------------------------------------------------------- choose_subprotocol_bin(SubProtocols, Pref) -> choose_subprotocol(re:split(SubProtocols, ", *"), Pref). + choose_subprotocol(SubProtocols, undefined) -> erlang:hd(lists:reverse(lists:sort(SubProtocols))); choose_subprotocol(SubProtocols, Pref) -> - case lists:filter(fun (E) -> lists:member(E, SubProtocols) end, Pref) of - [Hd | _] -> Hd; - [] -> choose_subprotocol(SubProtocols, undefined) + case lists:filter(fun (E) -> + lists:member(E, SubProtocols) + end, + Pref) + of + [Hd | _] -> Hd; + [] -> choose_subprotocol(SubProtocols, undefined) end. diff --git a/src/sockjs_filters.erl b/src/sockjs_filters.erl index c4176bd..adf150e 100644 --- a/src/sockjs_filters.erl +++ b/src/sockjs_filters.erl @@ -1,7 +1,7 @@ -module(sockjs_filters). --export([cache_for/2, h_sid/2, h_no_cache/2, xhr_cors/2, - xhr_options_post/2, xhr_options_get/2]). +-export([cache_for/2, h_no_cache/2, h_sid/2, xhr_cors/2, + xhr_options_get/2, xhr_options_post/2]). -include("sockjs_internal.hrl"). @@ -10,60 +10,78 @@ %% -------------------------------------------------------------------------- -spec cache_for(req(), headers()) -> {headers(), req()}. + cache_for(Req, Headers) -> - Expires = calendar:gregorian_seconds_to_datetime( - calendar:datetime_to_gregorian_seconds( - calendar:now_to_datetime(now())) + ?YEAR), - H = [{"Cache-Control", "public, max-age=" ++ integer_to_list(?YEAR)}, - {"Expires", httpd_util:rfc1123_date(Expires)}], + Expires = + calendar:gregorian_seconds_to_datetime(calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(now())) + + (?YEAR)), + H = [{"Cache-Control", + "public, max-age=" ++ integer_to_list(?YEAR)}, + {"Expires", httpd_util:rfc1123_date(Expires)}], {H ++ Headers, Req}. -spec h_sid(req(), headers()) -> {headers(), req()}. + h_sid(Req, Headers) -> %% Some load balancers do sticky sessions, but only if there is %% a JSESSIONID cookie. If this cookie isn't yet set, we shall %% set it to a dumb value. 
It doesn't really matter what, as %% session information is usually added by the load balancer. - {C, Req2} = sockjs_http:jsessionid(Req), + C = sockjs_http:jsessionid(Req), H = case C of - undefined -> [{"Set-Cookie", "JSESSIONID=dummy; path=/"}]; - Jsid -> [{"Set-Cookie", "JSESSIONID=" ++ Jsid ++ "; path=/"}] - end, - {H ++ Headers, Req2}. + undefined -> + [{"Set-Cookie", "JSESSIONID=dummy; path=/"}]; + Jsid -> + [{"Set-Cookie", "JSESSIONID=" ++ Jsid ++ "; path=/"}] + end, + H ++ Headers. + +-spec h_no_cache(req(), headers()) -> {headers(), + req()}. --spec h_no_cache(req(), headers()) -> {headers(), req()}. h_no_cache(Req, Headers) -> - H = [{"Cache-Control", "no-store, no-cache, must-revalidate, max-age=0"}], + H = [{"Cache-Control", + "no-store, no-cache, must-revalidate, " + "max-age=0"}], {H ++ Headers, Req}. -spec xhr_cors(req(), headers()) -> {headers(), req()}. + xhr_cors(Req, Headers) -> - {OriginH, Req1} = sockjs_http:header('origin', Req), - Origin = case OriginH of - "null" -> "*"; - undefined -> "*"; - O -> O - end, - {HeadersH, Req2} = sockjs_http:header( - 'access-control-request-headers', Req1), + {OriginH, Req1} = sockjs_http:header(origin, Req), + Origin = case OriginH of + "null" -> "*"; + undefined -> "*"; + O -> O + end, + {HeadersH, Req2} = + sockjs_http:header('access-control-request-headers', + Req1), AllowHeaders = case HeadersH of - undefined -> []; - V -> [{"Access-Control-Allow-Headers", V}] - end, - H = [{"Access-Control-Allow-Origin", Origin}, - {"Access-Control-Allow-Credentials", "true"}], + undefined -> []; + V -> [{"Access-Control-Allow-Headers", V}] + end, + H = [{"Access-Control-Allow-Origin", Origin}, + {"Access-Control-Allow-Credentials", "true"}], {H ++ AllowHeaders ++ Headers, Req2}. --spec xhr_options_post(req(), headers()) -> {headers(), req()}. +-spec xhr_options_post(req(), headers()) -> {headers(), + req()}. + xhr_options_post(Req, Headers) -> xhr_options(Req, Headers, ["OPTIONS", "POST"]). --spec xhr_options_get(req(), headers()) -> {headers(), req()}. +-spec xhr_options_get(req(), headers()) -> {headers(), + req()}. + xhr_options_get(Req, Headers) -> xhr_options(Req, Headers, ["OPTIONS", "GET"]). --spec xhr_options(req(), headers(), list(string())) -> {headers(), req()}. +-spec xhr_options(req(), headers(), + [string()]) -> {headers(), req()}. + xhr_options(Req, Headers, Methods) -> - H = [{"Access-Control-Allow-Methods", string:join(Methods, ", ")}, - {"Access-Control-Max-Age", integer_to_list(?YEAR)}], + H = [{"Access-Control-Allow-Methods", + string:join(Methods, ", ")}, + {"Access-Control-Max-Age", integer_to_list(?YEAR)}], {H ++ Headers, Req}. diff --git a/src/sockjs_handler.erl b/src/sockjs_handler.erl index d7bada9..7ac69b8 100644 --- a/src/sockjs_handler.erl +++ b/src/sockjs_handler.erl @@ -1,88 +1,93 @@ -module(sockjs_handler). -export([init_state/4]). --export([is_valid_ws/2, get_action/2]). + +-export([get_action/2, is_valid_ws/2]). + -export([dispatch_req/2, handle_req/2]). + -export([extract_info/1]). -include("sockjs_internal.hrl"). --define(SOCKJS_URL, "//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js"). +-define(SOCKJS_URL, + "//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js"). %% -------------------------------------------------------------------------- --spec init_state(binary(), callback(), any(), list(tuple())) -> service(). +-spec init_state(binary(), callback(), any(), + [tuple()]) -> service(). 
+ init_state(Prefix, Callback, State, Options) -> #service{prefix = binary_to_list(Prefix), - callback = Callback, - state = State, - sockjs_url = - proplists:get_value(sockjs_url, Options, ?SOCKJS_URL), - websocket = - proplists:get_value(websocket, Options, true), - cookie_needed = - proplists:get_value(cookie_needed, Options, false), - disconnect_delay = - proplists:get_value(disconnect_delay, Options, 5000), - heartbeat_delay = - proplists:get_value(heartbeat_delay, Options, 25000), - response_limit = - proplists:get_value(response_limit, Options, 128*1024), - logger = - proplists:get_value(logger, Options, fun default_logger/3), - subproto_pref = - proplists:get_value(subproto_pref, Options) - }. + callback = Callback, state = State, + sockjs_url = + proplists:get_value(sockjs_url, Options, ?SOCKJS_URL), + websocket = + proplists:get_value(websocket, Options, true), + cookie_needed = + proplists:get_value(cookie_needed, Options, false), + disconnect_delay = + proplists:get_value(disconnect_delay, Options, 5000), + heartbeat_delay = + proplists:get_value(heartbeat_delay, Options, 25000), + response_limit = + proplists:get_value(response_limit, Options, + 128 * 1024), + logger = + proplists:get_value(logger, Options, + fun default_logger/3), + subproto_pref = + proplists:get_value(subproto_pref, Options)}. %% -------------------------------------------------------------------------- --spec is_valid_ws(service(), req()) -> {boolean(), req(), tuple()}. +-spec is_valid_ws(service(), req()) -> {boolean(), + tuple()}. + is_valid_ws(Service, Req) -> case get_action(Service, Req) of - {{match, WS}, Req1} when WS =:= websocket orelse - WS =:= rawwebsocket -> - valid_ws_request(Service, Req1); - {_Else, Req1} -> - {false, Req1, {}} + {{match, WS}, Req1} + when WS =:= websocket orelse WS =:= rawwebsocket -> + valid_ws_request(Service, Req1); + {_Else, _Req1} -> {false, {}} end. --spec valid_ws_request(service(), req()) -> {boolean(), req(), tuple()}. +-spec valid_ws_request(service(), req()) -> {boolean(), + tuple()}. + valid_ws_request(_Service, Req) -> - {R1, Req1} = valid_ws_upgrade(Req), - {R2, Req2} = valid_ws_connection(Req1), - {R1 and R2, Req2, {R1, R2}}. + R1 = valid_ws_upgrade(Req), + R2 = valid_ws_connection(Req), + {R1 and R2, {R1, R2}}. valid_ws_upgrade(Req) -> - case sockjs_http:header('upgrade', Req) of - {undefined, Req2} -> - {false, Req2}; - {V, Req2} -> - case string:to_lower(V) of - "websocket" -> - {true, Req2}; - _Else -> - {false, Req2} - end + case sockjs_http:header(upgrade, Req) of + undefined -> false; + {V, _Req2} -> + case string:to_lower(V) of + "websocket" -> true; + _Else -> false + end end. valid_ws_connection(Req) -> - case sockjs_http:header('connection', Req) of - {undefined, Req2} -> - {false, Req2}; - {V, Req2} -> - Vs = [string:strip(T) || - T <- string:tokens(string:to_lower(V), ",")], - {lists:member("upgrade", Vs), Req2} + case sockjs_http:header(connection, Req) of + undefined -> false; + V -> + Vs = [string:strip(T) + || T <- string:tokens(string:to_lower(V), ",")], + {lists:member("upgrade", Vs), Req} end. --spec get_action(service(), req()) -> {nomatch | {match, atom()}, req()}. +-spec get_action(service(), req()) -> nomatch | + {match, atom()}. + get_action(Service, Req) -> - {Dispatch, Req1} = dispatch_req(Service, Req), + Dispatch = dispatch_req(Service, Req), case Dispatch of - {match, {_, Action, _, _, _}} -> - {{match, Action}, Req1}; - _Else -> - {nomatch, Req1} + {match, {_, Action, _, _, _}} -> {match, Action}; + _Else -> nomatch end. 
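%% Usage sketch, not part of the patch: building a service record with
%% init_state/4 above. The prefix, the my_echo:handle/3 callback module and
%% the option values are invented for illustration.
echo_state() ->
    sockjs_handler:init_state(<<"/echo">>, fun my_echo:handle/3, [],
                              [{websocket, true},
                               {cookie_needed, false},
                               {response_limit, 128 * 1024}]).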
%% -------------------------------------------------------------------------- @@ -90,52 +95,57 @@ get_action(Service, Req) -> strip_prefix(LongPath, Prefix) -> {A, B} = lists:split(length(Prefix), LongPath), case Prefix of - A -> {ok, B}; - _Any -> {error, io_lib:format("Wrong prefix: ~p is not ~p", [A, Prefix])} + A -> {ok, B}; + _Any -> + {error, + io_lib:format("Wrong prefix: ~p is not ~p", + [A, Prefix])} end. +-type dispatch_result() :: nomatch | + {match, + {send | recv | none, atom(), server(), session(), + [atom()]}} | + {bad_method, [atom()]}. --type(dispatch_result() :: - nomatch | - {match, {send | recv | none , atom(), - server(), session(), list(atom())}} | - {bad_method, list(atom())}). +-spec dispatch_req(service(), + req()) -> dispatch_result(). --spec dispatch_req(service(), req()) -> {dispatch_result(), req()}. dispatch_req(#service{prefix = Prefix}, Req) -> Method = sockjs_http:method(Req), LongPath = sockjs_http:path(Req), {ok, PathRemainder} = strip_prefix(LongPath, Prefix), - {dispatch(Method, PathRemainder), Req}. + dispatch(Method, PathRemainder). + +-spec dispatch(atom(), + nonempty_string()) -> dispatch_result(). --spec dispatch(atom(), nonempty_string()) -> dispatch_result(). dispatch(Method, Path) -> - lists:foldl( - fun ({Match, MethodFilters}, nomatch) -> - case Match(Path) of - nomatch -> - nomatch; - [Server, Session] -> - case lists:keyfind(Method, 1, MethodFilters) of - false -> - Methods = [ K || - {K, _, _, _} <- MethodFilters], - {bad_method, Methods}; - {_Method, Type, A, Filters} -> - {match, {Type, A, Server, Session, Filters}} - end - end; - (_, Result) -> - Result - end, nomatch, filters()). + lists:foldl(fun ({Match, MethodFilters}, nomatch) -> + case Match(Path) of + nomatch -> nomatch; + [Server, Session] -> + case lists:keyfind(Method, 1, MethodFilters) of + false -> + Methods = [K + || {K, _, _, _} + <- MethodFilters], + {bad_method, Methods}; + {_Method, Type, A, Filters} -> + {match, {Type, A, Server, Session, Filters}} + end + end; + (_, Result) -> Result + end, + nomatch, filters()). %% -------------------------------------------------------------------------- filters() -> - OptsFilters = [h_sid, xhr_cors, cache_for, xhr_options_post], + %OptsFilters = [h_sid, xhr_cors, cache_for, xhr_options_post], %% websocket does not actually go via handle_req/3 but we need %% something in dispatch/2 - [{t("/websocket"), [{'GET', none, websocket, []}]}, + [{t("/websocket"), [{'GET', none, websocket, []}]}, %{t("/xhr_send"), [{'POST', recv, xhr_send, [h_sid, h_no_cache, xhr_cors]}, % {'OPTIONS', none, options, OptsFilters}]}, %{t("/xhr"), [{'POST', send, xhr_polling, [h_sid, h_no_cache, xhr_cors]}, @@ -146,85 +156,97 @@ filters() -> %{t("/jsonp"), [{'GET', send, jsonp, [h_sid, h_no_cache]}]}, %{t("/eventsource"), [{'GET', send, eventsource, [h_sid, h_no_cache]}]}, %{t("/htmlfile"), [{'GET', send, htmlfile, [h_sid, h_no_cache]}]}, - {p("/websocket"), [{'GET', none, rawwebsocket, []}]}, - {p(""), [{'GET', none, welcome_screen, []}]}, + {p("/websocket"), [{'GET', none, rawwebsocket, []}]}, + {p(""), [{'GET', none, welcome_screen, []}]}, %{p("/iframe[0-9-.a-z_]*.html"), [{'GET', none, iframe, [cache_for]}]}, - {p("/info"), [{'GET', none, info_test, [h_no_cache, xhr_cors]}, - {'OPTIONS', none, options, [h_sid, xhr_cors, cache_for, xhr_options_get]}]} - ]. + {p("/info"), + [{'GET', none, info_test, [h_no_cache, xhr_cors]}, + {'OPTIONS', none, options, + [h_sid, xhr_cors, cache_for, xhr_options_get]}]}]. 
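%% Quick illustration, not part of the patch: what dispatch/2 above returns
%% for a few sample paths. The server id "123" and session id "abc" are
%% invented; the method lists follow the filter table above.
dispatch_examples() ->
    {match, {none, websocket, "123", "abc", []}} =
        dispatch('GET', "/123/abc/websocket"),
    {match, {none, welcome_screen, dummy, dummy, []}} =
        dispatch('GET', "/"),
    {bad_method, ['GET', 'OPTIONS']} =
        dispatch('POST', "/info"),
    ok.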
-p(S) -> fun (Path) -> re(Path, "^" ++ S ++ "[/]?\$") end. -t(S) -> fun (Path) -> re(Path, "^/([^/.]+)/([^/.]+)" ++ S ++ "[/]?\$") end. +p(S) -> fun (Path) -> re(Path, "^" ++ S ++ "[/]?$") end. + +t(S) -> + fun (Path) -> + re(Path, "^/([^/.]+)/([^/.]+)" ++ S ++ "[/]?$") + end. re(Path, S) -> - case re:run(Path, S, [{capture, all_but_first, list}]) of - nomatch -> nomatch; - {match, []} -> [dummy, dummy]; - {match, [Server, Session]} -> [Server, Session] + case re:run(Path, S, [{capture, all_but_first, list}]) + of + nomatch -> nomatch; + {match, []} -> [dummy, dummy]; + {match, [Server, Session]} -> [Server, Session] end. %% -------------------------------------------------------------------------- -spec handle_req(service(), req()) -> req(). -handle_req(Service = #service{logger = Logger}, Req) -> - Req0 = Logger(Service, Req, http), - {Dispatch, Req1} = dispatch_req(Service, Req0), - handle(Dispatch, Service, Req1). +handle_req(Service = #service{logger = Logger}, Req) -> + Logger(Service, Req, http), + Dispatch = dispatch_req(Service, Req), + handle(Dispatch, Service, Req). handle(nomatch, _Service, Req) -> sockjs_http:reply(404, [], "", Req); - handle({bad_method, Methods}, _Service, Req) -> - MethodsStr = string:join([atom_to_list(M) || M <- Methods], - ", "), + MethodsStr = string:join([atom_to_list(M) + || M <- Methods], + ", "), H = [{"Allow", MethodsStr}], sockjs_http:reply(405, H, "", Req); - -handle({match, {Type, Action, _Server, Session, Filters}}, Service, Req) -> - {Headers, Req2} = lists:foldl( - fun (Filter, {Headers0, Req1}) -> - sockjs_filters:Filter(Req1, Headers0) - end, {[], Req}, Filters), +handle({match, + {Type, Action, _Server, Session, Filters}}, + Service, Req) -> + {Headers, Req2} = lists:foldl(fun (Filter, + {Headers0, Req1}) -> + sockjs_filters:Filter(Req1, Headers0) + end, + {[], Req}, Filters), case Type of - send -> - {Info, Req3} = extract_info(Req2), - _SPid = sockjs_session:maybe_create(Session, Service, Info), - sockjs_action:Action(Req3, Headers, Service, Session); - recv -> - try - sockjs_action:Action(Req2, Headers, Service, Session) - catch throw:no_session -> - {H, Req3} = sockjs_filters:h_sid(Req2, []), - sockjs_http:reply(404, H, "", Req3) - end; - none -> - sockjs_action:Action(Req2, Headers, Service) + send -> + Info = extract_info(Req2), + _SPid = sockjs_session:maybe_create(Session, Service, + Info), + sockjs_action:Action(Req2, Headers, Service, Session); + recv -> + try sockjs_action:Action(Req2, Headers, Service, + Session) + catch + no_session -> + H = sockjs_filters:h_sid(Req2, []), + sockjs_http:reply(404, H, "", Req2) + end; + none -> sockjs_action:Action(Req2, Headers, Service) end. %% -------------------------------------------------------------------------- --spec default_logger(service(), req(), websocket | http) -> req(). +-spec default_logger(service(), req(), + websocket | http) -> no_return(). + default_logger(_Service, Req, _Type) -> - {LongPath, Req1} = sockjs_http:path(Req), - {Method, Req2} = sockjs_http:method(Req1), -% io:format("~s ~s~n", [Method, LongPath]), - Req2. + % As the service need a function to call as default, we simply give a dummy function + % LongPath = sockjs_http:path(Req), + % Method = sockjs_http:method(Req). + % io:format("~s ~s~n", [Method, LongPath]), + ok. + +-spec extract_info(req()) -> info(). --spec extract_info(req()) -> {info(), req()}. 
extract_info(Req) -> - {Peer, Req0} = sockjs_http:peername(Req), - {Sock, Req1} = sockjs_http:sockname(Req0), - {Path, Req2} = sockjs_http:path(Req1), - {Headers, Req3} = lists:foldl(fun (H, {Acc, R0}) -> - case sockjs_http:header(H, R0) of - {undefined, R1} -> {Acc, R1}; - {V, R1} -> {[{H, V} | Acc], R1} - end - end, {[], Req2}, - ['referer', 'x-client-ip', 'x-forwarded-for', - 'x-cluster-client-ip', 'via', 'x-real-ip']), - {[{peername, Peer}, - {sockname, Sock}, - {path, Path}, - {headers, Headers}], Req3}. + Peer = sockjs_http:peername(Req), + Sock = sockjs_http:sockname(Req), + Path = sockjs_http:path(Req), + Headers = lists:foldl(fun (H, Acc) -> + case sockjs_http:header(H, Req) of + undefined -> Acc; + V -> [{H, V} | Acc] + end + end, + [], + [referer, 'x-client-ip', 'x-forwarded-for', + 'x-cluster-client-ip', via, 'x-real-ip']), + [{peername, Peer}, {sockname, Sock}, {path, Path}, + {headers, Headers}]. diff --git a/src/sockjs_http.erl b/src/sockjs_http.erl index 93205f0..a89ea14 100644 --- a/src/sockjs_http.erl +++ b/src/sockjs_http.erl @@ -1,22 +1,30 @@ -module(sockjs_http). --export([path/1, method/1, body/1, body_qs/1, header/2, jsessionid/1, - callback/1, peername/1, sockname/1]). --export([reply/4, chunk_start/3, chunk/2, chunk_end/1]). --export([hook_tcp_close/1, unhook_tcp_close/1, abruptly_kill/1]). +-export([body/1, body_qs/1, callback/1, header/2, + jsessionid/1, method/1, path/1, peername/1, + sockname/1]). + +-export([chunk/2, chunk_end/1, chunk_start/3, reply/4]). + +-export([abruptly_kill/1, hook_tcp_close/1, + unhook_tcp_close/1]). + -include("sockjs_internal.hrl"). %% -------------------------------------------------------------------------- -spec path(req()) -> string(). -path({cowboy, Req}) -> Path = cowboy_req:path(Req), - binary_to_list(Path). + +path({cowboy, Req}) -> + Path = cowboy_req:path(Req), binary_to_list(Path). -spec method(req()) -> atom(). -method({cowboy, Req}) -> Method = cowboy_req:method(Req), - method_atom(Method). + +method({cowboy, Req}) -> + Method = cowboy_req:method(Req), method_atom(Method). -spec method_atom(binary() | atom()) -> atom(). + method_atom(<<"GET">>) -> 'GET'; method_atom(<<"PUT">>) -> 'PUT'; method_atom(<<"POST">>) -> 'POST'; @@ -33,110 +41,140 @@ method_atom('PATCH') -> 'PATCH'; method_atom('HEAD') -> 'HEAD'. -spec body(req()) -> {binary(), req()}. -body({cowboy, Req}) -> {ok, Body, Req1} = cowboy_req:read_body(Req), - {Body, {cowboy, Req1}}. + +body({cowboy, Req}) -> + {ok, Body, Req1} = body(Req, <<"">>), + {Body, {cowboy, Req1}}. + +body({cowboy, Req}, Acc) -> + case cowboy_req:read_body(Req) of + {ok, Data, Req} -> + {ok, <>, Req}; + {more, Data, Req} -> + body(Req, <>) + end. -spec body_qs(req()) -> {binary(), req()}. + body_qs(Req) -> - {H, Req1} = header('content-type', Req), + {H, Req1} = header('content-type', Req), case H of - H when H =:= "text/plain" orelse H =:= "" -> - body(Req1); - _ -> - %% By default assume application/x-www-form-urlencoded - body_qs2(Req1) + H when H =:= "text/plain" orelse H =:= "" -> body(Req1); + _ -> + %% By default assume application/x-www-form-urlencoded + body_qs2(Req1) end. + body_qs2({cowboy, Req}) -> - {ok, BodyQS, Req1} = cowboy_req:read_urlencoded_body(Req), + {ok, BodyQS, Req1} = + cowboy_req:read_urlencoded_body(Req), case proplists:get_value(<<"d">>, BodyQS) of - undefined -> - {<<>>, {cowboy, Req1}}; - V -> - {V, {cowboy, Req1}} + undefined -> {<<>>, {cowboy, Req1}}; + V -> {V, {cowboy, Req1}} end. --spec header(atom(), req()) -> {nonempty_string() | undefined, req()}. 
-header(K, {cowboy, Req})-> - {H, Req2} = cowboy_req:header(K, Req), - {V, Req3} = case H of - undefined -> - cowboy_req:header(atom_to_binary(K, utf8), Req2); - _ -> {H, Req2} - end, +-spec header(atom(), req()) -> {nonempty_string() | + undefined, + req()}. + +header(K, {cowboy, Req}) -> + H = cowboy_req:header(K, Req), + V = case H of + undefined -> + cowboy_req:header(atom_to_binary(K, utf8), Req); + _ -> H + end, case V of - undefined -> {undefined, {cowboy, Req3}}; - _ -> {binary_to_list(V), {cowboy, Req3}} + undefined -> undefined; + _ -> binary_to_list(V) end. --spec jsessionid(req()) -> {nonempty_string() | undefined, req()}. +-spec jsessionid(req()) -> {nonempty_string() | + undefined, + req()}. + jsessionid({cowboy, Req}) -> - {C, Req2} = cowboy_req:cookie(<<"JSESSIONID">>, Req), + #{'JSESSIONID' := C} = cowboy_req:cookie([{'JSESSIONID', + [], undefined}], + Req), case C of - _ when is_binary(C) -> - {binary_to_list(C), {cowboy, Req2}}; - undefined -> - {undefined, {cowboy, Req2}} + _ when is_binary(C) -> {binary_to_list(C), cowboy}; + undefined -> {undefined, cowboy} end. --spec callback(req()) -> {nonempty_string() | undefined, req()}. +-spec callback(req()) -> {nonempty_string() | undefined, + req()}. + callback({cowboy, Req}) -> {CB, Req1} = cowboy_req:qs_val(<<"c">>, Req), case CB of - undefined -> {undefined, {cowboy, Req1}}; - _ -> {binary_to_list(CB), {cowboy, Req1}} + undefined -> {undefined, {cowboy, Req1}}; + _ -> {binary_to_list(CB), {cowboy, Req1}} end. --spec peername(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}. -peername({cowboy, Req}) -> - {P, Req1} = cowboy_req:peer(Req), - {P, {cowboy, Req1}}. +-spec peername(req()) -> {inet:ip_address(), + non_neg_integer()}. + +peername({cowboy, Req}) -> cowboy_req:peer(Req). --spec sockname(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}. -sockname({cowboy, Req} = R) -> - {Addr, _Req} = cowboy_req:peer(Req), - {Addr, R}. +-spec sockname(req()) -> {inet:ip_address(), + non_neg_integer()}. + +sockname({cowboy, Req}) -> cowboy_req:peer(Req). %% -------------------------------------------------------------------------- --spec reply(non_neg_integer(), headers(), iodata(), req()) -> req(). +-spec reply(non_neg_integer(), headers(), iodata(), + req()) -> req(). + reply(Code, Headers, Body, {cowboy, Req}) -> Body1 = iolist_to_binary(Body), - {ok, Req1} = cowboy_req:reply(Code, enbinary(Headers), Body1, Req), + {ok, Req1} = cowboy_req:reply(Code, enbinary(Headers), + Body1, Req), {cowboy, Req1}. --spec chunk_start(non_neg_integer(), headers(), req()) -> req(). +-spec chunk_start(non_neg_integer(), headers(), + req()) -> req(). + chunk_start(Code, Headers, {cowboy, Req}) -> - {ok, Req1} = cowboy_req:chunked_reply(Code, enbinary(Headers), Req), + {ok, Req1} = cowboy_req:chunked_reply(Code, + enbinary(Headers), Req), {cowboy, Req1}. -spec chunk(iodata(), req()) -> {ok | error, req()}. + chunk(Chunk, {cowboy, Req} = R) -> case cowboy_req:chunk(Chunk, Req) of - ok -> {ok, R}; - {error, _E} -> {error, R} - %% This shouldn't happen too often, usually we - %% should catch tco socket closure before. + ok -> {ok, R}; + {error, _E} -> + {error, + R} %% This shouldn't happen too often, usually we + %% should catch tco socket closure before. end. -spec chunk_end(req()) -> req(). -chunk_end({cowboy, _Req} = R) -> R. -enbinary(L) -> [{list_to_binary(K), list_to_binary(V)} || {K, V} <- L]. +chunk_end({cowboy, _Req} = R) -> R. +enbinary(L) -> + [{list_to_binary(K), list_to_binary(V)} || {K, V} <- L]. 
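%% Hedged sketch, not the author's exact code: Cowboy 2's cowboy_req:read_body/1
%% returns {more, Data, Req} when a body does not fit in one read, so the full
%% payload has to be accumulated in a loop before it is handed to the protocol:
read_full_body(Req) ->
    read_full_body(Req, <<>>).

read_full_body(Req0, Acc) ->
    case cowboy_req:read_body(Req0) of
        {ok, Data, Req}   -> {<<Acc/binary, Data/binary>>, Req};
        {more, Data, Req} -> read_full_body(Req, <<Acc/binary, Data/binary>>)
    end.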
-spec hook_tcp_close(req()) -> req(). + hook_tcp_close(R = {cowboy, Req}) -> [T, S] = cowboy_req:get([transport, socket], Req), - T:setopts(S,[{active,once}]), + T:setopts(S, [{active, once}]), R. -spec unhook_tcp_close(req()) -> req(). + unhook_tcp_close(R = {cowboy, Req}) -> [T, S] = cowboy_req:get([transport, socket], Req), - T:setopts(S,[{active,false}]), + T:setopts(S, [{active, false}]), R. -spec abruptly_kill(req()) -> req(). + abruptly_kill(R = {cowboy, Req}) -> [T, S] = cowboy_req:get([transport, socket], Req), ok = T:close(S), diff --git a/src/sockjs_json.erl b/src/sockjs_json.erl index e61f4b9..3d10277 100644 --- a/src/sockjs_json.erl +++ b/src/sockjs_json.erl @@ -1,17 +1,18 @@ -module(sockjs_json). --export([encode/1, decode/1]). +-export([decode/1, encode/1]). %% -------------------------------------------------------------------------- -spec encode(any()) -> iodata(). -encode(Thing) -> - mochijson2_fork:encode(Thing). + +encode(Thing) -> mochijson2_fork:encode(Thing). -spec decode(iodata()) -> {ok, any()} | {error, any()}. + decode(Encoded) -> try mochijson2_fork:decode(Encoded) of - V -> {ok, V} + V -> {ok, V} catch - _:E -> {error, E} + _:E -> {error, E} end. diff --git a/src/sockjs_multiplex.erl b/src/sockjs_multiplex.erl index 3922e8c..13f84bc 100644 --- a/src/sockjs_multiplex.erl +++ b/src/sockjs_multiplex.erl @@ -3,141 +3,152 @@ -behaviour(sockjs_service). -export([init_state/1, init_state/2]). --export([sockjs_init/2, sockjs_handle/3, sockjs_terminate/2]). + +-export([sockjs_handle/3, sockjs_init/2, + sockjs_terminate/2]). -record(service, {callback, state, vconn}). + -record(authen_callback, {callback, success = false}). %% -------------------------------------------------------------------------- init_state(Services, {AuthenCallback, Options}) -> - L = [{Topic, #service{callback = Callback, state = State}} || - {Topic, Callback, State} <- Services], - + L = [{Topic, + #service{callback = Callback, state = State}} + || {Topic, Callback, State} <- Services], Extra = case lists:keyfind(state, 1, Options) of - {state, ExtraValue} -> - case erlang:is_list(ExtraValue) of - true -> - ExtraValue; - false -> - [] - end; - false -> - [] - end, - + {state, ExtraValue} -> + case erlang:is_list(ExtraValue) of + true -> ExtraValue; + false -> [] + end; + false -> [] + end, % Services, Channels, AuthenCallback, Extra {orddict:from_list(L), orddict:new(), - #authen_callback{callback = AuthenCallback, success = false}, + #authen_callback{callback = AuthenCallback, + success = false}, Extra}. init_state(Services) -> init_state(Services, {undefined, []}). - %% Get result of authentication callback if it exists. %% Otherwise return ``authen_callback_not_found``. %% Authentication callback should return {ok, State} or {success, State}. -get_authen_callback_result(#authen_callback{callback = AuthenCallback}, - Handle, What, Extra) -> +get_authen_callback_result(#authen_callback{callback = + AuthenCallback}, + Handle, What, Extra) -> case erlang:is_function(AuthenCallback) of - true -> - AuthenCallback(Handle, What, Extra); - false -> - authen_callback_not_found + true -> AuthenCallback(Handle, What, Extra); + false -> authen_callback_not_found end. 
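%% Usage sketch, not part of the patch: initialising the multiplexer with two
%% topics and an authentication callback, in the shapes accepted by
%% init_state/2 above. The topic names, funs and the "secret" token are
%% invented examples.
multiplex_state() ->
    Services = [{"ann", fun ann_service/3, []},
                {"bob", fun bob_service/3, []}],
    Authen = fun (_Conn, {recv, <<"secret">>}, Extra) -> {success, Extra};
                 (_Conn, _Event, Extra) -> {ok, Extra}
             end,
    sockjs_multiplex:init_state(Services, {Authen, [{state, []}]}).

ann_service(_Conn, _Event, State) -> {ok, State}.
bob_service(_Conn, _Event, State) -> {ok, State}.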
-sockjs_init(Conn, {Services, Channels, AuthenCallbackRec, Extra} = S) -> - case get_authen_callback_result(AuthenCallbackRec, Conn, init, Extra) of - authen_callback_not_found -> - {ok, S}; - {ok, Extra1} -> - {ok, {Services, Channels, AuthenCallbackRec, Extra1}} +sockjs_init(Conn, + {Services, Channels, AuthenCallbackRec, Extra} = S) -> + case get_authen_callback_result(AuthenCallbackRec, Conn, + init, Extra) + of + authen_callback_not_found -> {ok, S}; + {ok, Extra1} -> + {ok, {Services, Channels, AuthenCallbackRec, Extra1}} end. -sockjs_handle_via_channel(Conn, Data, {Services, Channels, AuthenCallbackRec, Extra}) -> - [Type, Topic, Payload] = split($,, binary_to_list(Data), 3), +sockjs_handle_via_channel(Conn, Data, + {Services, Channels, AuthenCallbackRec, Extra}) -> + [Type, Topic, Payload] = split($,, binary_to_list(Data), + 3), case orddict:find(Topic, Services) of - {ok, Service} -> - Channels1 = action(Conn, {Type, Topic, Payload}, Service, Channels, Extra), - {ok, {Services, Channels1, AuthenCallbackRec, Extra}}; - _Else -> - {ok, {Services, Channels, AuthenCallbackRec, Extra}} + {ok, Service} -> + Channels1 = action(Conn, {Type, Topic, Payload}, + Service, Channels, Extra), + {ok, {Services, Channels1, AuthenCallbackRec, Extra}}; + _Else -> + {ok, {Services, Channels, AuthenCallbackRec, Extra}} end. -sockjs_handle(Conn, Data, {Services, Channels, - #authen_callback{success = Success} = AuthenCallbackRec, - Extra} = S) -> +sockjs_handle(Conn, Data, + {Services, Channels, + #authen_callback{success = Success} = AuthenCallbackRec, + Extra} = + S) -> case Success of - true -> - sockjs_handle_via_channel(Conn, Data, S); - false -> - case get_authen_callback_result(AuthenCallbackRec, Conn, {recv, Data}, Extra) of - authen_callback_not_found -> - sockjs_handle_via_channel(Conn, Data, {Services, Channels, AuthenCallbackRec, Extra}); - {success, Extra1} -> - {ok, {Services, Channels, AuthenCallbackRec#authen_callback{success = true}, Extra1}}; - {ok, Extra1} -> - {ok, {Services, Channels, AuthenCallbackRec, Extra1}} - end + true -> sockjs_handle_via_channel(Conn, Data, S); + false -> + case get_authen_callback_result(AuthenCallbackRec, Conn, + {recv, Data}, Extra) + of + authen_callback_not_found -> + sockjs_handle_via_channel(Conn, Data, + {Services, Channels, + AuthenCallbackRec, Extra}); + {success, Extra1} -> + {ok, + {Services, Channels, + AuthenCallbackRec#authen_callback{success = true}, + Extra1}}; + {ok, Extra1} -> + {ok, {Services, Channels, AuthenCallbackRec, Extra1}} + end end. -sockjs_terminate(Conn, {Services, Channels, AuthenCallbackRec, Extra}) -> - case get_authen_callback_result(AuthenCallbackRec, Conn, closed, Extra) of - {ok, Extra1} -> - ok; - _Else -> - Extra1 = Extra +sockjs_terminate(Conn, + {Services, Channels, AuthenCallbackRec, Extra}) -> + case get_authen_callback_result(AuthenCallbackRec, Conn, + closed, Extra) + of + {ok, Extra1} -> ok; + _Else -> Extra1 = Extra end, + _ = [{emit(closed, Channel)} + || {_Topic, Channel} <- orddict:to_list(Channels)], + {ok, + {Services, orddict:new(), AuthenCallbackRec, Extra1}}. - _ = [ {emit(closed, Channel)} || - {_Topic, Channel} <- orddict:to_list(Channels) ], - {ok, {Services, orddict:new(), AuthenCallbackRec, Extra1}}. 
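%% Illustration only: the multiplexed frames split apart by
%% sockjs_handle_via_channel/3 above have the shape "type,topic,payload", with
%% "sub", "msg" and "uns" handled by the action/5 clauses that follow. A
%% hypothetical sender-side helper:
mux_frame(Type, Topic, Payload) ->
    iolist_to_binary([Type, $,, Topic, $,, Payload]).
%% mux_frame("sub", "ann", "")      -> <<"sub,ann,">>       opens the channel
%% mux_frame("msg", "ann", "hello") -> <<"msg,ann,hello">>  delivered as {recv, "hello"}
%% mux_frame("uns", "ann", "")      -> <<"uns,ann,">>       closes the channel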
- - -action(Conn, {Type, Topic, Payload}, Service, Channels, Extra) -> +action(Conn, {Type, Topic, Payload}, Service, Channels, + Extra) -> case {Type, orddict:is_key(Topic, Channels)} of - {"sub", false} -> - Channel = Service#service{ - state = Service#service.state ++ Extra, - vconn = {sockjs_multiplex_channel, Conn, Topic} - }, - orddict:store(Topic, emit(init, Channel), Channels); - {"uns", true} -> - Channel = orddict:fetch(Topic, Channels), - emit(closed, Channel), - orddict:erase(Topic, Channels); - {"msg", true} -> - Channel = orddict:fetch(Topic, Channels), - orddict:store(Topic, emit({recv, Payload}, Channel), Channels); - _Else -> - %% Ignore - Channels + {"sub", false} -> + Channel = Service#service{state = + Service#service.state ++ Extra, + vconn = + {sockjs_multiplex_channel, Conn, + Topic}}, + orddict:store(Topic, emit(init, Channel), Channels); + {"uns", true} -> + Channel = orddict:fetch(Topic, Channels), + emit(closed, Channel), + orddict:erase(Topic, Channels); + {"msg", true} -> + Channel = orddict:fetch(Topic, Channels), + orddict:store(Topic, emit({recv, Payload}, Channel), + Channels); + _Else -> + %% Ignore + Channels end. - -emit(What, Channel = #service{callback = Callback, - state = State, - vconn = VConn}) -> +emit(What, + Channel = #service{callback = Callback, state = State, + vconn = VConn}) -> case Callback(VConn, What, State) of - {ok, State1} -> Channel#service{state = State1}; - ok -> Channel + {ok, State1} -> Channel#service{state = State1}; + ok -> Channel end. - %% -------------------------------------------------------------------------- split(Char, Str, Limit) when Limit > 0 -> - Acc = split(Char, Str, Limit, []), - lists:reverse(Acc); -split(_Char, Str, 0) -> - [Str]. + Acc = split(Char, Str, Limit, []), lists:reverse(Acc); +split(_Char, Str, 0) -> [Str]. -split(_Char, Str, 1, Acc) -> - [Str | Acc]; +split(_Char, Str, 1, Acc) -> [Str | Acc]; split(Char, Str, Limit, Acc) -> {L, R} = case string:chr(Str, Char) of - 0 -> {Str, ""}; - I -> {string:substr(Str, 1, I-1), string:substr(Str, I+1)} - end, - split(Char, R, Limit-1, [L | Acc]). + 0 -> {Str, ""}; + I -> + {string:substr(Str, 1, I - 1), + string:substr(Str, I + 1)} + end, + split(Char, R, Limit - 1, [L | Acc]). diff --git a/src/sockjs_service.erl b/src/sockjs_service.erl index df0d79b..b4a94ca 100644 --- a/src/sockjs_service.erl +++ b/src/sockjs_service.erl @@ -3,11 +3,6 @@ -export([behaviour_info/1]). behaviour_info(callbacks) -> - [ - {sockjs_init, 2}, - {sockjs_handle, 3}, - {sockjs_terminate, 2} - ]; - -behaviour_info(_Other) -> - undefined. + [{sockjs_init, 2}, {sockjs_handle, 3}, + {sockjs_terminate, 2}]; +behaviour_info(_Other) -> undefined. diff --git a/src/sockjs_session_sup.erl b/src/sockjs_session_sup.erl index 4036c04..ba48bcb 100644 --- a/src/sockjs_session_sup.erl +++ b/src/sockjs_session_sup.erl @@ -2,19 +2,24 @@ -behaviour(supervisor). --export([start_link/0, start_child/3]). +-export([start_child/3, start_link/0]). + -export([init/1]). %% -------------------------------------------------------------------------- --spec start_link() -> ignore | {'ok', pid()} | {'error', any()}. +-spec start_link() -> ignore | {ok, pid()} | + {error, any()}. + start_link() -> - supervisor:start_link({local, ?MODULE}, ?MODULE, []). + supervisor:start_link({local, ?MODULE}, ?MODULE, []). init([]) -> - {ok, {{simple_one_for_one, 10000, 1}, - [{undefined, {sockjs_session, start_link, []}, - transient, 5000, worker, [sockjs_session]}]}}. 
+ {ok, + {{simple_one_for_one, 10000, 1}, + [{undefined, {sockjs_session, start_link, []}, + transient, 5000, worker, [sockjs_session]}]}}. start_child(SessionId, Service, Info) -> - supervisor:start_child(?MODULE, [SessionId, Service, Info]). + supervisor:start_child(?MODULE, + [SessionId, Service, Info]). diff --git a/src/sockjs_util.erl b/src/sockjs_util.erl index be3f972..0146712 100644 --- a/src/sockjs_util.erl +++ b/src/sockjs_util.erl @@ -1,7 +1,9 @@ -module(sockjs_util). -export([rand32/0]). + -export([encode_frame/1]). + -export([url_escape/2]). -include("sockjs_internal.hrl"). @@ -9,37 +11,36 @@ %% -------------------------------------------------------------------------- -spec rand32() -> non_neg_integer(). + rand32() -> case get(random_seeded) of - undefined -> - {MegaSecs, Secs, MicroSecs} = now(), - _ = random:seed(MegaSecs, Secs, MicroSecs), - put(random_seeded, true); - _Else -> - ok + undefined -> + {MegaSecs, Secs, MicroSecs} = now(), + _ = random:seed(MegaSecs, Secs, MicroSecs), + put(random_seeded, true); + _Else -> ok end, - random:uniform(erlang:trunc(math:pow(2,32)))-1. - + random:uniform(erlang:trunc(math:pow(2, 32))) - 1. -spec encode_frame(frame()) -> iodata(). -encode_frame({open, nil}) -> - <<"o">>; + +encode_frame({open, nil}) -> <<"o">>; encode_frame({close, {Code, Reason}}) -> [<<"c">>, sockjs_json:encode([Code, list_to_binary(Reason)])]; encode_frame({data, L}) -> [<<"a">>, sockjs_json:encode([iolist_to_binary(D) || D <- L])]; -encode_frame({heartbeat, nil}) -> - <<"h">>. - +encode_frame({heartbeat, nil}) -> <<"h">>. -spec url_escape(string(), string()) -> iolist(). + url_escape(Str, Chars) -> [case lists:member(Char, Chars) of - true -> hex(Char); - false -> Char - end || Char <- Str]. + true -> hex(Char); + false -> Char + end + || Char <- Str]. hex(C) -> <> = <>, diff --git a/src/sockjs_ws_handler.erl b/src/sockjs_ws_handler.erl index bcf463d..3929853 100644 --- a/src/sockjs_ws_handler.erl +++ b/src/sockjs_ws_handler.erl @@ -1,58 +1,59 @@ -module(sockjs_ws_handler). --export([received/3, reply/2, close/2]). +-export([close/2, received/3, reply/2]). -include("sockjs_internal.hrl"). %% -------------------------------------------------------------------------- --spec received(websocket|rawwebsocket, pid(), binary()) -> ok | shutdown. +-spec received(websocket | rawwebsocket, pid(), + binary()) -> ok | shutdown. + %% Ignore empty -received(_RawWebsocket, _SessionPid, <<>>) -> - ok; +received(_RawWebsocket, _SessionPid, <<>>) -> ok; received(websocket, SessionPid, Data) -> case sockjs_json:decode(Data) of - {ok, Msg} when is_binary(Msg) -> - session_received([Msg], SessionPid); - {ok, Messages} when is_list(Messages) -> - session_received(Messages, SessionPid); - _Else -> - shutdown + {ok, Msg} when is_binary(Msg) -> + session_received([Msg], SessionPid); + {ok, Messages} when is_list(Messages) -> + session_received(Messages, SessionPid); + _Else -> shutdown end; - received(rawwebsocket, SessionPid, Data) -> session_received([Data], SessionPid). session_received(Messages, SessionPid) -> try sockjs_session:received(Messages, SessionPid) of - ok -> ok + ok -> ok catch - no_session -> shutdown + no_session -> shutdown end. --spec reply(websocket|rawwebsocket, pid()) -> {close|open, binary()} | wait. +-spec reply(websocket | rawwebsocket, pid()) -> {close | + open, + binary()} | + wait. 
+ reply(websocket, SessionPid) -> case sockjs_session:reply(SessionPid) of - {W, Frame} when W =:= ok orelse W =:= close-> - Frame1 = sockjs_util:encode_frame(Frame), - {W, iolist_to_binary(Frame1)}; - wait -> - wait + {W, Frame} when W =:= ok orelse W =:= close -> + Frame1 = sockjs_util:encode_frame(Frame), + {W, iolist_to_binary(Frame1)}; + wait -> wait end; reply(rawwebsocket, SessionPid) -> case sockjs_session:reply(SessionPid, false) of - {W, Frame} when W =:= ok orelse W =:= close-> - case Frame of - {open, nil} -> reply(rawwebsocket, SessionPid); - {close, {_Code, _Reason}} -> {close, <<>>}; - {data, [Msg]} -> {ok, iolist_to_binary(Msg)}; - {heartbeat, nil} -> reply(rawwebsocket, SessionPid) - end; - wait -> - wait + {W, Frame} when W =:= ok orelse W =:= close -> + case Frame of + {open, nil} -> reply(rawwebsocket, SessionPid); + {close, {_Code, _Reason}} -> {close, <<>>}; + {data, [Msg]} -> {ok, iolist_to_binary(Msg)}; + {heartbeat, nil} -> reply(rawwebsocket, SessionPid) + end; + wait -> wait end. --spec close(websocket|rawwebsocket, pid()) -> ok. +-spec close(websocket | rawwebsocket, pid()) -> ok. + close(_RawWebsocket, SessionPid) -> - SessionPid ! force_shutdown, - ok. + SessionPid ! force_shutdown, ok. From 7dda84615b379ee9f12380b980dc48389c8ed302 Mon Sep 17 00:00:00 2001 From: "alpha.ferry" Date: Mon, 8 Feb 2021 16:56:55 +0700 Subject: [PATCH 4/9] remove unneded --- .../rebar_compiler_erl/source_apps.dag | Bin 439 -> 0 bytes .../plugins/erl_tidy_prv_fmt/.gitignore | 18 - .../default/plugins/erl_tidy_prv_fmt/LICENSE | 29 - .../plugins/erl_tidy_prv_fmt/README.md | 27 - .../plugins/erl_tidy_prv_fmt/rebar.config | 2 - .../plugins/erl_tidy_prv_fmt/rebar.lock | 1 - .../erl_tidy_prv_fmt/src/erl_tidy.app.src | 9 - .../plugins/erl_tidy_prv_fmt/src/erl_tidy.erl | 1914 ----------------- .../erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl | 73 - 9 files changed, 2073 deletions(-) delete mode 100644 _build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/.gitignore delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/LICENSE delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/README.md delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/rebar.config delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/rebar.lock delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl delete mode 100644 _build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl diff --git a/_build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag b/_build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag deleted file mode 100644 index 88f76700b1dd642d382e634cf515847100f4d5bc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 439 zcmV;o0Z9IXPyhf6O?Y1IR=rNcFc5Z=wp6f`H(+3GELv0~M&5v-Ag1UNJB?#EiJYWT zpBM2SoRg+a2NQxRzw}`MjNLpl}Oo|!T3eq+8gGwky8IPb+CCyc}7HL88)-MIy z`MKWu(Waz45b5F63pi|Q4xmdBL0B5-D|0e!S@~OJAhNV&XD-KCRWgBu-!4Otk!!|- zYVC_O$_(H@NV4i5tI%O<{iA>DA`Ml=c#H7bI|6=$=4GYR#E2#6-9E+m9JPW{ik;XE zP#|C6z$A8I3U{zD*vk`aiBIYL!CO!z0*bqotJxB_`-sp8 diff --git a/_build/default/plugins/erl_tidy_prv_fmt/.gitignore b/_build/default/plugins/erl_tidy_prv_fmt/.gitignore deleted file mode 100644 index 40a1d4f..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/.gitignore +++ /dev/null @@ -1,18 +0,0 @@ -.rebar3 -_* -.eunit -*.o -*.beam -*.plt -*.swp -*.swo -.erlang.cookie -ebin -log 
-erl_crash.dump -.rebar -_rel -_deps -_plugins -_tdeps -logs diff --git a/_build/default/plugins/erl_tidy_prv_fmt/LICENSE b/_build/default/plugins/erl_tidy_prv_fmt/LICENSE deleted file mode 100644 index f592795..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -Copyright (c) 2015, Tristan Sloughter . -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -* Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -* The names of its contributors may not be used to endorse or promote - products derived from this software without specific prior written - permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/_build/default/plugins/erl_tidy_prv_fmt/README.md b/_build/default/plugins/erl_tidy_prv_fmt/README.md deleted file mode 100644 index 69ae99f..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/README.md +++ /dev/null @@ -1,27 +0,0 @@ -erl_tidy -===== - -Format your Erlang modules. - -Use ---- - -Add the following to your `rebar.config` - -```erlang -{plugins, [{erl_tidy_prv_fmt, ".*", {git, "git://github.com/tsloughter/erl_tidy.git", {branch, "master"}}}]}. -``` - -And run: - - $ rebar3 fmt - -Run `rebar3 help fmt` to learn about formatting options. - -You also may specify options in your `rebar.config` like so: -```erlang -{fmt_opts, [ - {auto_list_comp, false}, - {keep_unused, true} -]}. -``` \ No newline at end of file diff --git a/_build/default/plugins/erl_tidy_prv_fmt/rebar.config b/_build/default/plugins/erl_tidy_prv_fmt/rebar.config deleted file mode 100644 index f618f3e..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/rebar.config +++ /dev/null @@ -1,2 +0,0 @@ -{erl_opts, [debug_info]}. -{deps, []}. \ No newline at end of file diff --git a/_build/default/plugins/erl_tidy_prv_fmt/rebar.lock b/_build/default/plugins/erl_tidy_prv_fmt/rebar.lock deleted file mode 100644 index 57afcca..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/rebar.lock +++ /dev/null @@ -1 +0,0 @@ -[]. 
diff --git a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src deleted file mode 100644 index fdbafb6..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.app.src +++ /dev/null @@ -1,9 +0,0 @@ -{application, erl_tidy, - [{description, "An OTP library"} - ,{vsn, "0.1.0"} - ,{registered, []} - ,{applications, - [kernel,stdlib]} - ,{env,[]} - ,{modules, []} - ]}. diff --git a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl deleted file mode 100644 index f2de12b..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy.erl +++ /dev/null @@ -1,1914 +0,0 @@ -%% ===================================================================== -%% This library is free software; you can redistribute it and/or -%% modify it under the terms of the GNU Lesser General Public License -%% as published by the Free Software Foundation; either version 2 of -%% the License, or (at your option) any later version. -%% -%% This library is distributed in the hope that it will be useful, but -%% WITHOUT ANY WARRANTY; without even the implied warranty of -%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -%% Lesser General Public License for more details. -%% -%% You should have received a copy of the GNU Lesser General Public -%% License along with this library; if not, write to the Free Software -%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -%% USA -%% -%% @copyright 1999-2014 Richard Carlsson -%% @author Richard Carlsson -%% @end -%% ===================================================================== - -%% @doc Tidies and pretty-prints Erlang source code, removing unused -%% functions, updating obsolete constructs and function calls, etc. -%% -%% Caveats: It is possible that in some intricate uses of macros, -%% the automatic addition or removal of parentheses around uses or -%% arguments could cause the resulting program to be rejected by the -%% compiler; however, we have found no such case in existing -%% code. Programs defining strange macros can usually not be read by -%% this program, and in those cases, no changes will be made. -%% -%% If you really, really want to, you may call it "Inga". -%% -%% Disclaimer: The author accepts no responsibility for errors -%% introduced in code that has been processed by the program. It has -%% been reasonably well tested, but the possibility of errors remains. -%% Keep backups of your original code safely stored, until you feel -%% confident that the new, modified code can be trusted. - --module(erl_tidy). - --export([dir/0, dir/1, dir/2, file/1, file/2, module/1, module/2]). - --include_lib("kernel/include/file.hrl"). - --define(DEFAULT_BACKUP_SUFFIX, ".bak"). --define(DEFAULT_DIR, ""). --define(DEFAULT_REGEXP, ".*\\.erl$"). - -%% ===================================================================== - --type options() :: [atom() | {atom(), any()}]. - -%% ===================================================================== - -dir__defaults() -> - [{follow_links, false}, - recursive, - {regexp, ?DEFAULT_REGEXP}, - verbose]. - -%% ===================================================================== -%% @spec dir() -> ok -%% @equiv dir("") - --spec dir() -> 'ok'. - -dir() -> - dir(""). - -%% ===================================================================== -%% @spec dir(Dir) -> ok -%% @equiv dir(Dir, []) - --spec dir(file:filename()) -> 'ok'. 
- -dir(Dir) -> - dir(Dir, []). - -%% ===================================================================== -%% @spec dir(Directory::filename(), Options::[term()]) -> ok -%% filename() = file:filename() -%% -%% @doc Tidies Erlang source files in a directory and its -%% subdirectories. -%% -%% Available options: -%%
-%%   {follow_links, boolean()}
-%%
-%%       If the value is `true', symbolic directory
-%%       links will be followed. The default value is
-%%       `false'.
-%%
-%%   {recursive, boolean()}
-%%
-%%       If the value is `true', subdirectories will be
-%%       visited recursively. The default value is
-%%       `true'.
-%%
-%%   {regexp, string()}
-%%
-%%       The value denotes a regular expression (see module
-%%       `re'). Tidying will only be applied to those
-%%       regular files whose names match this pattern. The default
-%%       value is `".*\\.erl$"', which matches normal
-%%       Erlang source file names.
-%%
-%%   {test, boolean()}
-%%
-%%       If the value is `true', no files will be
-%%       modified. The default value is `false'.
-%%
-%%   {verbose, boolean()}
-%%
-%%       If the value is `true', progress messages will
-%%       be output while the program is running, unless the
-%%       `quiet' option is `true'. The default
-%%       value when calling {@link dir/2} is `true'.
-%% -%% See the function {@link file/2} for further options. -%% -%% @see //stdlib/re -%% @see file/2 - --record(dir, {follow_links = false :: boolean(), - recursive = true :: boolean(), - options :: options()}). - --spec dir(file:filename(), options()) -> 'ok'. - -dir(Dir, Opts) -> - Opts1 = Opts ++ dir__defaults(), - Env = #dir{follow_links = proplists:get_bool(follow_links, Opts1), - recursive = proplists:get_bool(recursive, Opts1), - options = Opts1}, - Regexp = proplists:get_value(regexp, Opts1), - case filename(Dir) of - "" -> - Dir1 = "."; - Dir1 -> - ok - end, - dir_1(Dir1, Regexp, Env). - -dir_1(Dir, Regexp, Env) -> - case file:list_dir(Dir) of - {ok, Files} -> - lists:foreach(fun (X) -> dir_2(X, Regexp, Dir, Env) end, - Files); - {error, _} -> - report_error("error reading directory `~ts'", - [filename(Dir)]), - exit(error) - end. - -dir_2(Name, Regexp, Dir, Env) -> - File = if Dir =:= "" -> - Name; - true -> - filename:join(Dir, Name) - end, - case file_type(File) of - {value, regular} -> - dir_4(File, Regexp, Env); - {value, directory} when Env#dir.recursive =:= true -> - case is_symlink(Name) of - false -> - dir_3(Name, Dir, Regexp, Env); - true when Env#dir.follow_links =:= true -> - dir_3(Name, Dir, Regexp, Env); - _ -> - ok - end; - _ -> - ok - end. - -dir_3(Name, Dir, Regexp, Env) -> - Dir1 = filename:join(Dir, Name), - verbose("tidying directory `~ts'.", [Dir1], Env#dir.options), - dir_1(Dir1, Regexp, Env). - -dir_4(File, Regexp, Env) -> - case re:run(File, Regexp) of - {match, _} -> - Opts = [{outfile, File}, {dir, ""} | Env#dir.options], - case catch file(File, Opts) of - {'EXIT', Value} -> - warn("error tidying `~ts'.~n~p", [File,Value], Opts); - _ -> - ok - end; - nomatch -> - ok - end. - -file__defaults() -> - [{backup_suffix, ?DEFAULT_BACKUP_SUFFIX}, - backups, - {dir, ?DEFAULT_DIR}, - {printer, default_printer()}, - {quiet, false}, - {verbose, false}]. - -default_printer() -> - fun (Tree, Options) -> erl_prettypr:format(Tree, Options) end. - -%% ===================================================================== -%% @spec file(Name) -> ok -%% @equiv file(Name, []) - --spec file(file:filename()) -> 'ok'. - -file(Name) -> - file(Name, []). - -%% ===================================================================== -%% @spec file(Name::filename(), Options::[term()]) -> ok -%% -%% @doc Tidies an Erlang source code file. -%% -%% Available options are: -%%
-%%   {backup_suffix, string()}
-%%
-%%       Specifies the file name suffix to be used when a backup
-%%       file is created; the default value is `".bak"'
-%%       (cf. the `backups' option).
-%%
-%%   {backups, boolean()}
-%%
-%%       If the value is `true', existing files will be
-%%       renamed before new files are opened for writing. The new
-%%       names are formed by appending the string given by the
-%%       `backup_suffix' option to the original name. The
-%%       default value is `true'.
-%%
-%%   {dir, filename()}
-%%
-%%       Specifies the name of the directory in which the output
-%%       file is to be written. By default, the current directory is
-%%       used. If the value is an empty string, the current directory
-%%       is used.
-%%
-%%   {outfile, filename()}
-%%
-%%       Specifies the name of the file (without suffix) to which
-%%       the resulting source code is to be written. If this option is
-%%       not specified, the `Name' argument is used.
-%%
-%%   {printer, Function}
-%%
-%%       `Function = (syntaxTree()) -> string()'
-%%
-%%       Specifies a function for prettyprinting Erlang syntax trees.
-%%       This is used for outputting the resulting module definition.
-%%       The function is assumed to return formatted text for the given
-%%       syntax tree, and should raise an exception if an error occurs.
-%%       The default formatting function calls
-%%       `erl_prettypr:format/2'.
-%%
-%%   {test, boolean()}
-%%
-%%       If the value is `true', no files will be modified; this
-%%       is typically most useful if the `verbose' flag is enabled, to
-%%       generate reports about the program files without affecting
-%%       them. The default value is `false'.
-%%
-%%   {stdout, boolean()}
-%%
-%%       If the value is `true', instead of the file being written
-%%       to disk it will be printed to stdout. The default value is
-%%       `false'.
-%% -%% See the function `module/2' for further options. -%% -%% @see erl_prettypr:format/2 -%% @see module/2 - --spec file(file:filename(), options()) -> 'ok'. - -file(Name, Opts) -> - Parent = self(), - Child = spawn_link(fun () -> file_1(Parent, Name, Opts) end), - receive - {Child, ok} -> - ok; - {Child, {error, Reason}} -> - exit(Reason) - end. - -file_1(Parent, Name, Opts) -> - try file_2(Name, Opts) of - _ -> - Parent ! {self(), ok} - catch - throw:syntax_error -> % ignore syntax errors - Parent ! {self(), ok}; - error:Reason -> - Parent ! {self(), {error, Reason}} - end. - -file_2(Name, Opts) -> - Opts1 = Opts ++ file__defaults(), - Forms = read_module(Name, Opts1), - Comments = erl_comment_scan:file(Name), - Forms1 = erl_recomment:recomment_forms(Forms, Comments), - Tree = module(Forms1, [{file, Name} | Opts1]), - case proplists:get_bool(test, Opts1) of - true -> - ok; - false -> - case proplists:get_bool(stdout, Opts1) of - true -> - print_module(Tree, Opts1), - ok; - false -> - write_module(Tree, Name, Opts1), - ok - end - end. - -read_module(Name, Opts) -> - verbose("reading module `~ts'.", [filename(Name)], Opts), - case epp_dodger:parse_file(Name, [no_fail]) of - {ok, Forms} -> - check_forms(Forms, Name), - Forms; - {error, R} -> - error_read_file(Name), - exit({error, R}) - end. - -check_forms(Fs, Name) -> - Fun = fun (F) -> - case erl_syntax:type(F) of - error_marker -> - S = case erl_syntax:error_marker_info(F) of - {_, M, D} -> - M:format_error(D); - _ -> - "unknown error" - end, - report_error({Name, erl_syntax:get_pos(F), - "\n ~ts"}, [S]), - exit(error); - _ -> - ok - end - end, - lists:foreach(Fun, Fs). - -%% Create the target directory and make a backup file if necessary, -%% then open the file, output the text and close the file -%% safely. Returns the file name. - -write_module(Tree, Name, Opts) -> - Name1 = proplists:get_value(outfile, Opts, filename(Name)), - Dir = filename(proplists:get_value(dir, Opts, "")), - File = if Dir =:= "" -> - Name1; - true -> - case file_type(Dir) of - {value, directory} -> - ok; - {value, _} -> - report_error("`~ts' is not a directory.", - [filename(Dir)]), - exit(error); - none -> - case file:make_dir(Dir) of - ok -> - verbose("created directory `~ts'.", - [filename(Dir)], Opts), - ok; - E -> - report_error("failed to create " - "directory `~ts'.", - [filename(Dir)]), - exit({make_dir, E}) - end - end, - filename(filename:join(Dir, Name1)) - end, - Encoding = [{encoding,Enc} || Enc <- [epp:read_encoding(Name)], - Enc =/= none], - case proplists:get_bool(backups, Opts) of - true -> - backup_file(File, Opts); - false -> - ok - end, - Printer = proplists:get_value(printer, Opts), - FD = open_output_file(File, Encoding), - verbose("writing to file `~ts'.", [File], Opts), - V = (catch {ok, output(FD, Printer, Tree, Opts++Encoding)}), - ok = file:close(FD), - case V of - {ok, _} -> - File; - {'EXIT', R} -> - error_write_file(File), - exit(R); - R -> - error_write_file(File), - throw(R) - end. - -print_module(Tree, Opts) -> - Printer = proplists:get_value(printer, Opts), - io:format(Printer(Tree, Opts)). - -output(FD, Printer, Tree, Opts) -> - io:put_chars(FD, Printer(Tree, Opts)), - io:nl(FD). - -%% file_type(file:filename()) -> {value, Type} | none - -file_type(Name) -> - file_type(Name, false). - -is_symlink(Name) -> - file_type(Name, true) =:= {value, symlink}. 
- -file_type(Name, Links) -> - V = case Links of - true -> - catch file:read_link_info(Name); - false -> - catch file:read_file_info(Name) - end, - case V of - {ok, Env} -> - {value, Env#file_info.type}; - {error, enoent} -> - none; - {error, R} -> - error_read_file(Name), - exit({error, R}); - {'EXIT', R} -> - error_read_file(Name), - exit(R); - R -> - error_read_file(Name), - throw(R) - end. - -open_output_file(FName, Options) -> - case catch file:open(FName, [write]++Options) of - {ok, FD} -> - FD; - {error, R} -> - error_open_output(FName), - exit({error, R}); - {'EXIT', R} -> - error_open_output(FName), - exit(R); - R -> - error_open_output(FName), - exit(R) - end. - -%% If the file exists, rename it by appending the given suffix to the -%% file name. - -backup_file(Name, Opts) -> - case file_type(Name) of - {value, regular} -> - backup_file_1(Name, Opts); - {value, _} -> - error_backup_file(Name), - exit(error); - none -> - ok - end. - -%% The file should exist and be a regular file here. - -backup_file_1(Name, Opts) -> - Suffix = proplists:get_value(backup_suffix, Opts, ""), - Dest = filename:join(filename:dirname(Name), - filename:basename(Name) ++ Suffix), - case catch file:rename(Name, Dest) of - ok -> - verbose("made backup of file `~ts'.", [Name], Opts); - {error, R} -> - error_backup_file(Name), - exit({error, R}); - {'EXIT', R} -> - error_backup_file(Name), - exit(R); - R -> - error_backup_file(Name), - throw(R) - end. - -%% ===================================================================== -%% @spec module(Forms) -> syntaxTree() -%% @equiv module(Forms, []) - --spec module(erl_syntax:forms()) -> erl_syntax:syntaxTree(). - -module(Forms) -> - module(Forms, []). - -%% ===================================================================== -%% @spec module(Forms, Options::[term()]) -> syntaxTree() -%% -%% Forms = syntaxTree() | [syntaxTree()] -%% syntaxTree() = erl_syntax:syntaxTree() -%% -%% @doc Tidies a syntax tree representation of a module -%% definition. The given `Forms' may be either a single -%% syntax tree of type `form_list', or a list of syntax -%% trees representing "program forms". In either case, -%% `Forms' must represent a single complete module -%% definition. The returned syntax tree has type -%% `form_list' and represents a tidied-up version of the -%% same source code. -%% -%% Available options are: -%%
-%%   {auto_export_vars, boolean()}
-%%
-%%       If the value is `true', all matches
-%%       "`{V1, ..., Vn} = E'" where `E' is a
-%%       case-, if- or receive-expression whose branches all return
-%%       n-tuples (or explicitly throw exceptions) will be rewritten
-%%       to bind and export the variables `V1', ...,
-%%       `Vn' directly. The default value is `false'.
-%%
-%%       For example:
-%%                {X, Y} = case ... of
-%%                             ... -> {17, foo()};
-%%                             ... -> {42, bar()}
-%%                         end
-%%
-%%       will be rewritten to:
-%%                case ... of
-%%                    ... -> X = 17, Y = foo(), {X, Y};
-%%                    ... -> X = 42, Y = bar(), {X, Y}
-%%                end
-%%
-%%   {auto_list_comp, boolean()}
-%%
-%%       If the value is `true', calls to `lists:map/2' and
-%%       `lists:filter/2' will be rewritten using list comprehensions.
-%%       The default value is `true'.
-%%
-%%   {file, string()}
-%%
-%%       Specifies the name of the file from which the source code
-%%       was taken. This is only used for generation of error
-%%       reports. The default value is the empty string.
-%%
-%%   {idem, boolean()}
-%%
-%%       If the value is `true', all options that affect how the
-%%       code is modified are set to "no changes". For example, to
-%%       only update guard tests, and nothing else, use the options
-%%       `[new_guard_tests, idem]'. (Recall that options closer to the
-%%       beginning of the list have higher precedence.)
-%%
-%%   {keep_unused, boolean()}
-%%
-%%       If the value is `true', unused functions will
-%%       not be removed from the code. The default value is
-%%       `false'.
-%%
-%%   {new_guard_tests, boolean()}
-%%
-%%       If the value is `true', guard tests will be updated to
-%%       use the new names, e.g. "`is_integer(X)'" instead of
-%%       "`integer(X)'". The default value is `true'. See also
-%%       `old_guard_tests'.
-%%
-%%   {no_imports, boolean()}
-%%
-%%       If the value is `true', all import statements will be
-%%       removed and calls to imported functions will be expanded to
-%%       explicit remote calls. The default value is `false'.
-%%
-%%   {old_guard_tests, boolean()}
-%%
-%%       If the value is `true', guard tests will be changed to
-%%       use the old names instead of the new ones, e.g.
-%%       "`integer(X)'" instead of "`is_integer(X)'". The default
-%%       value is `false'. This option overrides the `new_guard_tests'
-%%       option.
-%%
-%%   {quiet, boolean()}
-%%
-%%       If the value is `true', all information
-%%       messages and warning messages will be suppressed. The default
-%%       value is `false'.
-%%
-%%   {rename, [{{atom(), atom(), integer()},
-%%              {atom(), atom()}}]}
-%%
-%%       The value is a list of pairs, associating tuples
-%%       `{Module, Name, Arity}' with tuples `{NewModule, NewName}',
-%%       specifying renamings of calls to remote functions. By
-%%       default, the value is the empty list.
-%%
-%%       The renaming affects only remote calls (also when
-%%       disguised by import declarations); local calls within a
-%%       module are not affected, and no function definitions are
-%%       renamed. Since the arity cannot change, the new name is
-%%       represented by `{NewModule, NewName}' only. Only
-%%       calls matching the specified arity will match; multiple
-%%       entries are necessary for renaming calls to functions that
-%%       have the same module and function name, but different
-%%       arities.
-%%
-%%       This option can also be used to override the default
-%%       renaming of calls which use obsolete function names.
-%%
-%%   {verbose, boolean()}
-%%
-%%       If the value is `true', progress messages will be output
-%%       while the program is running, unless the `quiet' option is
-%%       `true'. The default value is `false'.
- --spec module(erl_syntax:forms(), [term()]) -> erl_syntax:syntaxTree(). - -module(Forms, Opts) when is_list(Forms) -> - module(erl_syntax:form_list(Forms), Opts); -module(Forms, Opts) -> - Opts1 = proplists:expand(module__expansions(), Opts) - ++ module__defaults(), - File = proplists:get_value(file, Opts1, ""), - Forms1 = erl_syntax:flatten_form_list(Forms), - module_1(Forms1, File, Opts1). - -module__defaults() -> - [{auto_export_vars, false}, - {auto_list_comp, true}, - {keep_unused, false}, - {new_guard_tests, true}, - {no_imports, false}, - {old_guard_tests, false}, - {quiet, false}, - {verbose, false}]. - -module__expansions() -> - [{idem, [{auto_export_vars, false}, - {auto_list_comp, false}, - {keep_unused, true}, - {new_guard_tests, false}, - {no_imports, false}, - {old_guard_tests, false}]}]. - -module_1(Forms, File, Opts) -> - Info = analyze_forms(Forms, File), - Module = get_module_name(Info, File), - Attrs = get_module_attributes(Info), - Exports = get_module_exports(Info), - Imports = get_module_imports(Info), - Opts1 = check_imports(Imports, Opts, File), - Fs = erl_syntax:form_list_elements(Forms), - {Names, Defs} = collect_functions(Fs), - Exports1 = check_export_all(Attrs, Names, Exports), - Roots = ordsets:union(ordsets:from_list(Exports1), - hidden_uses(Fs, Imports)), - {Names1, Used, Imported, Defs1} = visit_used(Names, Defs, Roots, - Imports, Module, - Opts1), - Fs1 = update_forms(Fs, Defs1, Imported, Opts1), - Fs2 = filter_forms(Fs1, Names1, Used, Opts1), - rewrite(Forms, erl_syntax:form_list(Fs2)). - -analyze_forms(Forms, File) -> - case catch {ok, erl_syntax_lib:analyze_forms(Forms)} of - {ok, L1} -> - L1; - syntax_error -> - report_error({File, 0, "syntax error."}), - throw(syntax_error); - {'EXIT', R} -> - exit(R); - R -> - throw(R) - end. - --spec get_module_name([erl_syntax_lib:info_pair()], string()) -> atom(). - -get_module_name(List, File) -> - case lists:keyfind(module, 1, List) of - {module, M} -> - M; - _ -> - report_error({File, 0, - "cannot determine module name."}), - exit(error) - end. - -get_module_attributes(List) -> - case lists:keyfind(attributes, 1, List) of - {attributes, As} -> - As; - _ -> - [] - end. - --spec get_module_exports([erl_syntax_lib:info_pair()]) -> [{atom(), arity()}]. - -get_module_exports(List) -> - case lists:keyfind(exports, 1, List) of - {exports, Es} -> - Es; - _ -> - [] - end. - --spec get_module_imports([erl_syntax_lib:info_pair()]) -> [{atom(), atom()}]. - -get_module_imports(List) -> - case lists:keyfind(imports, 1, List) of - {imports, Is} -> - flatten_imports(Is); - _ -> - [] - end. - -compile_attrs(As) -> - lists:append([if is_list(T) -> T; true -> [T] end - || {compile, T} <- As]). - --spec flatten_imports([{atom(), [atom()]}]) -> [{atom(), atom()}]. - -flatten_imports(Is) -> - [{F, M} || {M, Fs} <- Is, F <- Fs]. - -check_imports(Is, Opts, File) -> - case check_imports_1(lists:sort(Is)) of - true -> - Opts; - false -> - case proplists:get_bool(no_imports, Opts) of - true -> - warn({File, 0, - "conflicting import declarations - " - "will not expand imports."}, - [], Opts), - %% prevent expansion of imports - [{no_imports, false} | Opts]; - false -> - Opts - end - end. - --spec check_imports_1([{atom(), atom()}]) -> boolean(). - -check_imports_1([{F, M1}, {F, M2} | _Is]) when M1 =/= M2 -> - false; -check_imports_1([_ | Is]) -> - check_imports_1(Is); -check_imports_1([]) -> - true. 
- -check_export_all(Attrs, Names, Exports) -> - case lists:member(export_all, compile_attrs(Attrs)) of - true -> - Exports ++ sets:to_list(Names); - false -> - Exports - end. - -filter_forms(Fs, Names, Used, Opts) -> - Keep = case proplists:get_bool(keep_unused, Opts) of - true -> - Names; - false -> - Used - end, - [F || F <- Fs, keep_form(F, Keep, Opts)]. - -keep_form(Form, Used, Opts) -> - case erl_syntax:type(Form) of - function -> - N = erl_syntax_lib:analyze_function(Form), - case sets:is_element(N, Used) of - false -> - {F, A} = N, - File = proplists:get_value(file, Opts, ""), - report({File, erl_syntax:get_pos(Form), - "removing unused function `~w/~w'."}, - [F, A], Opts), - false; - true -> - true - end; - attribute -> - case erl_syntax_lib:analyze_attribute(Form) of - {file, _} -> - false; - _ -> - true - end; - error_marker -> - false; - warning_marker -> - false; - eof_marker -> - false; - _ -> - true - end. - -collect_functions(Forms) -> - lists:foldl( - fun (F, {Names, Defs}) -> - case erl_syntax:type(F) of - function -> - N = erl_syntax_lib:analyze_function(F), - {sets:add_element(N, Names), - dict:store(N, {F, []}, Defs)}; - _ -> - {Names, Defs} - end - end, - {sets:new(), dict:new()}, - Forms). - -update_forms([F | Fs], Defs, Imports, Opts) -> - case erl_syntax:type(F) of - function -> - N = erl_syntax_lib:analyze_function(F), - {F1, Fs1} = dict:fetch(N, Defs), - [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports, - Opts); - attribute -> - [update_attribute(F, Imports, Opts) - | update_forms(Fs, Defs, Imports, Opts)]; - _ -> - [F | update_forms(Fs, Defs, Imports, Opts)] - end; -update_forms([], _, _, _) -> - []. - -update_attribute(F, Imports, Opts) -> - case erl_syntax_lib:analyze_attribute(F) of - {import, {M, Ns}} -> - Ns1 = ordsets:from_list([N || N <- Ns, - sets:is_element(N, Imports)]), - case ordsets:subtract(ordsets:from_list(Ns), Ns1) of - [] -> - ok; - Names -> - File = proplists:get_value(file, Opts, ""), - report({File, erl_syntax:get_pos(F), - "removing unused imports:~s"}, - [[io_lib:fwrite("\n\t`~w:~w/~w'", [M, N, A]) - || {N, A} <- Names]], Opts) - end, - Is = [make_fname(N) || N <- Ns1], - if Is =:= [] -> - %% This will be filtered out later. - erl_syntax:warning_marker(deleted); - true -> - F1 = erl_syntax:attribute(erl_syntax:atom(import), - [erl_syntax:atom(M), - erl_syntax:list(Is)]), - rewrite(F, F1) - end; - {export, Ns} -> - Es = [make_fname(N) || N <- ordsets:from_list(Ns)], - F1 = erl_syntax:attribute(erl_syntax:atom(export), - [erl_syntax:list(Es)]), - rewrite(F, F1); - _ -> - F - end. - -make_fname({F, A}) -> - erl_syntax:arity_qualifier(erl_syntax:atom(F), - erl_syntax:integer(A)). - -hidden_uses(Fs, Imports) -> - Used = lists:foldl(fun (F, S) -> - case erl_syntax:type(F) of - attribute -> - hidden_uses_1(F, S); - _ -> - S - end - end, - [], Fs), - ordsets:subtract(Used, ordsets:from_list([F || {F, _M} <- Imports])). - -hidden_uses_1(Tree, Used) -> - erl_syntax_lib:fold(fun hidden_uses_2/2, Used, Tree). 
- -hidden_uses_2(Tree, Used) -> - case erl_syntax:type(Tree) of - application -> - F = erl_syntax:application_operator(Tree), - case erl_syntax:type(F) of - atom -> - As = erl_syntax:application_arguments(Tree), - N = {erl_syntax:atom_value(F), length(As)}, - case is_auto_imported(N) of - true -> - Used; - false -> - ordsets:add_element(N, Used) - end; - _ -> - Used - end; - implicit_fun -> - F = erl_syntax:implicit_fun_name(Tree), - case catch {ok, erl_syntax_lib:analyze_function_name(F)} of - {ok, {Name, Arity} = N} - when is_atom(Name), is_integer(Arity) -> - ordsets:add_element(N, Used); - _ -> - Used - end; - _ -> - Used - end. - --type fa() :: {atom(), arity()}. --type context() :: 'guard_expr' | 'guard_test' | 'normal'. - --record(env, {file :: file:filename(), - module :: atom(), - current :: fa() | 'undefined', - imports = dict:new() :: dict:dict(atom(), atom()), - context = normal :: context(), - verbosity = 1 :: 0 | 1 | 2, - quiet = false :: boolean(), - no_imports = false :: boolean(), - spawn_funs = false :: boolean(), - auto_list_comp = true :: boolean(), - auto_export_vars = false :: boolean(), - new_guard_tests = true :: boolean(), - old_guard_tests = false :: boolean()}). - --record(st, {varc :: non_neg_integer() | 'undefined', - used = sets:new() :: sets:set({atom(), arity()}), - imported :: sets:set({atom(), arity()}), - vars :: sets:set(atom()) | 'undefined', - functions :: sets:set({atom(), arity()}), - new_forms = [] :: [erl_syntax:syntaxTree()], - rename :: dict:dict(mfa(), {atom(), atom()})}). - -visit_used(Names, Defs, Roots, Imports, Module, Opts) -> - File = proplists:get_value(file, Opts, ""), - NoImports = proplists:get_bool(no_imports, Opts), - Rename = proplists:append_values(rename, Opts), - loop(Roots, sets:new(), Defs, - #env{file = File, - module = Module, - imports = dict:from_list(Imports), - verbosity = verbosity(Opts), - no_imports = NoImports, - spawn_funs = proplists:get_bool(spawn_funs, Opts), - auto_list_comp = proplists:get_bool(auto_list_comp, Opts), - auto_export_vars = proplists:get_bool(auto_export_vars, - Opts), - new_guard_tests = proplists:get_bool(new_guard_tests, - Opts), - old_guard_tests = proplists:get_bool(old_guard_tests, - Opts)}, - #st{used = sets:from_list(Roots), - imported = sets:new(), - functions = Names, - rename = dict:from_list([X || {F1, F2} = X <- Rename, - is_remote_name(F1), - is_atom_pair(F2)])}). - -loop([F | Work], Seen0, Defs0, Env, St0) -> - case sets:is_element(F, Seen0) of - true -> - loop(Work, Seen0, Defs0, Env, St0); - false -> - Seen1 = sets:add_element(F, Seen0), - case dict:find(F, Defs0) of - {ok, {Form, Fs}} -> - Vars = erl_syntax_lib:variables(Form), - Form1 = erl_syntax_lib:annotate_bindings(Form, []), - {Form2, St1} = visit(Form1, Env#env{current = F}, - St0#st{varc = 1, - used = sets:new(), - vars = Vars, - new_forms = []}), - Fs1 = St1#st.new_forms ++ Fs, - Defs1 = dict:store(F, {Form2, Fs1}, Defs0), - Used = St1#st.used, - Work1 = sets:to_list(Used) ++ Work, - St2 = St1#st{used = sets:union(Used, St0#st.used)}, - loop(Work1, Seen1, Defs1, Env, St2); - error -> - %% Quietly ignore any names that have no definition. - loop(Work, Seen1, Defs0, Env, St0) - end - end; -loop([], _, Defs, _, St) -> - {St#st.functions, St#st.used, St#st.imported, Defs}. 
- -visit(Tree, Env, St0) -> - case erl_syntax:type(Tree) of - application -> - visit_application(Tree, Env, St0); - infix_expr -> - visit_infix_expr(Tree, Env, St0); - prefix_expr -> - visit_prefix_expr(Tree, Env, St0); - implicit_fun -> - visit_implicit_fun(Tree, Env, St0); - clause -> - visit_clause(Tree, Env, St0); - list_comp -> - visit_list_comp(Tree, Env, St0); - match_expr -> - visit_match_expr(Tree, Env, St0); - _ -> - visit_other(Tree, Env, St0) - end. - -visit_other(Tree, Env, St) -> - F = fun (T, S) -> visit(T, Env, S) end, - erl_syntax_lib:mapfold_subtrees(F, St, Tree). - -visit_list(Ts, Env, St0) -> - lists:mapfoldl(fun (T, S) -> visit(T, Env, S) end, St0, Ts). - -visit_implicit_fun(Tree, _Env, St0) -> - F = erl_syntax:implicit_fun_name(Tree), - case catch {ok, erl_syntax_lib:analyze_function_name(F)} of - {ok, {Name, Arity} = N} - when is_atom(Name), is_integer(Arity) -> - Used = sets:add_element(N, St0#st.used), - {Tree, St0#st{used = Used}}; - _ -> - %% symbolic funs do not count as uses of a function - {Tree, St0} - end. - -visit_clause(Tree, Env, St0) -> - %% We do not visit the patterns (for now, anyway). - Ps = erl_syntax:clause_patterns(Tree), - {G, St1} = case erl_syntax:clause_guard(Tree) of - none -> - {none, St0}; - G0 -> - visit(G0, Env#env{context = guard_test}, St0) - end, - {B, St2} = visit_list(erl_syntax:clause_body(Tree), Env, St1), - {rewrite(Tree, erl_syntax:clause(Ps, G, B)), St2}. - -visit_infix_expr(Tree, #env{context = guard_test}, St0) -> - %% Detect transition from guard test to guard expression. - visit_other(Tree, #env{context = guard_expr, file = ""}, St0); -visit_infix_expr(Tree, Env, St0) -> - visit_other(Tree, Env, St0). - -visit_prefix_expr(Tree, #env{context = guard_test}, St0) -> - %% Detect transition from guard test to guard expression. - visit_other(Tree, #env{context = guard_expr, file = ""}, St0); -visit_prefix_expr(Tree, Env, St0) -> - visit_other(Tree, Env, St0). - -visit_application(Tree, Env, St0) -> - Env1 = case Env of - #env{context = guard_test} -> - Env#env{context = guard_expr}; - _ -> - Env - end, - {F, St1} = visit(erl_syntax:application_operator(Tree), Env1, St0), - {As, St2} = visit_list(erl_syntax:application_arguments(Tree), Env1, - St1), - case erl_syntax:type(F) of - atom -> - visit_atom_application(F, As, Tree, Env, St2); - implicit_fun -> - visit_named_fun_application(F, As, Tree, Env, St2); - fun_expr -> - visit_lambda_application(F, As, Tree, Env, St2); - _ -> - visit_nonlocal_application(F, As, Tree, Env, St2) - end. - -visit_application_final(F, As, Tree, St0) -> - {rewrite(Tree, erl_syntax:application(F, As)), St0}. - -revisit_application(F, As, Tree, Env, St0) -> - visit(rewrite(Tree, erl_syntax:application(F, As)), Env, St0). - -visit_atom_application(F, As, Tree, #env{context = guard_test} = Env, - St0) -> - N = erl_syntax:atom_value(F), - A = length(As), - N1 = case Env#env.old_guard_tests of - true -> - reverse_guard_test(N, A); - false -> - case Env#env.new_guard_tests of - true -> - rewrite_guard_test(N, A); - false -> - N - end - end, - if N1 =/= N -> - report({Env#env.file, erl_syntax:get_pos(F), - "changing guard test `~w' to `~w'."}, - [N, N1], Env#env.verbosity); - true -> - ok - end, - %% No need to revisit here. - F1 = rewrite(F, erl_syntax:atom(N1)), - visit_application_final(F1, As, Tree, St0); -visit_atom_application(F, As, Tree, #env{context = guard_expr}, St0) -> - %% Atom applications in guard expressions are never local calls. 
- visit_application_final(F, As, Tree, St0); -visit_atom_application(F, As, Tree, Env, St0) -> - N = {erl_syntax:atom_value(F), length(As)}, - case is_auto_imported(N) of - true -> - visit_bif_call(N, F, As, Tree, Env, St0); - false -> - case is_imported(N, Env) of - true -> - visit_import_application(N, F, As, Tree, Env, St0); - false -> - Used = sets:add_element(N, St0#st.used), - visit_application_final(F, As, Tree, - St0#st{used = Used}) - end - end. - -visit_import_application({N, A} = Name, F, As, Tree, Env, St0) -> - M = dict:fetch(Name, Env#env.imports), - Expand = case Env#env.no_imports of - true -> - true; - false -> - auto_expand_import({M, N, A}, St0) - end, - case Expand of - true -> - report({Env#env.file, erl_syntax:get_pos(F), - "expanding call to imported function `~w:~w/~w'."}, - [M, N, A], Env#env.verbosity), - F1 = erl_syntax:module_qualifier(erl_syntax:atom(M), - erl_syntax:atom(N)), - revisit_application(rewrite(F, F1), As, Tree, Env, St0); - false -> - Is = sets:add_element(Name, St0#st.imported), - visit_application_final(F, As, Tree, St0#st{imported = Is}) - end. - -visit_bif_call({apply, 2}, F, [E, Args] = As, Tree, Env, St0) -> - case erl_syntax:is_proper_list(Args) of - true -> - report({Env#env.file, erl_syntax:get_pos(F), - "changing use of `apply/2' " - "to direct function call."}, - [], Env#env.verbosity), - As1 = erl_syntax:list_elements(Args), - revisit_application(E, As1, Tree, Env, St0); - false -> - visit_application_final(F, As, Tree, St0) - end; -visit_bif_call({apply, 3}, F, [M, N, Args] = As, Tree, Env, St0) -> - case erl_syntax:is_proper_list(Args) of - true -> - report({Env#env.file, erl_syntax:get_pos(F), - "changing use of `apply/3' " - "to direct remote call."}, - [], Env#env.verbosity), - F1 = rewrite(F, erl_syntax:module_qualifier(M, N)), - As1 = erl_syntax:list_elements(Args), - visit_nonlocal_application(F1, As1, Tree, Env, St0); - false -> - visit_application_final(F, As, Tree, St0) - end; -visit_bif_call({spawn, 3} = N, F, [_, _, _] = As, Tree, Env, St0) -> - visit_spawn_call(N, F, [], As, Tree, Env, St0); -visit_bif_call({spawn_link, 3} = N, F, [_, _, _] = As, Tree, Env, - St0) -> - visit_spawn_call(N, F, [], As, Tree, Env, St0); -visit_bif_call({spawn, 4} = N, F, [A | [_, _, _] = As], Tree, Env, - St0) -> - visit_spawn_call(N, F, [A], As, Tree, Env, St0); -visit_bif_call({spawn_link, 4} = N, F, [A | [_, _, _] = As], Tree, Env, - St0) -> - visit_spawn_call(N, F, [A], As, Tree, Env, St0); -visit_bif_call(_, F, As, Tree, _Env, St0) -> - visit_application_final(F, As, Tree, St0). - -visit_spawn_call({N, A}, F, Ps, [A1, A2, A3] = As, Tree, - #env{spawn_funs = true} = Env, St0) -> - case erl_syntax:is_proper_list(A3) of - true -> - report({Env#env.file, erl_syntax:get_pos(F), - "changing use of `~w/~w' to `~w/~w' with a fun."}, - [N, A, N, 1 + length(Ps)], Env#env.verbosity), - F1 = case erl_syntax:is_atom(A1, Env#env.module) of - true -> - A2; % calling self - false -> - clone(A1, - erl_syntax:module_qualifier(A1, A2)) - end, - %% Need to do some scoping tricks here to make sure the - %% arguments are evaluated by the parent, not by the spawned - %% process. 
- As1 = erl_syntax:list_elements(A3), - {Vs, St1} = new_variables(length(As1), St0), - E1 = clone(F1, erl_syntax:application(F1, Vs)), - C1 = clone(E1, erl_syntax:clause([], [E1])), - E2 = clone(C1, erl_syntax:fun_expr([C1])), - C2 = clone(E2, erl_syntax:clause(Vs, [], [E2])), - E3 = clone(C2, erl_syntax:fun_expr([C2])), - E4 = clone(E3, erl_syntax:application(E3, As1)), - E5 = erl_syntax_lib:annotate_bindings(E4, get_env(A1)), - {E6, St2} = visit(E5, Env, St1), - F2 = rewrite(F, erl_syntax:atom(N)), - visit_nonlocal_application(F2, Ps ++ [E6], Tree, Env, St2); - false -> - visit_application_final(F, Ps ++ As, Tree, St0) - end; -visit_spawn_call(_, F, Ps, As, Tree, _Env, St0) -> - visit_application_final(F, Ps ++ As, Tree, St0). - -visit_named_fun_application(F, As, Tree, Env, St0) -> - Name = erl_syntax:implicit_fun_name(F), - case catch {ok, erl_syntax_lib:analyze_function_name(Name)} of - {ok, {A, N}} when is_atom(A), is_integer(N), N =:= length(As) -> - case is_nonlocal({A, N}, Env) of - true -> - %% Making this a direct call would be an error. - visit_application_final(F, As, Tree, St0); - false -> - report({Env#env.file, erl_syntax:get_pos(F), - "changing application of implicit fun " - "to direct local call."}, - [], Env#env.verbosity), - Used = sets:add_element({A, N}, St0#st.used), - F1 = rewrite(F, erl_syntax:atom(A)), - revisit_application(F1, As, Tree, Env, - St0#st{used = Used}) - end; - _ -> - visit_application_final(F, As, Tree, St0) - end. - -visit_lambda_application(F, As, Tree, Env, St0) -> - A = erl_syntax:fun_expr_arity(F), - case A =:= length(As) of - true -> - report({Env#env.file, erl_syntax:get_pos(F), - "changing application of fun-expression " - "to local function call."}, - [], Env#env.verbosity), - {Base, _} = Env#env.current, - Free = [erl_syntax:variable(V) || V <- get_free_vars(F)], - N = length(Free), - A1 = A + N, - {Name, St1} = new_fname({Base, A1}, St0), - Cs = augment_clauses(erl_syntax:fun_expr_clauses(F), Free), - F1 = erl_syntax:atom(Name), - New = rewrite(F, erl_syntax:function(F1, Cs)), - Used = sets:add_element({Name, A1}, St1#st.used), - Forms = [New | St1#st.new_forms], - St2 = St1#st{new_forms = Forms, used = Used}, - visit_application_final(F1, As ++ Free, Tree, St2); - false -> - warn({Env#env.file, erl_syntax:get_pos(F), - "arity mismatch in fun-expression application."}, - [], Env#env.verbosity), - visit_application_final(F, As, Tree, St0) - end. - -augment_clauses(Cs, Vs) -> - [begin - Ps = erl_syntax:clause_patterns(C), - G = erl_syntax:clause_guard(C), - Es = erl_syntax:clause_body(C), - rewrite(C, erl_syntax:clause(Ps ++ Vs, G, Es)) - end - || C <- Cs]. - -visit_nonlocal_application(F, As, Tree, Env, St0) -> - case erl_syntax:type(F) of - tuple -> - case erl_syntax:tuple_elements(F) of - [X1, X2] -> - report({Env#env.file, erl_syntax:get_pos(F), - "changing application of 2-tuple " - "to direct remote call."}, - [], Env#env.verbosity), - F1 = erl_syntax:module_qualifier(X1, X2), - revisit_application(rewrite(F, F1), As, Tree, Env, - St0); - _ -> - visit_application_final(F, As, Tree, St0) - end; - module_qualifier -> - case catch {ok, erl_syntax_lib:analyze_function_name(F)} of - {ok, {M, N}} when is_atom(M), is_atom(N) -> - visit_remote_application({M, N, length(As)}, F, As, - Tree, Env, St0); - _ -> - visit_application_final(F, As, Tree, St0) - end; - _ -> - visit_application_final(F, As, Tree, St0) - end. 
- -%% --- lists:append/2 and lists:subtract/2 --- -visit_remote_application({lists, append, 2}, F, [A1, A2], Tree, Env, - St0) -> - report({Env#env.file, erl_syntax:get_pos(F), - "replacing call to `lists:append/2' " - "with the `++' operator."}, - [], Env#env.verbosity), - Tree1 = erl_syntax:infix_expr(A1, erl_syntax:operator('++'), A2), - visit(rewrite(Tree, Tree1), Env, St0); -visit_remote_application({lists, subtract, 2}, F, [A1, A2], Tree, Env, - St0) -> - report({Env#env.file, erl_syntax:get_pos(F), - "replacing call to `lists:subtract/2' " - "with the `--' operator."}, - [], Env#env.verbosity), - Tree1 = erl_syntax:infix_expr(A1, erl_syntax:operator('--'), A2), - visit(rewrite(Tree, Tree1), Env, St0); -%% --- lists:map/2 and lists:filter/2 --- -visit_remote_application({lists, filter, 2}, F, [A1, A2] = As, Tree, - Env, St0) -> - case Env#env.auto_list_comp - and (erl_syntax:type(A1) =/= variable) - and (get_var_exports(A1) =:= []) - and (get_var_exports(A2) =:= []) of - true -> - report({Env#env.file, erl_syntax:get_pos(F), - "replacing call to `lists:filter/2' " - "with a list comprehension."}, - [], Env#env.verbosity), - {V, St1} = new_variable(St0), - G = clone(A2, erl_syntax:generator(V, A2)), - T = clone(A1, erl_syntax:application(A1, [V])), - L = erl_syntax:list_comp(V, [G, T]), - L1 = erl_syntax_lib:annotate_bindings(L, get_env(Tree)), - visit(rewrite(Tree, L1), Env, St1); - false -> - visit_application_final(F, As, Tree, St0) - end; -visit_remote_application({lists, map, 2}, F, [A1, A2] = As, Tree, Env, - St0) -> - case Env#env.auto_list_comp - and (erl_syntax:type(A1) =/= variable) - and (get_var_exports(A1) =:= []) - and (get_var_exports(A2) =:= []) of - true -> - report({Env#env.file, erl_syntax:get_pos(F), - "replacing call to `lists:map/2' " - "with a list comprehension."}, - [], Env#env.verbosity), - {V, St1} = new_variable(St0), - T = clone(A1, erl_syntax:application(A1, [V])), - G = clone(A2, erl_syntax:generator(V, A2)), - L = erl_syntax:list_comp(T, [G]), - L1 = erl_syntax_lib:annotate_bindings(L, get_env(Tree)), - visit(rewrite(Tree, L1), Env, St1); - false -> - visit_application_final(F, As, Tree, St0) - end; -%% --- all other functions --- -visit_remote_application({M, N, A} = Name, F, As, Tree, Env, St) -> - case is_auto_imported(Name) of - true -> - %% We don't remove the qualifier - it might be there for the - %% sake of clarity. - visit_bif_call({N, A}, F, As, Tree, Env, St); - false -> - case rename_remote_call(Name, St) of - {M1, N1} -> - report({Env#env.file, erl_syntax:get_pos(F), - "updating obsolete call to `~w:~w/~w' " - "to use `~w:~w/~w' instead."}, - [M, N, A, M1, N1, A], Env#env.verbosity), - M2 = erl_syntax:atom(M1), - N2 = erl_syntax:atom(N1), - F1 = erl_syntax:module_qualifier(M2, N2), - revisit_application(rewrite(F, F1), As, Tree, Env, - St); - false -> - visit_application_final(F, As, Tree, St) - end - end. - --spec auto_expand_import(mfa(), #st{}) -> boolean(). - -auto_expand_import({lists, append, 2}, _St) -> true; -auto_expand_import({lists, subtract, 2}, _St) -> true; -auto_expand_import({lists, filter, 2}, _St) -> true; -auto_expand_import({lists, map, 2}, _St) -> true; -auto_expand_import(Name, St) -> - case is_auto_imported(Name) of - true -> - true; - false -> - rename_remote_call(Name, St) =/= false - end. 
- -visit_list_comp(Tree, Env, St0) -> - Es = erl_syntax:list_comp_body(Tree), - {Es1, St1} = visit_list_comp_body(Es, Env, St0), - {T, St2} = visit(erl_syntax:list_comp_template(Tree), Env, St1), - {rewrite(Tree, erl_syntax:list_comp(T, Es1)), St2}. - -visit_list_comp_body_join(Env) -> - fun (E, St0) -> - case is_generator(E) of - true -> - visit_generator(E, Env, St0); - false -> - visit_filter(E, Env, St0) - end - end. - -visit_list_comp_body(Es, Env, St0) -> - lists:mapfoldl(visit_list_comp_body_join(Env), St0, Es). - -%% 'visit_filter' also handles uninteresting generators. - -visit_filter(E, Env, St0) -> - visit(E, Env, St0). - -%% "interesting" generators have the form V <- [V || ...]; this can be -%% unfolded as long as no bindings become erroneously shadowed. - -visit_generator(G, Env, St0) -> - P = erl_syntax:generator_pattern(G), - case erl_syntax:type(P) of - variable -> - B = erl_syntax:generator_body(G), - case erl_syntax:type(B) of - list_comp -> - T = erl_syntax:list_comp_template(B), - case erl_syntax:type(T) of - variable -> - visit_generator_1(G, Env, St0); - _ -> - visit_filter(G, Env, St0) - end; - _ -> - visit_filter(G, Env, St0) - end; - _ -> - visit_filter(G, Env, St0) - end. - -visit_generator_1(G, Env, St0) -> - recommend({Env#env.file, erl_syntax:get_pos(G), - "unfold that this nested list comprehension can be unfolded " - "by hand to get better efficiency."}, - [], Env#env.verbosity), - visit_filter(G, Env, St0). - -visit_match_expr(Tree, Env, St0) -> - %% We do not visit the pattern (for now, anyway). - P = erl_syntax:match_expr_pattern(Tree), - {B, St1} = visit(erl_syntax:match_expr_body(Tree), Env, St0), - case erl_syntax:type(P) of - tuple -> - Ps = erl_syntax:tuple_elements(P), - case lists:all(fun is_variable/1, Ps) of - true -> - Vs = lists:sort([erl_syntax:variable_name(X) - || X <- Ps]), - case ordsets:is_set(Vs) of - true -> - Xs = get_var_exports(B), - case ordsets:intersection(Vs, Xs) of - [] -> - visit_match_body(Ps, P, B, Tree, - Env, St1); - _ -> - visit_match_expr_final(P, B, Tree, - Env, St1) - end; - false -> - visit_match_expr_final(P, B, Tree, Env, St1) - end; - false -> - visit_match_expr_final(P, B, Tree, Env, St1) - end; - _ -> - visit_match_expr_final(P, B, Tree, Env, St1) - end. - -visit_match_expr_final(P, B, Tree, _Env, St0) -> - {rewrite(Tree, erl_syntax:match_expr(P, B)), St0}. 
- -visit_match_body(_Ps, P, B, Tree, #env{auto_export_vars = false} = Env, - St0) -> - visit_match_expr_final(P, B, Tree, Env, St0); -visit_match_body(Ps, P, B, Tree, Env, St0) -> - case erl_syntax:type(B) of - case_expr -> - Cs = erl_syntax:case_expr_clauses(B), - case multival_clauses(Cs, length(Ps), Ps) of - {true, Cs1} -> - report_export_vars(Env#env.file, - erl_syntax:get_pos(B), - "case", Env#env.verbosity), - A = erl_syntax:case_expr_argument(B), - Tree1 = erl_syntax:case_expr(A, Cs1), - {rewrite(Tree, Tree1), St0}; - false -> - visit_match_expr_final(P, B, Tree, Env, St0) - end; - if_expr -> - Cs = erl_syntax:if_expr_clauses(B), - case multival_clauses(Cs, length(Ps), Ps) of - {true, Cs1} -> - report_export_vars(Env#env.file, - erl_syntax:get_pos(B), - "if", Env#env.verbosity), - Tree1 = erl_syntax:if_expr(Cs1), - {rewrite(Tree, Tree1), St0}; - false -> - visit_match_expr_final(P, B, Tree, Env, St0) - end; - cond_expr -> - Cs = erl_syntax:cond_expr_clauses(B), - case multival_clauses(Cs, length(Ps), Ps) of - {true, Cs1} -> - report_export_vars(Env#env.file, - erl_syntax:get_pos(B), - "cond", Env#env.verbosity), - Tree1 = erl_syntax:cond_expr(Cs1), - {rewrite(Tree, Tree1), St0}; - false -> - visit_match_expr_final(P, B, Tree, Env, St0) - end; - receive_expr -> - %% Handle the timeout case as an extra clause. - As = erl_syntax:receive_expr_action(B), - C = erl_syntax:clause([], As), - Cs = erl_syntax:receive_expr_clauses(B), - case multival_clauses([C | Cs], length(Ps), Ps) of - {true, [C1 | Cs1]} -> - report_export_vars(Env#env.file, - erl_syntax:get_pos(B), - "receive", Env#env.verbosity), - T = erl_syntax:receive_expr_timeout(B), - As1 = erl_syntax:clause_body(C1), - Tree1 = erl_syntax:receive_expr(Cs1, T, As1), - {rewrite(Tree, Tree1), St0}; - false -> - visit_match_expr_final(P, B, Tree, Env, St0) - end; - _ -> - visit_match_expr_final(P, B, Tree, Env, St0) - end. - -multival_clauses(Cs, N, Vs) -> - multival_clauses(Cs, N, Vs, []). - -multival_clauses([C | Cs], N, Vs, Cs1) -> - case erl_syntax:clause_body(C) of - [] -> - false; - Es -> - E = lists:last(Es), - case erl_syntax:type(E) of - tuple -> - Ts = erl_syntax:tuple_elements(E), - if length(Ts) =:= N -> - Bs = make_matches(E, Vs, Ts), - Es1 = replace_last(Es, Bs), - Ps = erl_syntax:clause_patterns(C), - G = erl_syntax:clause_guard(C), - C1 = erl_syntax:clause(Ps, G, Es1), - multival_clauses(Cs, N, Vs, - [rewrite(C, C1) | Cs1]); - true -> - false - end; - _ -> - case erl_syntax_lib:is_fail_expr(E) of - true -> - %% We must add dummy bindings here so we - %% don't introduce compilation errors due to - %% "unsafe" variable exports. - Bs = make_matches(Vs, - erl_syntax:atom(false)), - Es1 = replace_last(Es, Bs ++ [E]), - Ps = erl_syntax:clause_patterns(C), - G = erl_syntax:clause_guard(C), - C1 = erl_syntax:clause(Ps, G, Es1), - multival_clauses(Cs, N, Vs, - [rewrite(C, C1) | Cs1]); - false -> - false - end - end - end; -multival_clauses([], _N, _Vs, Cs) -> - {true, lists:reverse(Cs)}. - -make_matches(E, Vs, Ts) -> - case make_matches(Vs, Ts) of - [] -> - []; - [B | Bs] -> - [rewrite(E, B) | Bs] % preserve comments on E (but not B) - end. - -make_matches([V | Vs], [T | Ts]) -> - [erl_syntax:match_expr(V, T) | make_matches(Vs, Ts)]; -make_matches([V | Vs], T) when T =/= [] -> - [erl_syntax:match_expr(V, T) | make_matches(Vs, T)]; -make_matches([], _) -> - []. - -rename_remote_call(F, St) -> - case dict:find(F, St#st.rename) of - error -> - rename_remote_call_1(F); - {ok, F1} -> F1 - end. 
- --spec rename_remote_call_1(mfa()) -> {atom(), atom()} | 'false'. - -rename_remote_call_1({dict, dict_to_list, 1}) -> {dict, to_list}; -rename_remote_call_1({dict, list_to_dict, 1}) -> {dict, from_list}; -rename_remote_call_1({erl_eval, arg_list, 2}) -> {erl_eval, expr_list}; -rename_remote_call_1({erl_eval, arg_list, 3}) -> {erl_eval, expr_list}; -rename_remote_call_1({erl_eval, seq, 2}) -> {erl_eval, exprs}; -rename_remote_call_1({erl_eval, seq, 3}) -> {erl_eval, exprs}; -rename_remote_call_1({erl_pp, seq, 1}) -> {erl_eval, seq}; -rename_remote_call_1({erl_pp, seq, 2}) -> {erl_eval, seq}; -rename_remote_call_1({erlang, info, 1}) -> {erlang, system_info}; -rename_remote_call_1({io, parse_erl_seq, 1}) -> {io, parse_erl_exprs}; -rename_remote_call_1({io, parse_erl_seq, 2}) -> {io, parse_erl_exprs}; -rename_remote_call_1({io, parse_erl_seq, 3}) -> {io, parse_erl_exprs}; -rename_remote_call_1({io, scan_erl_seq, 1}) -> {io, scan_erl_exprs}; -rename_remote_call_1({io, scan_erl_seq, 2}) -> {io, scan_erl_exprs}; -rename_remote_call_1({io, scan_erl_seq, 3}) -> {io, scan_erl_exprs}; -rename_remote_call_1({io_lib, reserved_word, 1}) -> {erl_scan, reserved_word}; -rename_remote_call_1({io_lib, scan, 1}) -> {erl_scan, string}; -rename_remote_call_1({io_lib, scan, 2}) -> {erl_scan, string}; -rename_remote_call_1({io_lib, scan, 3}) -> {erl_scan, tokens}; -rename_remote_call_1({orddict, dict_to_list, 1}) -> {orddict, to_list}; -rename_remote_call_1({orddict, list_to_dict, 1}) -> {orddict, from_list}; -rename_remote_call_1({ordsets, list_to_set, 1}) -> {ordsets, from_list}; -rename_remote_call_1({ordsets, new_set, 0}) -> {ordsets, new}; -rename_remote_call_1({ordsets, set_to_list, 1}) -> {ordsets, to_list}; -rename_remote_call_1({ordsets, subset, 2}) -> {ordsets, is_subset}; -rename_remote_call_1({sets, list_to_set, 1}) -> {sets, from_list}; -rename_remote_call_1({sets, new_set, 0}) -> {sets, new}; -rename_remote_call_1({sets, set_to_list, 1}) -> {sets, to_list}; -rename_remote_call_1({sets, subset, 2}) -> {sets, is_subset}; -rename_remote_call_1({string, index, 2}) -> {string, str}; -rename_remote_call_1({unix, cmd, 1}) -> {os, cmd}; -rename_remote_call_1(_) -> false. - --spec rewrite_guard_test(atom(), arity()) -> atom(). - -rewrite_guard_test(atom, 1) -> is_atom; -rewrite_guard_test(binary, 1) -> is_binary; -rewrite_guard_test(constant, 1) -> is_constant; -rewrite_guard_test(float, 1) -> is_float; -rewrite_guard_test(function, 1) -> is_function; -rewrite_guard_test(function, 2) -> is_function; -rewrite_guard_test(integer, 1) -> is_integer; -rewrite_guard_test(list, 1) -> is_list; -rewrite_guard_test(number, 1) -> is_number; -rewrite_guard_test(pid, 1) -> is_pid; -rewrite_guard_test(port, 1) -> is_port; -rewrite_guard_test(reference, 1) -> is_reference; -rewrite_guard_test(tuple, 1) -> is_tuple; -rewrite_guard_test(record, 2) -> is_record; -rewrite_guard_test(record, 3) -> is_record; -rewrite_guard_test(N, _A) -> N. - --spec reverse_guard_test(atom(), arity()) -> atom(). 
- -reverse_guard_test(is_atom, 1) -> atom; -reverse_guard_test(is_binary, 1) -> binary; -reverse_guard_test(is_constant, 1) -> constant; -reverse_guard_test(is_float, 1) -> float; -reverse_guard_test(is_function, 1) -> function; -reverse_guard_test(is_function, 2) -> function; -reverse_guard_test(is_integer, 1) -> integer; -reverse_guard_test(is_list, 1) -> list; -reverse_guard_test(is_number, 1) -> number; -reverse_guard_test(is_pid, 1) -> pid; -reverse_guard_test(is_port, 1) -> port; -reverse_guard_test(is_reference, 1) -> reference; -reverse_guard_test(is_tuple, 1) -> tuple; -reverse_guard_test(is_record, 2) -> record; -reverse_guard_test(is_record, 3) -> record; -reverse_guard_test(N, _A) -> N. - - -%% ===================================================================== -%% Utility functions - -is_remote_name({M,F,A}) when is_atom(M), is_atom(F), is_integer(A) -> true; -is_remote_name(_) -> false. - -is_atom_pair({M,F}) when is_atom(M), is_atom(F) -> true; -is_atom_pair(_) -> false. - -replace_last([_E], Xs) -> - Xs; -replace_last([E | Es], Xs) -> - [E | replace_last(Es, Xs)]. - -is_generator(E) -> - erl_syntax:type(E) =:= generator. - -is_variable(E) -> - erl_syntax:type(E) =:= variable. - -new_variables(N, St0) when N > 0 -> - {V, St1} = new_variable(St0), - {Vs, St2} = new_variables(N - 1, St1), - {[V | Vs], St2}; -new_variables(0, St) -> - {[], St}. - -new_variable(St0) -> - Fun = fun (N) -> - list_to_atom("V" ++ integer_to_list(N)) - end, - Vs = St0#st.vars, - {Name, N} = new_name(St0#st.varc, Fun, Vs), - St1 = St0#st{varc = N + 1, vars = sets:add_element(Name, Vs)}, - {erl_syntax:variable(Name), St1}. - -new_fname({F, A}, St0) -> - Base = atom_to_list(F), - Fun = fun (N) -> - {list_to_atom(Base ++ "_" ++ integer_to_list(N)), A} - end, - Fs = St0#st.functions, - {{F1, _A} = Name, _N} = new_name(1, Fun, Fs), - {F1, St0#st{functions = sets:add_element(Name, Fs)}}. - -new_name(N, F, Set) -> - Name = F(N), - case sets:is_element(Name, Set) of - true -> - new_name(N + 1, F, Set); - false -> - {Name, N} - end. - -is_imported(F, Env) -> - dict:is_key(F, Env#env.imports). - -is_auto_imported({erlang, N, A}) -> - is_auto_imported({N, A}); -is_auto_imported({_, _N, _A}) -> - false; -is_auto_imported({N, A}) -> - erl_internal:bif(N, A). - -is_nonlocal(N, Env) -> - case is_imported(N, Env) of - true -> - true; - false -> - is_auto_imported(N) - end. - -get_var_exports(Node) -> - get_var_exports_1(erl_syntax:get_ann(Node)). - -get_var_exports_1([{bound, B} | _Bs]) -> B; -get_var_exports_1([_ | Bs]) -> get_var_exports_1(Bs); -get_var_exports_1([]) -> []. - -get_free_vars(Node) -> - get_free_vars_1(erl_syntax:get_ann(Node)). - -get_free_vars_1([{free, B} | _Bs]) -> B; -get_free_vars_1([_ | Bs]) -> get_free_vars_1(Bs); -get_free_vars_1([]) -> []. - -filename([C | T]) when is_integer(C), C > 0 -> - [C | filename(T)]; -filename([H|T]) -> - filename(H) ++ filename(T); -filename([]) -> - []; -filename(N) when is_atom(N) -> - atom_to_list(N); -filename(N) -> - report_error("bad filename: `~P'.", [N, 25]), - exit(error). - -get_env(Tree) -> - case lists:keyfind(env, 1, erl_syntax:get_ann(Tree)) of - {env, Env} -> - Env; - _ -> - [] - end. - -rewrite(Source, Target) -> - erl_syntax:copy_attrs(Source, Target). - -clone(Source, Target) -> - erl_syntax:copy_pos(Source, Target). - - -%% ===================================================================== -%% Reporting - -report_export_vars(F, L, Type, Opts) -> - report({F, L, "rewrote ~s-expression to export variables."}, - [Type], Opts). 
- -error_read_file(Name) -> - report_error("error reading file `~ts'.", [filename(Name)]). - -error_write_file(Name) -> - report_error("error writing to file `~ts'.", [filename(Name)]). - -error_backup_file(Name) -> - report_error("could not create backup of file `~ts'.", - [filename(Name)]). - -error_open_output(Name) -> - report_error("cannot open file `~ts' for output.", [filename(Name)]). - -verbosity(Opts) -> - case proplists:get_bool(quiet, Opts) of - true -> 0; - false -> - case proplists:get_value(verbose, Opts) of - true -> 2; - N when is_integer(N) -> N; - _ -> 1 - end - end. - -report_error(D) -> - report_error(D, []). - -report_error({F, L, D}, Vs) -> - report({F, L, {error, D}}, Vs); -report_error(D, Vs) -> - report({error, D}, Vs). - -%% warn(D, N) -> -%% warn(D, [], N). - -warn({F, L, D}, Vs, N) -> - report({F, L, {warning, D}}, Vs, N); -warn(D, Vs, N) -> - report({warning, D}, Vs, N). - -recommend(D, Vs, N) -> - report({recommend, D}, Vs, N). - -verbose(D, Vs, N) -> - report(2, D, Vs, N). - -report(D, Vs) -> - report(D, Vs, 1). - -report(D, Vs, N) -> - report(1, D, Vs, N). - -report(Level, _D, _Vs, N) when is_integer(N), N < Level -> - ok; -report(_Level, D, Vs, N) when is_integer(N) -> - io:put_chars(format(D, Vs)); -report(Level, D, Vs, Options) when is_list(Options) -> - report(Level, D, Vs, verbosity(Options)). - -format({error, D}, Vs) -> - ["error: ", format(D, Vs)]; -format({warning, D}, Vs) -> - ["warning: ", format(D, Vs)]; -format({recommend, D}, Vs) -> - ["recommendation: ", format(D, Vs)]; -format({"", L, D}, Vs) when is_integer(L), L > 0 -> - [io_lib:fwrite("~w: ", [L]), format(D, Vs)]; -format({"", _L, D}, Vs) -> - format(D, Vs); -format({F, L, D}, Vs) when is_integer(L), L > 0 -> - [io_lib:fwrite("~ts:~w: ", [filename(F), L]), format(D, Vs)]; -format({F, _L, D}, Vs) -> - [io_lib:fwrite("~ts: ", [filename(F)]), format(D, Vs)]; -format(S, Vs) when is_list(S) -> - [io_lib:fwrite(S, Vs), $\n]. - -%% ===================================================================== diff --git a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl b/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl deleted file mode 100644 index afb3875..0000000 --- a/_build/default/plugins/erl_tidy_prv_fmt/src/erl_tidy_prv_fmt.erl +++ /dev/null @@ -1,73 +0,0 @@ --module(erl_tidy_prv_fmt). - --behaviour(provider). - --export([init/1, - do/1, - format_error/1]). - --define(PROVIDER, fmt). --define(DEPS, [app_discovery]). - -%% =================================================================== -%% Public API -%% =================================================================== - --spec init(rebar_state:state()) -> {ok, rebar_state:state()}. -init(State) -> - State1 = rebar_state:add_provider(State, providers:create([{name, ?PROVIDER}, - {module, ?MODULE}, - {bare, false}, - {deps, ?DEPS}, - {example, "rebar3 fmt"}, - {short_desc, "format modules."}, - {desc, ""}, - {opts, fmt_opts()}])), - {ok, State1}. - --spec do(rebar_state:state()) -> {ok, rebar_state:state()}. -do(State) -> - ConfigFileOpts = rebar_state:get(State, fmt_opts, []), - {CliOpts, _} = rebar_state:command_parsed_args(State), - % CLI opts take precedence over config file - Opts = rebar_utils:tup_umerge(ConfigFileOpts, CliOpts), - - ProjectApps = rebar_state:project_apps(State), - format_apps(Opts, ProjectApps), - {ok, State}. - --spec format_error(any()) -> iolist(). -format_error(Reason) -> - io_lib:format("~p", [Reason]). 
- -format_apps(Opts, Apps) -> - lists:foreach(fun(AppInfo) -> - SrcDir = filename:join(rebar_app_info:dir(AppInfo), "src"), - rebar_log:log(info, "Formating ~s...", [rebar_app_info:name(AppInfo)]), - erl_tidy:dir(SrcDir, Opts) - end, Apps). - -fmt_opts() -> - [{test, undefined, "test", {boolean, false}, - "do not modify files"}, - {verbose, undefined, "verbose", {boolean, true}, - "progress messages will be output while the program is running, " - "unless the `quiet' option is set"}, - {quiet, undefined, "quiet", {boolean, false}, - "all information messages and warning messages will be suppressed"}, - {auto_list_comp, undefined, "auto_list_comp", {boolean, false}, - "calls to `lists:map/2' and `lists:filter/2' will be rewritten " - "using list comprehensions"}, - {keep_unused, undefined, "keep_unused", {boolean, false}, - "unused functions will not be removed from the code"}, - {new_guard_tests, undefined, "new_guard_tests", {boolean, true}, - "guard tests will be updated to use the new names, " - "e.g. `is_integer(X)' instead of `integer(X)'"}, - {old_guard_tests, undefined, "old_guard_tests", {boolean, false}, - "guard tests will be changed to use the old names " - "instead of the new ones, e.g. `integer(X)' instead of `is_integer(X)'"}, - {no_imports, undefined, "no_imports", {boolean, false}, - "all import statements will be removed " - "and calls to imported functions will be expanded " - "to explicit remote calls"} - ]. From 921b5e2edd16a2be4418156366db3b5ace1fcdb9 Mon Sep 17 00:00:00 2001 From: "alpha.ferry" Date: Mon, 8 Feb 2021 17:22:11 +0700 Subject: [PATCH 5/9] formatting --- rebar.config | 3 +- src/sockjs_cowboy_handler.erl | 167 ++++++++++------ src/sockjs_filters.erl | 99 ++++++---- src/sockjs_handler.erl | 351 ++++++++++++++++++++-------------- src/sockjs_http.erl | 146 ++++++++------ 5 files changed, 465 insertions(+), 301 deletions(-) diff --git a/rebar.config b/rebar.config index c810042..eeeb3f2 100644 --- a/rebar.config +++ b/rebar.config @@ -11,8 +11,7 @@ ]}. {deps, [ - {cowboy, "2.8.0",{git, "https://github.com/ninenines/cowboy.git", {tag, "2.8.0"}}} - ]}. + {cowboy, "2.8.0", {git, "https://github.com/ninenines/cowboy.git", {tag, "2.8.0"}}} ]}. {project_plugins, [erlfmt]}. diff --git a/src/sockjs_cowboy_handler.erl b/src/sockjs_cowboy_handler.erl index 6e7a543..93efc49 100644 --- a/src/sockjs_cowboy_handler.erl +++ b/src/sockjs_cowboy_handler.erl @@ -6,82 +6,126 @@ -export([init/2, terminate/3]). %% Cowboy ws callbacks --export([websocket_handle/3, websocket_info/3, - websocket_init/3, websocket_terminate/3]). +-export([ + websocket_handle/3, + websocket_info/3, + websocket_init/3, + websocket_terminate/3 +]). -include("sockjs_internal.hrl"). %% -------------------------------------------------------------------------- init(#{ref := http} = Req, Service) -> - case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) - of - {true, _Reason} -> - {upgrade, protocol, cowboy_websocket}; - {false, _Reason} -> {ok, Req, Service} + case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) of + {true, _Reason} -> + {upgrade, protocol, cowboy_websocket}; + {false, _Reason} -> + {ok, Req, Service} end. terminate(_Reason, _Req, _Service) -> ok. 
%% -------------------------------------------------------------------------- -websocket_init(_TransportName, Req, - Service = #service{logger = Logger, - subproto_pref = SubProtocolPref}) -> - Req1 = case cowboy_req:header('Sec-Websocket-Protocol', - Req) - of - undefined -> Req; - SubProtocols -> - SelectedSubProtocol = - choose_subprotocol_bin(SubProtocols, SubProtocolPref), - cowboy_req:set_resp_header(#{<<"Sec-Websocket-Protocol">> - => SelectedSubProtocol}, - Req) - end, +websocket_init( + _TransportName, + Req, + Service = #service{ + logger = Logger, + subproto_pref = SubProtocolPref + } +) -> + Req1 = + case + cowboy_req:header( + 'Sec-Websocket-Protocol', + Req + ) + of + undefined -> + Req; + SubProtocols -> + SelectedSubProtocol = + choose_subprotocol_bin(SubProtocols, SubProtocolPref), + cowboy_req:set_resp_header( + #{ + <<"Sec-Websocket-Protocol">> => + SelectedSubProtocol + }, + Req + ) + end, Logger(Service, {cowboy, Req1}, websocket), - Service1 = Service#service{disconnect_delay = - 5 * 60 * 1000}, + Service1 = Service#service{ + disconnect_delay = + 5 * 60 * 1000 + }, Info = sockjs_handler:extract_info(Req1), - SessionPid = sockjs_session:maybe_create(undefined, - Service1, Info), - RawWebsocket = case sockjs_handler:get_action(Service, - Req1) - of - {match, WS} - when WS =:= websocket orelse WS =:= rawwebsocket -> - WS - end, + SessionPid = sockjs_session:maybe_create( + undefined, + Service1, + Info + ), + RawWebsocket = + case + sockjs_handler:get_action( + Service, + Req1 + ) + of + {match, WS} when WS =:= websocket orelse WS =:= rawwebsocket -> + WS + end, self() ! go, {ok, Req1, {RawWebsocket, SessionPid}}. -websocket_handle({text, Data}, Req, - {RawWebsocket, SessionPid} = S) -> - case sockjs_ws_handler:received(RawWebsocket, - SessionPid, Data) - of - ok -> {ok, Req, S}; - shutdown -> {shutdown, Req, S} +websocket_handle( + {text, Data}, + Req, + {RawWebsocket, SessionPid} = S +) -> + case + sockjs_ws_handler:received( + RawWebsocket, + SessionPid, + Data + ) + of + ok -> {ok, Req, S}; + shutdown -> {shutdown, Req, S} end; websocket_handle(_Unknown, Req, S) -> {shutdown, Req, S}. -websocket_info(go, Req, - {RawWebsocket, SessionPid} = S) -> - case sockjs_ws_handler:reply(RawWebsocket, SessionPid) - of - wait -> {ok, Req, S}; - {ok, Data} -> - self() ! go, {reply, {text, Data}, Req, S}; - {close, <<>>} -> {shutdown, Req, S}; - {close, Data} -> - self() ! shutdown, {reply, {text, Data}, Req, S} +websocket_info( + go, + Req, + {RawWebsocket, SessionPid} = S +) -> + case sockjs_ws_handler:reply(RawWebsocket, SessionPid) of + wait -> + {ok, Req, S}; + {ok, Data} -> + self() ! go, + {reply, {text, Data}, Req, S}; + {close, <<>>} -> + {shutdown, Req, S}; + {close, Data} -> + self() ! shutdown, + {reply, {text, Data}, Req, S} end; -websocket_info(shutdown, Req, S) -> {shutdown, Req, S}. +websocket_info(shutdown, Req, S) -> + {shutdown, Req, S}. -websocket_terminate(_Reason, _Req, - {RawWebsocket, SessionPid}) -> - sockjs_ws_handler:close(RawWebsocket, SessionPid), ok. +websocket_terminate( + _Reason, + _Req, + {RawWebsocket, SessionPid} +) -> + sockjs_ws_handler:close(RawWebsocket, SessionPid), + ok. 
%% -------------------------------------------------------------------------- @@ -91,11 +135,14 @@ choose_subprotocol_bin(SubProtocols, Pref) -> choose_subprotocol(SubProtocols, undefined) -> erlang:hd(lists:reverse(lists:sort(SubProtocols))); choose_subprotocol(SubProtocols, Pref) -> - case lists:filter(fun (E) -> - lists:member(E, SubProtocols) - end, - Pref) - of - [Hd | _] -> Hd; - [] -> choose_subprotocol(SubProtocols, undefined) + case + lists:filter( + fun(E) -> + lists:member(E, SubProtocols) + end, + Pref + ) + of + [Hd | _] -> Hd; + [] -> choose_subprotocol(SubProtocols, undefined) end. diff --git a/src/sockjs_filters.erl b/src/sockjs_filters.erl index adf150e..345971e 100644 --- a/src/sockjs_filters.erl +++ b/src/sockjs_filters.erl @@ -1,7 +1,13 @@ -module(sockjs_filters). --export([cache_for/2, h_no_cache/2, h_sid/2, xhr_cors/2, - xhr_options_get/2, xhr_options_post/2]). +-export([ + cache_for/2, + h_no_cache/2, + h_sid/2, + xhr_cors/2, + xhr_options_get/2, + xhr_options_post/2 +]). -include("sockjs_internal.hrl"). @@ -13,11 +19,14 @@ cache_for(Req, Headers) -> Expires = - calendar:gregorian_seconds_to_datetime(calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(now())) - + (?YEAR)), - H = [{"Cache-Control", - "public, max-age=" ++ integer_to_list(?YEAR)}, - {"Expires", httpd_util:rfc1123_date(Expires)}], + calendar:gregorian_seconds_to_datetime( + calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(now())) + + (?YEAR) + ), + H = [ + {"Cache-Control", "public, max-age=" ++ integer_to_list(?YEAR)}, + {"Expires", httpd_util:rfc1123_date(Expires)} + ], {H ++ Headers, Req}. -spec h_sid(req(), headers()) -> {headers(), req()}. @@ -28,60 +37,70 @@ h_sid(Req, Headers) -> %% set it to a dumb value. It doesn't really matter what, as %% session information is usually added by the load balancer. C = sockjs_http:jsessionid(Req), - H = case C of - undefined -> - [{"Set-Cookie", "JSESSIONID=dummy; path=/"}]; - Jsid -> - [{"Set-Cookie", "JSESSIONID=" ++ Jsid ++ "; path=/"}] - end, + H = + case C of + undefined -> + [{"Set-Cookie", "JSESSIONID=dummy; path=/"}]; + Jsid -> + [{"Set-Cookie", "JSESSIONID=" ++ Jsid ++ "; path=/"}] + end, H ++ Headers. --spec h_no_cache(req(), headers()) -> {headers(), - req()}. +-spec h_no_cache(req(), headers()) -> {headers(), req()}. h_no_cache(Req, Headers) -> - H = [{"Cache-Control", - "no-store, no-cache, must-revalidate, " - "max-age=0"}], + H = [ + {"Cache-Control", + "no-store, no-cache, must-revalidate, " + "max-age=0"} + ], {H ++ Headers, Req}. -spec xhr_cors(req(), headers()) -> {headers(), req()}. xhr_cors(Req, Headers) -> {OriginH, Req1} = sockjs_http:header(origin, Req), - Origin = case OriginH of - "null" -> "*"; - undefined -> "*"; - O -> O - end, + Origin = + case OriginH of + "null" -> "*"; + undefined -> "*"; + O -> O + end, {HeadersH, Req2} = - sockjs_http:header('access-control-request-headers', - Req1), - AllowHeaders = case HeadersH of - undefined -> []; - V -> [{"Access-Control-Allow-Headers", V}] - end, - H = [{"Access-Control-Allow-Origin", Origin}, - {"Access-Control-Allow-Credentials", "true"}], + sockjs_http:header( + 'access-control-request-headers', + Req1 + ), + AllowHeaders = + case HeadersH of + undefined -> []; + V -> [{"Access-Control-Allow-Headers", V}] + end, + H = [ + {"Access-Control-Allow-Origin", Origin}, + {"Access-Control-Allow-Credentials", "true"} + ], {H ++ AllowHeaders ++ Headers, Req2}. --spec xhr_options_post(req(), headers()) -> {headers(), - req()}. 
+-spec xhr_options_post(req(), headers()) -> {headers(), req()}. xhr_options_post(Req, Headers) -> xhr_options(Req, Headers, ["OPTIONS", "POST"]). --spec xhr_options_get(req(), headers()) -> {headers(), - req()}. +-spec xhr_options_get(req(), headers()) -> {headers(), req()}. xhr_options_get(Req, Headers) -> xhr_options(Req, Headers, ["OPTIONS", "GET"]). --spec xhr_options(req(), headers(), - [string()]) -> {headers(), req()}. +-spec xhr_options( + req(), + headers(), + [string()] +) -> {headers(), req()}. xhr_options(Req, Headers, Methods) -> - H = [{"Access-Control-Allow-Methods", - string:join(Methods, ", ")}, - {"Access-Control-Max-Age", integer_to_list(?YEAR)}], + H = [ + {"Access-Control-Allow-Methods", string:join(Methods, ", ")}, + {"Access-Control-Max-Age", integer_to_list(?YEAR)} + ], {H ++ Headers, Req}. diff --git a/src/sockjs_handler.erl b/src/sockjs_handler.erl index 7ac69b8..3745e4c 100644 --- a/src/sockjs_handler.erl +++ b/src/sockjs_handler.erl @@ -11,50 +11,62 @@ -include("sockjs_internal.hrl"). -define(SOCKJS_URL, - "//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js"). + "//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js" +). %% -------------------------------------------------------------------------- --spec init_state(binary(), callback(), any(), - [tuple()]) -> service(). +-spec init_state( + binary(), + callback(), + any(), + [tuple()] +) -> service(). init_state(Prefix, Callback, State, Options) -> - #service{prefix = binary_to_list(Prefix), - callback = Callback, state = State, - sockjs_url = - proplists:get_value(sockjs_url, Options, ?SOCKJS_URL), - websocket = - proplists:get_value(websocket, Options, true), - cookie_needed = - proplists:get_value(cookie_needed, Options, false), - disconnect_delay = - proplists:get_value(disconnect_delay, Options, 5000), - heartbeat_delay = - proplists:get_value(heartbeat_delay, Options, 25000), - response_limit = - proplists:get_value(response_limit, Options, - 128 * 1024), - logger = - proplists:get_value(logger, Options, - fun default_logger/3), - subproto_pref = - proplists:get_value(subproto_pref, Options)}. + #service{ + prefix = binary_to_list(Prefix), + callback = Callback, + state = State, + sockjs_url = + proplists:get_value(sockjs_url, Options, ?SOCKJS_URL), + websocket = + proplists:get_value(websocket, Options, true), + cookie_needed = + proplists:get_value(cookie_needed, Options, false), + disconnect_delay = + proplists:get_value(disconnect_delay, Options, 5000), + heartbeat_delay = + proplists:get_value(heartbeat_delay, Options, 25000), + response_limit = + proplists:get_value( + response_limit, + Options, + 128 * 1024 + ), + logger = + proplists:get_value( + logger, + Options, + fun default_logger/3 + ), + subproto_pref = + proplists:get_value(subproto_pref, Options) + }. %% -------------------------------------------------------------------------- --spec is_valid_ws(service(), req()) -> {boolean(), - tuple()}. +-spec is_valid_ws(service(), req()) -> {boolean(), tuple()}. is_valid_ws(Service, Req) -> case get_action(Service, Req) of - {{match, WS}, Req1} - when WS =:= websocket orelse WS =:= rawwebsocket -> - valid_ws_request(Service, Req1); - {_Else, _Req1} -> {false, {}} + {{match, WS}, Req1} when WS =:= websocket orelse WS =:= rawwebsocket -> + valid_ws_request(Service, Req1); + {_Else, _Req1} -> + {false, {}} end. --spec valid_ws_request(service(), req()) -> {boolean(), - tuple()}. +-spec valid_ws_request(service(), req()) -> {boolean(), tuple()}. 
valid_ws_request(_Service, Req) -> R1 = valid_ws_upgrade(Req), @@ -63,31 +75,36 @@ valid_ws_request(_Service, Req) -> valid_ws_upgrade(Req) -> case sockjs_http:header(upgrade, Req) of - undefined -> false; - {V, _Req2} -> - case string:to_lower(V) of - "websocket" -> true; - _Else -> false - end + undefined -> + false; + {V, _Req2} -> + case string:to_lower(V) of + "websocket" -> true; + _Else -> false + end end. valid_ws_connection(Req) -> case sockjs_http:header(connection, Req) of - undefined -> false; - V -> - Vs = [string:strip(T) - || T <- string:tokens(string:to_lower(V), ",")], - {lists:member("upgrade", Vs), Req} + undefined -> + false; + V -> + Vs = [ + string:strip(T) + || T <- string:tokens(string:to_lower(V), ",") + ], + {lists:member("upgrade", Vs), Req} end. --spec get_action(service(), req()) -> nomatch | - {match, atom()}. +-spec get_action(service(), req()) -> + nomatch + | {match, atom()}. get_action(Service, Req) -> Dispatch = dispatch_req(Service, Req), case Dispatch of - {match, {_, Action, _, _, _}} -> {match, Action}; - _Else -> nomatch + {match, {_, Action, _, _, _}} -> {match, Action}; + _Else -> nomatch end. %% -------------------------------------------------------------------------- @@ -95,21 +112,25 @@ get_action(Service, Req) -> strip_prefix(LongPath, Prefix) -> {A, B} = lists:split(length(Prefix), LongPath), case Prefix of - A -> {ok, B}; - _Any -> - {error, - io_lib:format("Wrong prefix: ~p is not ~p", - [A, Prefix])} + A -> + {ok, B}; + _Any -> + {error, + io_lib:format( + "Wrong prefix: ~p is not ~p", + [A, Prefix] + )} end. --type dispatch_result() :: nomatch | - {match, - {send | recv | none, atom(), server(), session(), - [atom()]}} | - {bad_method, [atom()]}. +-type dispatch_result() :: + nomatch + | {match, {send | recv | none, atom(), server(), session(), [atom()]}} + | {bad_method, [atom()]}. --spec dispatch_req(service(), - req()) -> dispatch_result(). +-spec dispatch_req( + service(), + req() +) -> dispatch_result(). dispatch_req(#service{prefix = Prefix}, Req) -> Method = sockjs_http:method(Req), @@ -117,27 +138,37 @@ dispatch_req(#service{prefix = Prefix}, Req) -> {ok, PathRemainder} = strip_prefix(LongPath, Prefix), dispatch(Method, PathRemainder). --spec dispatch(atom(), - nonempty_string()) -> dispatch_result(). +-spec dispatch( + atom(), + nonempty_string() +) -> dispatch_result(). dispatch(Method, Path) -> - lists:foldl(fun ({Match, MethodFilters}, nomatch) -> - case Match(Path) of - nomatch -> nomatch; - [Server, Session] -> - case lists:keyfind(Method, 1, MethodFilters) of - false -> - Methods = [K - || {K, _, _, _} - <- MethodFilters], - {bad_method, Methods}; - {_Method, Type, A, Filters} -> - {match, {Type, A, Server, Session, Filters}} - end - end; - (_, Result) -> Result - end, - nomatch, filters()). + lists:foldl( + fun + ({Match, MethodFilters}, nomatch) -> + case Match(Path) of + nomatch -> + nomatch; + [Server, Session] -> + case lists:keyfind(Method, 1, MethodFilters) of + false -> + Methods = [ + K + || {K, _, _, _} <- + MethodFilters + ], + {bad_method, Methods}; + {_Method, Type, A, Filters} -> + {match, {Type, A, Server, Session, Filters}} + end + end; + (_, Result) -> + Result + end, + nomatch, + filters() + ). 
%% -------------------------------------------------------------------------- @@ -145,38 +176,39 @@ filters() -> %OptsFilters = [h_sid, xhr_cors, cache_for, xhr_options_post], %% websocket does not actually go via handle_req/3 but we need %% something in dispatch/2 - [{t("/websocket"), [{'GET', none, websocket, []}]}, - %{t("/xhr_send"), [{'POST', recv, xhr_send, [h_sid, h_no_cache, xhr_cors]}, - % {'OPTIONS', none, options, OptsFilters}]}, - %{t("/xhr"), [{'POST', send, xhr_polling, [h_sid, h_no_cache, xhr_cors]}, - % {'OPTIONS', none, options, OptsFilters}]}, - %{t("/xhr_streaming"), [{'POST', send, xhr_streaming, [h_sid, h_no_cache, xhr_cors]}, - % {'OPTIONS', none, options, OptsFilters}]}, - %{t("/jsonp_send"), [{'POST', recv, jsonp_send, [h_sid, h_no_cache]}]}, - %{t("/jsonp"), [{'GET', send, jsonp, [h_sid, h_no_cache]}]}, - %{t("/eventsource"), [{'GET', send, eventsource, [h_sid, h_no_cache]}]}, - %{t("/htmlfile"), [{'GET', send, htmlfile, [h_sid, h_no_cache]}]}, - {p("/websocket"), [{'GET', none, rawwebsocket, []}]}, - {p(""), [{'GET', none, welcome_screen, []}]}, - %{p("/iframe[0-9-.a-z_]*.html"), [{'GET', none, iframe, [cache_for]}]}, - {p("/info"), - [{'GET', none, info_test, [h_no_cache, xhr_cors]}, - {'OPTIONS', none, options, - [h_sid, xhr_cors, cache_for, xhr_options_get]}]}]. - -p(S) -> fun (Path) -> re(Path, "^" ++ S ++ "[/]?$") end. + [ + {t("/websocket"), [{'GET', none, websocket, []}]}, + %{t("/xhr_send"), [{'POST', recv, xhr_send, [h_sid, h_no_cache, xhr_cors]}, + % {'OPTIONS', none, options, OptsFilters}]}, + %{t("/xhr"), [{'POST', send, xhr_polling, [h_sid, h_no_cache, xhr_cors]}, + % {'OPTIONS', none, options, OptsFilters}]}, + %{t("/xhr_streaming"), [{'POST', send, xhr_streaming, [h_sid, h_no_cache, xhr_cors]}, + % {'OPTIONS', none, options, OptsFilters}]}, + %{t("/jsonp_send"), [{'POST', recv, jsonp_send, [h_sid, h_no_cache]}]}, + %{t("/jsonp"), [{'GET', send, jsonp, [h_sid, h_no_cache]}]}, + %{t("/eventsource"), [{'GET', send, eventsource, [h_sid, h_no_cache]}]}, + %{t("/htmlfile"), [{'GET', send, htmlfile, [h_sid, h_no_cache]}]}, + {p("/websocket"), [{'GET', none, rawwebsocket, []}]}, + {p(""), [{'GET', none, welcome_screen, []}]}, + %{p("/iframe[0-9-.a-z_]*.html"), [{'GET', none, iframe, [cache_for]}]}, + {p("/info"), [ + {'GET', none, info_test, [h_no_cache, xhr_cors]}, + {'OPTIONS', none, options, [h_sid, xhr_cors, cache_for, xhr_options_get]} + ]} + ]. + +p(S) -> fun(Path) -> re(Path, "^" ++ S ++ "[/]?$") end. t(S) -> - fun (Path) -> - re(Path, "^/([^/.]+)/([^/.]+)" ++ S ++ "[/]?$") + fun(Path) -> + re(Path, "^/([^/.]+)/([^/.]+)" ++ S ++ "[/]?$") end. re(Path, S) -> - case re:run(Path, S, [{capture, all_but_first, list}]) - of - nomatch -> nomatch; - {match, []} -> [dummy, dummy]; - {match, [Server, Session]} -> [Server, Session] + case re:run(Path, S, [{capture, all_but_first, list}]) of + nomatch -> nomatch; + {match, []} -> [dummy, dummy]; + {match, [Server, Session]} -> [Server, Session] end. 
%% -------------------------------------------------------------------------- @@ -191,40 +223,63 @@ handle_req(Service = #service{logger = Logger}, Req) -> handle(nomatch, _Service, Req) -> sockjs_http:reply(404, [], "", Req); handle({bad_method, Methods}, _Service, Req) -> - MethodsStr = string:join([atom_to_list(M) - || M <- Methods], - ", "), + MethodsStr = string:join( + [ + atom_to_list(M) + || M <- Methods + ], + ", " + ), H = [{"Allow", MethodsStr}], sockjs_http:reply(405, H, "", Req); -handle({match, - {Type, Action, _Server, Session, Filters}}, - Service, Req) -> - {Headers, Req2} = lists:foldl(fun (Filter, - {Headers0, Req1}) -> - sockjs_filters:Filter(Req1, Headers0) - end, - {[], Req}, Filters), +handle( + {match, {Type, Action, _Server, Session, Filters}}, + Service, + Req +) -> + {Headers, Req2} = lists:foldl( + fun( + Filter, + {Headers0, Req1} + ) -> + sockjs_filters:Filter(Req1, Headers0) + end, + {[], Req}, + Filters + ), case Type of - send -> - Info = extract_info(Req2), - _SPid = sockjs_session:maybe_create(Session, Service, - Info), - sockjs_action:Action(Req2, Headers, Service, Session); - recv -> - try sockjs_action:Action(Req2, Headers, Service, - Session) - catch - no_session -> - H = sockjs_filters:h_sid(Req2, []), - sockjs_http:reply(404, H, "", Req2) - end; - none -> sockjs_action:Action(Req2, Headers, Service) + send -> + Info = extract_info(Req2), + _SPid = sockjs_session:maybe_create( + Session, + Service, + Info + ), + sockjs_action:Action(Req2, Headers, Service, Session); + recv -> + try + sockjs_action:Action( + Req2, + Headers, + Service, + Session + ) + catch + no_session -> + H = sockjs_filters:h_sid(Req2, []), + sockjs_http:reply(404, H, "", Req2) + end; + none -> + sockjs_action:Action(Req2, Headers, Service) end. %% -------------------------------------------------------------------------- --spec default_logger(service(), req(), - websocket | http) -> no_return(). +-spec default_logger( + service(), + req(), + websocket | http +) -> no_return(). default_logger(_Service, Req, _Type) -> % As the service need a function to call as default, we simply give a dummy function @@ -239,14 +294,26 @@ extract_info(Req) -> Peer = sockjs_http:peername(Req), Sock = sockjs_http:sockname(Req), Path = sockjs_http:path(Req), - Headers = lists:foldl(fun (H, Acc) -> - case sockjs_http:header(H, Req) of - undefined -> Acc; - V -> [{H, V} | Acc] - end - end, - [], - [referer, 'x-client-ip', 'x-forwarded-for', - 'x-cluster-client-ip', via, 'x-real-ip']), - [{peername, Peer}, {sockname, Sock}, {path, Path}, - {headers, Headers}]. + Headers = lists:foldl( + fun(H, Acc) -> + case sockjs_http:header(H, Req) of + undefined -> Acc; + V -> [{H, V} | Acc] + end + end, + [], + [ + referer, + 'x-client-ip', + 'x-forwarded-for', + 'x-cluster-client-ip', + via, + 'x-real-ip' + ] + ), + [ + {peername, Peer}, + {sockname, Sock}, + {path, Path}, + {headers, Headers} + ]. diff --git a/src/sockjs_http.erl b/src/sockjs_http.erl index a89ea14..819ded8 100644 --- a/src/sockjs_http.erl +++ b/src/sockjs_http.erl @@ -1,13 +1,24 @@ -module(sockjs_http). --export([body/1, body_qs/1, callback/1, header/2, - jsessionid/1, method/1, path/1, peername/1, - sockname/1]). +-export([ + body/1, + body_qs/1, + callback/1, + header/2, + jsessionid/1, + method/1, + path/1, + peername/1, + sockname/1 +]). -export([chunk/2, chunk_end/1, chunk_start/3, reply/4]). --export([abruptly_kill/1, hook_tcp_close/1, - unhook_tcp_close/1]). +-export([ + abruptly_kill/1, + hook_tcp_close/1, + unhook_tcp_close/1 +]). 
-include("sockjs_internal.hrl"). @@ -16,12 +27,14 @@ -spec path(req()) -> string(). path({cowboy, Req}) -> - Path = cowboy_req:path(Req), binary_to_list(Path). + Path = cowboy_req:path(Req), + binary_to_list(Path). -spec method(req()) -> atom(). method({cowboy, Req}) -> - Method = cowboy_req:method(Req), method_atom(Method). + Method = cowboy_req:method(Req), + method_atom(Method). -spec method_atom(binary() | atom()) -> atom(). @@ -48,10 +61,10 @@ body({cowboy, Req}) -> body({cowboy, Req}, Acc) -> case cowboy_req:read_body(Req) of - {ok, Data, Req} -> - {ok, <>, Req}; - {more, Data, Req} -> - body(Req, <>) + {ok, Data, Req} -> + {ok, <>, Req}; + {more, Data, Req} -> + body(Req, <>) end. -spec body_qs(req()) -> {binary(), req()}. @@ -59,97 +72,116 @@ body({cowboy, Req}, Acc) -> body_qs(Req) -> {H, Req1} = header('content-type', Req), case H of - H when H =:= "text/plain" orelse H =:= "" -> body(Req1); - _ -> - %% By default assume application/x-www-form-urlencoded - body_qs2(Req1) + H when H =:= "text/plain" orelse H =:= "" -> + body(Req1); + _ -> + %% By default assume application/x-www-form-urlencoded + body_qs2(Req1) end. body_qs2({cowboy, Req}) -> {ok, BodyQS, Req1} = - cowboy_req:read_urlencoded_body(Req), + cowboy_req:read_urlencoded_body(Req), case proplists:get_value(<<"d">>, BodyQS) of - undefined -> {<<>>, {cowboy, Req1}}; - V -> {V, {cowboy, Req1}} + undefined -> {<<>>, {cowboy, Req1}}; + V -> {V, {cowboy, Req1}} end. --spec header(atom(), req()) -> {nonempty_string() | - undefined, - req()}. +-spec header(atom(), req()) -> + {nonempty_string() + | undefined, + req()}. header(K, {cowboy, Req}) -> H = cowboy_req:header(K, Req), - V = case H of - undefined -> - cowboy_req:header(atom_to_binary(K, utf8), Req); - _ -> H - end, + V = + case H of + undefined -> + cowboy_req:header(atom_to_binary(K, utf8), Req); + _ -> + H + end, case V of - undefined -> undefined; - _ -> binary_to_list(V) + undefined -> undefined; + _ -> binary_to_list(V) end. --spec jsessionid(req()) -> {nonempty_string() | - undefined, - req()}. +-spec jsessionid(req()) -> + {nonempty_string() + | undefined, + req()}. jsessionid({cowboy, Req}) -> - #{'JSESSIONID' := C} = cowboy_req:cookie([{'JSESSIONID', - [], undefined}], - Req), + #{'JSESSIONID' := C} = cowboy_req:cookie( + [{'JSESSIONID', [], undefined}], + Req + ), case C of - _ when is_binary(C) -> {binary_to_list(C), cowboy}; - undefined -> {undefined, cowboy} + _ when is_binary(C) -> {binary_to_list(C), cowboy}; + undefined -> {undefined, cowboy} end. --spec callback(req()) -> {nonempty_string() | undefined, - req()}. +-spec callback(req()) -> {nonempty_string() | undefined, req()}. callback({cowboy, Req}) -> {CB, Req1} = cowboy_req:qs_val(<<"c">>, Req), case CB of - undefined -> {undefined, {cowboy, Req1}}; - _ -> {binary_to_list(CB), {cowboy, Req1}} + undefined -> {undefined, {cowboy, Req1}}; + _ -> {binary_to_list(CB), {cowboy, Req1}} end. --spec peername(req()) -> {inet:ip_address(), - non_neg_integer()}. +-spec peername(req()) -> {inet:ip_address(), non_neg_integer()}. peername({cowboy, Req}) -> cowboy_req:peer(Req). --spec sockname(req()) -> {inet:ip_address(), - non_neg_integer()}. +-spec sockname(req()) -> {inet:ip_address(), non_neg_integer()}. sockname({cowboy, Req}) -> cowboy_req:peer(Req). %% -------------------------------------------------------------------------- --spec reply(non_neg_integer(), headers(), iodata(), - req()) -> req(). +-spec reply( + non_neg_integer(), + headers(), + iodata(), + req() +) -> req(). 
reply(Code, Headers, Body, {cowboy, Req}) -> Body1 = iolist_to_binary(Body), - {ok, Req1} = cowboy_req:reply(Code, enbinary(Headers), - Body1, Req), + {ok, Req1} = cowboy_req:reply( + Code, + enbinary(Headers), + Body1, + Req + ), {cowboy, Req1}. --spec chunk_start(non_neg_integer(), headers(), - req()) -> req(). +-spec chunk_start( + non_neg_integer(), + headers(), + req() +) -> req(). chunk_start(Code, Headers, {cowboy, Req}) -> - {ok, Req1} = cowboy_req:chunked_reply(Code, - enbinary(Headers), Req), + {ok, Req1} = cowboy_req:chunked_reply( + Code, + enbinary(Headers), + Req + ), {cowboy, Req1}. -spec chunk(iodata(), req()) -> {ok | error, req()}. chunk(Chunk, {cowboy, Req} = R) -> case cowboy_req:chunk(Chunk, Req) of - ok -> {ok, R}; - {error, _E} -> - {error, - R} %% This shouldn't happen too often, usually we - %% should catch tco socket closure before. + ok -> + {ok, R}; + {error, _E} -> + {error, + %% This shouldn't happen too often, usually we + R} + %% should catch tco socket closure before. end. -spec chunk_end(req()) -> req(). From d4e7642dd61fc7771bd204321d6bca724e8614fe Mon Sep 17 00:00:00 2001 From: "alpha.ferry" Date: Mon, 8 Feb 2021 17:50:52 +0700 Subject: [PATCH 6/9] remove warning --- src/sockjs_filters.erl | 2 +- src/sockjs_handler.erl | 2 +- src/sockjs_session.erl | 2 +- src/sockjs_util.erl | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/sockjs_filters.erl b/src/sockjs_filters.erl index 345971e..5c32672 100644 --- a/src/sockjs_filters.erl +++ b/src/sockjs_filters.erl @@ -20,7 +20,7 @@ cache_for(Req, Headers) -> Expires = calendar:gregorian_seconds_to_datetime( - calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(now())) + + calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(erlang:timestamp())) + (?YEAR) ), H = [ diff --git a/src/sockjs_handler.erl b/src/sockjs_handler.erl index 3745e4c..912432a 100644 --- a/src/sockjs_handler.erl +++ b/src/sockjs_handler.erl @@ -281,7 +281,7 @@ handle( websocket | http ) -> no_return(). -default_logger(_Service, Req, _Type) -> +default_logger(_Service, _Req, _Type) -> % As the service need a function to call as default, we simply give a dummy function % LongPath = sockjs_http:path(Req), % Method = sockjs_http:method(Req). diff --git a/src/sockjs_session.erl b/src/sockjs_session.erl index 8087436..db66dfe 100644 --- a/src/sockjs_session.erl +++ b/src/sockjs_session.erl @@ -370,7 +370,7 @@ handle_info(session_timeout, State = #session{response_pid = undefined}) -> handle_info(heartbeat_triggered, State = #session{response_pid = RPid}) when RPid =/= undefined -> RPid ! go, {noreply, State#session{heartbeat_tref = triggered}}; -handle_info(Info, State) -> +handle_info(_Info, State) -> % stop, {odd_info, Info}, State}. {noreply, State}. diff --git a/src/sockjs_util.erl b/src/sockjs_util.erl index be220bb..66a741a 100644 --- a/src/sockjs_util.erl +++ b/src/sockjs_util.erl @@ -15,13 +15,13 @@ rand32() -> case get(random_seeded) of undefined -> - {MegaSecs, Secs, MicroSecs} = now(), - _ = random:seed(MegaSecs, Secs, MicroSecs), + {MegaSecs, Secs, MicroSecs} = erlang:timestamp(), + _ = rand:seed(MegaSecs, Secs, MicroSecs), put(random_seeded, true); _Else -> ok end, - random:uniform(erlang:trunc(math:pow(2, 32))) - 1. + rand:uniform(erlang:trunc(math:pow(2, 32))) - 1. -spec encode_frame(frame()) -> iodata(). 
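
The sockjs_util.erl hunk above moves rand32/0 from the deprecated random module to rand. Since OTP 18 the rand module seeds itself on first use in each process, so an equivalent rand32/0 could drop the explicit seeding and the random_seeded process-dictionary flag altogether; a minimal sketch, not part of the patch series:

-spec rand32() -> 0..4294967295.
rand32() ->
    %% rand:uniform/1 returns an integer in 1..N, so subtracting 1
    %% yields a uniformly distributed value in 0..2^32-1.
    rand:uniform(1 bsl 32) - 1.
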
From f599a455c0ce8d6b89ed6c6280b8bf02c9c4dca0 Mon Sep 17 00:00:00 2001 From: "alpha.ferry" Date: Mon, 8 Feb 2021 19:40:00 +0700 Subject: [PATCH 7/9] fix bug --- src/sockjs_cowboy_handler.erl | 2 +- src/sockjs_handler.erl | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/sockjs_cowboy_handler.erl b/src/sockjs_cowboy_handler.erl index 93efc49..ae9e258 100644 --- a/src/sockjs_cowboy_handler.erl +++ b/src/sockjs_cowboy_handler.erl @@ -20,7 +20,7 @@ init(#{ref := http} = Req, Service) -> case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) of {true, _Reason} -> - {upgrade, protocol, cowboy_websocket}; + {cowboy_websocket, Req, Service}; {false, _Reason} -> {ok, Req, Service} end. diff --git a/src/sockjs_handler.erl b/src/sockjs_handler.erl index 912432a..5bdc250 100644 --- a/src/sockjs_handler.erl +++ b/src/sockjs_handler.erl @@ -60,9 +60,9 @@ init_state(Prefix, Callback, State, Options) -> is_valid_ws(Service, Req) -> case get_action(Service, Req) of - {{match, WS}, Req1} when WS =:= websocket orelse WS =:= rawwebsocket -> - valid_ws_request(Service, Req1); - {_Else, _Req1} -> + {match, WS} when WS =:= websocket orelse WS =:= rawwebsocket -> + valid_ws_request(Service, Req); + _Else -> {false, {}} end. @@ -77,7 +77,7 @@ valid_ws_upgrade(Req) -> case sockjs_http:header(upgrade, Req) of undefined -> false; - {V, _Req2} -> + V -> case string:to_lower(V) of "websocket" -> true; _Else -> false @@ -93,7 +93,7 @@ valid_ws_connection(Req) -> string:strip(T) || T <- string:tokens(string:to_lower(V), ",") ], - {lists:member("upgrade", Vs), Req} + lists:member("upgrade", Vs) end. -spec get_action(service(), req()) -> From 4b709c71cb23fad526d5586c51947fed0f258b28 Mon Sep 17 00:00:00 2001 From: "alpha.ferry" Date: Mon, 8 Feb 2021 20:17:28 +0700 Subject: [PATCH 8/9] broken change on websocket handler --- src/sockjs_cowboy_handler.erl | 98 ++++++----------------------------- 1 file changed, 16 insertions(+), 82 deletions(-) diff --git a/src/sockjs_cowboy_handler.erl b/src/sockjs_cowboy_handler.erl index ae9e258..4083283 100644 --- a/src/sockjs_cowboy_handler.erl +++ b/src/sockjs_cowboy_handler.erl @@ -7,10 +7,10 @@ %% Cowboy ws callbacks -export([ - websocket_handle/3, - websocket_info/3, - websocket_init/3, - websocket_terminate/3 + websocket_handle/2, + websocket_info/2, + websocket_init/2, + websocket_terminate/2 ]). -include("sockjs_internal.hrl"). @@ -31,59 +31,13 @@ terminate(_Reason, _Req, _Service) -> ok. websocket_init( _TransportName, - Req, - Service = #service{ - logger = Logger, - subproto_pref = SubProtocolPref - } + _Service ) -> - Req1 = - case - cowboy_req:header( - 'Sec-Websocket-Protocol', - Req - ) - of - undefined -> - Req; - SubProtocols -> - SelectedSubProtocol = - choose_subprotocol_bin(SubProtocols, SubProtocolPref), - cowboy_req:set_resp_header( - #{ - <<"Sec-Websocket-Protocol">> => - SelectedSubProtocol - }, - Req - ) - end, - Logger(Service, {cowboy, Req1}, websocket), - Service1 = Service#service{ - disconnect_delay = - 5 * 60 * 1000 - }, - Info = sockjs_handler:extract_info(Req1), - SessionPid = sockjs_session:maybe_create( - undefined, - Service1, - Info - ), - RawWebsocket = - case - sockjs_handler:get_action( - Service, - Req1 - ) - of - {match, WS} when WS =:= websocket orelse WS =:= rawwebsocket -> - WS - end, self() ! go, - {ok, Req1, {RawWebsocket, SessionPid}}. + {ok, {}}. 
websocket_handle( {text, Data}, - Req, {RawWebsocket, SessionPid} = S ) -> case @@ -93,56 +47,36 @@ websocket_handle( Data ) of - ok -> {ok, Req, S}; - shutdown -> {shutdown, Req, S} + ok -> {ok, S}; + shutdown -> {stop, S} end; -websocket_handle(_Unknown, Req, S) -> - {shutdown, Req, S}. +websocket_handle(_Unknown, S) -> + {stop, S}. websocket_info( go, - Req, {RawWebsocket, SessionPid} = S ) -> case sockjs_ws_handler:reply(RawWebsocket, SessionPid) of wait -> - {ok, Req, S}; + {ok, S}; {ok, Data} -> self() ! go, - {reply, {text, Data}, Req, S}; + {reply, {text, Data}, S}; {close, <<>>} -> - {shutdown, Req, S}; + {stop, S}; {close, Data} -> self() ! shutdown, - {reply, {text, Data}, Req, S} + {reply, {text, Data}, S} end; -websocket_info(shutdown, Req, S) -> - {shutdown, Req, S}. +websocket_info(shutdown, S) -> + {stop, S}. websocket_terminate( _Reason, - _Req, {RawWebsocket, SessionPid} ) -> sockjs_ws_handler:close(RawWebsocket, SessionPid), ok. %% -------------------------------------------------------------------------- - -choose_subprotocol_bin(SubProtocols, Pref) -> - choose_subprotocol(re:split(SubProtocols, ", *"), Pref). - -choose_subprotocol(SubProtocols, undefined) -> - erlang:hd(lists:reverse(lists:sort(SubProtocols))); -choose_subprotocol(SubProtocols, Pref) -> - case - lists:filter( - fun(E) -> - lists:member(E, SubProtocols) - end, - Pref - ) - of - [Hd | _] -> Hd; - [] -> choose_subprotocol(SubProtocols, undefined) - end. From e9c5321d042e2854ee11404fedb1a9fde7dc85e0 Mon Sep 17 00:00:00 2001 From: "alpha.ferry" Date: Wed, 10 Feb 2021 20:36:04 +0700 Subject: [PATCH 9/9] fix bug --- src/sockjs_cowboy_handler.erl | 25 +++++++++++++++---------- src/sockjs_http.erl | 2 +- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/sockjs_cowboy_handler.erl b/src/sockjs_cowboy_handler.erl index 4083283..51a9ae8 100644 --- a/src/sockjs_cowboy_handler.erl +++ b/src/sockjs_cowboy_handler.erl @@ -9,7 +9,7 @@ -export([ websocket_handle/2, websocket_info/2, - websocket_init/2, + websocket_init/1, websocket_terminate/2 ]). @@ -20,9 +20,17 @@ init(#{ref := http} = Req, Service) -> case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) of {true, _Reason} -> - {cowboy_websocket, Req, Service}; + Service1 = Service#service{disconnect_delay = 5 * 60 * 1000}, + Info = sockjs_handler:extract_info({cowboy, Req}), + SessionPid = sockjs_session:maybe_create(undefined, Service1, Info), + RawWebSocket = + case sockjs_handler:get_action(Service, {cowboy, Req}) of + {match, WS} when WS =:= websocket orelse WS =:= rawwebsocket -> WS + end, + + {cowboy_websocket, Req, {RawWebSocket, SessionPid}}; {false, _Reason} -> - {ok, Req, Service} + {ok, Req, {}} end. terminate(_Reason, _Req, _Service) -> ok. @@ -30,11 +38,10 @@ terminate(_Reason, _Req, _Service) -> ok. %% -------------------------------------------------------------------------- websocket_init( - _TransportName, - _Service + S ) -> self() ! go, - {ok, {}}. + {ok, S}. websocket_handle( {text, Data}, @@ -47,7 +54,7 @@ websocket_handle( Data ) of - ok -> {ok, S}; + ok -> {[], S}; shutdown -> {stop, S} end; websocket_handle(_Unknown, S) -> @@ -59,7 +66,7 @@ websocket_info( ) -> case sockjs_ws_handler:reply(RawWebsocket, SessionPid) of wait -> - {ok, S}; + {[], S}; {ok, Data} -> self() ! go, {reply, {text, Data}, S}; @@ -78,5 +85,3 @@ websocket_terminate( ) -> sockjs_ws_handler:close(RawWebsocket, SessionPid), ok. 
- -%% -------------------------------------------------------------------------- diff --git a/src/sockjs_http.erl b/src/sockjs_http.erl index 819ded8..20fa4e6 100644 --- a/src/sockjs_http.erl +++ b/src/sockjs_http.erl @@ -136,7 +136,7 @@ peername({cowboy, Req}) -> cowboy_req:peer(Req). -spec sockname(req()) -> {inet:ip_address(), non_neg_integer()}. -sockname({cowboy, Req}) -> cowboy_req:peer(Req). +sockname({cowboy, Req}) -> cowboy_req:sock(Req). %% --------------------------------------------------------------------------
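
For context, a minimal wiring sketch (not part of the patch series) of how the migrated handler might be mounted under cowboy 2.8. The listener is registered under the ref http because init/2 in sockjs_cowboy_handler.erl matches on #{ref := http}; the module name, port and echo callback here are illustrative assumptions modelled on the upstream sockjs-erlang examples.

-module(sockjs_echo_sketch).
-export([start/0, service_echo/3]).

start() ->
    %% Service record as assembled by sockjs_handler:init_state/4.
    Service = sockjs_handler:init_state(<<"/echo">>, fun service_echo/3, state, []),
    %% Catch-all route: sockjs_handler performs its own prefix dispatch.
    Dispatch = cowboy_router:compile([{'_', [{'_', sockjs_cowboy_handler, Service}]}]),
    %% The listener ref must be 'http' so that init(#{ref := http} = Req, Service) matches.
    {ok, _} = cowboy:start_clear(http, [{port, 8081}], #{env => #{dispatch => Dispatch}}),
    ok.

%% Echo callback shaped after the upstream sockjs-erlang examples (assumed contract).
service_echo(_Conn, init, state) -> {ok, state};
service_echo(Conn, {recv, Data}, state) -> sockjs:send(Data, Conn), {ok, state};
service_echo(_Conn, closed, state) -> {ok, state}.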