diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..17278c0
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+*.beam
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..919deb1
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,32 @@
+MARKDOWN_SOURCES=$(wildcard doc/*.md)
+MARKDOWN_TARGETS=$(patsubst doc/%.md,doc/html/%.html,$(MARKDOWN_SOURCES))
+
+all: ebin
+ (cd src;$(MAKE))
+
+docs: html-docs
+
+html-docs: doc/html $(MARKDOWN_TARGETS)
+
+doc/html:
+ mkdir -p doc/html
+
+doc/html/%.html: doc/%.md
+ (title=`grep '^# ' $< | head -1 | sed -e 's:^# ::'` ;\
+ t=/tmp/$*.md ;\
+ sed -e "s:@TITLE@:$$title:g" < doc/header.html > $@ ;\
+ python doc/buildtoc.py < $< > $$t ;\
+ markdown $$t >> $@ ;\
+ rm $$t ;\
+ cat doc/footer.html >> $@)
+
+ebin:
+ mkdir -p ebin
+
+clean: clean-docs
+ (cd src;$(MAKE) clean)
+
+clean-docs: clean-html
+
+clean-html:
+ rm -rf doc/html
diff --git a/NOTES b/NOTES
new file mode 100644
index 0000000..e69de29
diff --git a/deps/mochiweb-r88/LICENSE b/deps/mochiweb-r88/LICENSE
new file mode 100644
index 0000000..c85b65a
--- /dev/null
+++ b/deps/mochiweb-r88/LICENSE
@@ -0,0 +1,9 @@
+This is the MIT license.
+
+Copyright (c) 2007 Mochi Media, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/mochiweb-r88/Makefile b/deps/mochiweb-r88/Makefile
new file mode 100644
index 0000000..72f4b80
--- /dev/null
+++ b/deps/mochiweb-r88/Makefile
@@ -0,0 +1,8 @@
+all:
+ (cd src;$(MAKE))
+
+test:
+ (cd src;$(MAKE) test)
+
+clean:
+ (cd src;$(MAKE) clean)
diff --git a/deps/mochiweb-r88/README b/deps/mochiweb-r88/README
new file mode 100644
index 0000000..f5d2c0d
--- /dev/null
+++ b/deps/mochiweb-r88/README
@@ -0,0 +1 @@
+MochiWeb is an Erlang library for building lightweight HTTP servers.
diff --git a/deps/mochiweb-r88/ebin/mochifmt.beam b/deps/mochiweb-r88/ebin/mochifmt.beam
new file mode 100644
index 0000000..396530f
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochifmt.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochifmt_records.beam b/deps/mochiweb-r88/ebin/mochifmt_records.beam
new file mode 100644
index 0000000..a0a56fc
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochifmt_records.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochifmt_std.beam b/deps/mochiweb-r88/ebin/mochifmt_std.beam
new file mode 100644
index 0000000..e94d9bb
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochifmt_std.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochihex.beam b/deps/mochiweb-r88/ebin/mochihex.beam
new file mode 100644
index 0000000..2c45525
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochihex.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochijson.beam b/deps/mochiweb-r88/ebin/mochijson.beam
new file mode 100644
index 0000000..8e65032
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochijson.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochijson2.beam b/deps/mochiweb-r88/ebin/mochijson2.beam
new file mode 100644
index 0000000..c07c9ee
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochijson2.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochinum.beam b/deps/mochiweb-r88/ebin/mochinum.beam
new file mode 100644
index 0000000..c936b7e
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochinum.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb.app b/deps/mochiweb-r88/ebin/mochiweb.app
new file mode 100644
index 0000000..cd8dbb2
--- /dev/null
+++ b/deps/mochiweb-r88/ebin/mochiweb.app
@@ -0,0 +1,32 @@
+{application, mochiweb,
+ [{description, "MochiMedia Web Server"},
+ {vsn, "0.01"},
+ {modules, [
+ mochihex,
+ mochijson,
+ mochijson2,
+ mochinum,
+ mochiweb,
+ mochiweb_app,
+ mochiweb_charref,
+ mochiweb_cookies,
+ mochiweb_echo,
+ mochiweb_headers,
+ mochiweb_html,
+ mochiweb_http,
+ mochiweb_multipart,
+ mochiweb_request,
+ mochiweb_response,
+ mochiweb_skel,
+ mochiweb_socket_server,
+ mochiweb_sup,
+ mochiweb_util,
+ reloader,
+ mochifmt,
+ mochifmt_std,
+ mochifmt_records
+ ]},
+ {registered, []},
+ {mod, {mochiweb_app, []}},
+ {env, []},
+ {applications, [kernel, stdlib]}]}.
diff --git a/deps/mochiweb-r88/ebin/mochiweb.beam b/deps/mochiweb-r88/ebin/mochiweb.beam
new file mode 100644
index 0000000..c26a385
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_app.beam b/deps/mochiweb-r88/ebin/mochiweb_app.beam
new file mode 100644
index 0000000..324c0a7
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_app.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_charref.beam b/deps/mochiweb-r88/ebin/mochiweb_charref.beam
new file mode 100644
index 0000000..9a6d837
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_charref.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_cookies.beam b/deps/mochiweb-r88/ebin/mochiweb_cookies.beam
new file mode 100644
index 0000000..1b0f0eb
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_cookies.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_echo.beam b/deps/mochiweb-r88/ebin/mochiweb_echo.beam
new file mode 100644
index 0000000..4fa087b
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_echo.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_headers.beam b/deps/mochiweb-r88/ebin/mochiweb_headers.beam
new file mode 100644
index 0000000..5d83696
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_headers.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_html.beam b/deps/mochiweb-r88/ebin/mochiweb_html.beam
new file mode 100644
index 0000000..3e76e6a
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_html.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_http.beam b/deps/mochiweb-r88/ebin/mochiweb_http.beam
new file mode 100644
index 0000000..9e56e2e
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_http.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_multipart.beam b/deps/mochiweb-r88/ebin/mochiweb_multipart.beam
new file mode 100644
index 0000000..96a5011
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_multipart.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_request.beam b/deps/mochiweb-r88/ebin/mochiweb_request.beam
new file mode 100644
index 0000000..978a5b3
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_request.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_response.beam b/deps/mochiweb-r88/ebin/mochiweb_response.beam
new file mode 100644
index 0000000..8ec96a3
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_response.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_skel.beam b/deps/mochiweb-r88/ebin/mochiweb_skel.beam
new file mode 100644
index 0000000..bab67e1
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_skel.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_socket_server.beam b/deps/mochiweb-r88/ebin/mochiweb_socket_server.beam
new file mode 100644
index 0000000..65b7c5e
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_socket_server.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_sup.beam b/deps/mochiweb-r88/ebin/mochiweb_sup.beam
new file mode 100644
index 0000000..1b4537c
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_sup.beam differ
diff --git a/deps/mochiweb-r88/ebin/mochiweb_util.beam b/deps/mochiweb-r88/ebin/mochiweb_util.beam
new file mode 100644
index 0000000..d09e2d9
Binary files /dev/null and b/deps/mochiweb-r88/ebin/mochiweb_util.beam differ
diff --git a/deps/mochiweb-r88/ebin/reloader.beam b/deps/mochiweb-r88/ebin/reloader.beam
new file mode 100644
index 0000000..aa840f2
Binary files /dev/null and b/deps/mochiweb-r88/ebin/reloader.beam differ
diff --git a/deps/mochiweb-r88/priv/skel/Makefile b/deps/mochiweb-r88/priv/skel/Makefile
new file mode 100644
index 0000000..7b44214
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/Makefile
@@ -0,0 +1,5 @@
+all:
+ (cd src;$(MAKE))
+
+clean:
+ (cd src;$(MAKE) clean)
diff --git a/deps/mochiweb-r88/priv/skel/priv/www/index.html b/deps/mochiweb-r88/priv/skel/priv/www/index.html
new file mode 100644
index 0000000..8e7a2c6
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/priv/www/index.html
@@ -0,0 +1,8 @@
+<html>
+<head>
+<title>It Worked</title>
+</head>
+<body>
+MochiWeb running.
+</body>
+</html>
diff --git a/deps/mochiweb-r88/priv/skel/src/Makefile b/deps/mochiweb-r88/priv/skel/src/Makefile
new file mode 100644
index 0000000..7a44a74
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/Makefile
@@ -0,0 +1,9 @@
+include ../support/include.mk
+
+all: $(EBIN_FILES)
+
+debug:
+ $(MAKE) DEBUG=-DDEBUG
+
+clean:
+ rm -rf $(EBIN_FILES)
diff --git a/deps/mochiweb-r88/priv/skel/src/skel.app b/deps/mochiweb-r88/priv/skel/src/skel.app
new file mode 100644
index 0000000..fc16aae
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/skel.app
@@ -0,0 +1,14 @@
+{application, skel,
+ [{description, "skel"},
+ {vsn, "0.01"},
+ {modules, [
+ skel,
+ skel_app,
+ skel_sup,
+ skel_web,
+ skel_deps
+ ]},
+ {registered, []},
+ {mod, {skel_app, []}},
+ {env, []},
+ {applications, [kernel, stdlib, crypto]}]}.
diff --git a/deps/mochiweb-r88/priv/skel/src/skel.erl b/deps/mochiweb-r88/priv/skel/src/skel.erl
new file mode 100644
index 0000000..49263c8
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/skel.erl
@@ -0,0 +1,30 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc TEMPLATE.
+
+-module(skel).
+-author('author <author@example.com>').
+-export([start/0, stop/0]).
+
+ensure_started(App) ->
+ case application:start(App) of
+ ok ->
+ ok;
+ {error, {already_started, App}} ->
+ ok
+ end.
+
+%% @spec start() -> ok
+%% @doc Start the skel server.
+start() ->
+ skel_deps:ensure(),
+ ensure_started(crypto),
+ application:start(skel).
+
+%% @spec stop() -> ok
+%% @doc Stop the skel server.
+stop() ->
+ Res = application:stop(skel),
+ application:stop(crypto),
+ Res.
diff --git a/deps/mochiweb-r88/priv/skel/src/skel.hrl b/deps/mochiweb-r88/priv/skel/src/skel.hrl
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/skel.hrl
@@ -0,0 +1 @@
+
diff --git a/deps/mochiweb-r88/priv/skel/src/skel_app.erl b/deps/mochiweb-r88/priv/skel/src/skel_app.erl
new file mode 100644
index 0000000..6b8228e
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/skel_app.erl
@@ -0,0 +1,22 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Callbacks for the skel application.
+
+-module(skel_app).
+-author('author <author@example.com>').
+
+-behaviour(application).
+-export([start/2,stop/1]).
+
+
+%% @spec start(_Type, _StartArgs) -> ServerRet
+%% @doc application start callback for skel.
+start(_Type, _StartArgs) ->
+ skel_deps:ensure(),
+ skel_sup:start_link().
+
+%% @spec stop(_State) -> ServerRet
+%% @doc application stop callback for skel.
+stop(_State) ->
+ ok.
diff --git a/deps/mochiweb-r88/priv/skel/src/skel_deps.erl b/deps/mochiweb-r88/priv/skel/src/skel_deps.erl
new file mode 100644
index 0000000..b53552d
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/skel_deps.erl
@@ -0,0 +1,84 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Ensure that the relatively-installed dependencies are on the code
+%% loading path, and locate resources relative
+%% to this application's path.
+
+-module(skel_deps).
+-author('author <author@example.com>').
+
+-export([ensure/0, ensure/1]).
+-export([get_base_dir/0, get_base_dir/1]).
+-export([local_path/1, local_path/2]).
+-export([deps_on_path/0, new_siblings/1]).
+
+%% @spec deps_on_path() -> [ProjNameAndVers]
+%% @doc List of project dependencies on the path.
+deps_on_path() ->
+ F = fun (X, Acc) ->
+ ProjDir = filename:dirname(X),
+ case {filename:basename(X),
+ filename:basename(filename:dirname(ProjDir))} of
+ {"ebin", "deps"} ->
+ [filename:basename(ProjDir) | Acc];
+ _ ->
+ Acc
+ end
+ end,
+ ordsets:from_list(lists:foldl(F, [], code:get_path())).
+
+%% @spec new_siblings(Module) -> [Dir]
+%% @doc Find new siblings paths relative to Module that aren't already on the
+%% code path.
+new_siblings(Module) ->
+ Existing = deps_on_path(),
+ SiblingEbin = filelib:wildcard(local_path(["deps", "*", "ebin"], Module)),
+ Siblings = [filename:dirname(X) || X <- SiblingEbin,
+ ordsets:is_element(
+ filename:basename(filename:dirname(X)),
+ Existing) =:= false],
+ lists:filter(fun filelib:is_dir/1,
+ lists:append([[filename:join([X, "ebin"]),
+ filename:join([X, "include"])] ||
+ X <- Siblings])).
+
+
+%% @spec ensure(Module) -> ok
+%% @doc Ensure that all ebin and include paths for dependencies
+%% of the application for Module are on the code path.
+ensure(Module) ->
+ code:add_paths(new_siblings(Module)),
+ code:clash(),
+ ok.
+
+%% @spec ensure() -> ok
+%% @doc Ensure that the ebin and include paths for dependencies of
+%% this application are on the code path. Equivalent to
+%% ensure(?Module).
+ensure() ->
+ ensure(?MODULE).
+
+%% @spec get_base_dir(Module) -> string()
+%% @doc Return the application directory for Module. It assumes Module is in
+%% a standard OTP layout application in the ebin or src directory.
+get_base_dir(Module) ->
+ {file, Here} = code:is_loaded(Module),
+ filename:dirname(filename:dirname(Here)).
+
+%% @spec get_base_dir() -> string()
+%% @doc Return the application directory for this application. Equivalent to
+%% get_base_dir(?MODULE).
+get_base_dir() ->
+ get_base_dir(?MODULE).
+
+%% @spec local_path([string()], Module) -> string()
+%% @doc Return an application-relative directory from Module's application.
+local_path(Components, Module) ->
+ filename:join([get_base_dir(Module) | Components]).
+
+%% @spec local_path(Components) -> string()
+%% @doc Return an application-relative directory for this application.
+%% Equivalent to local_path(Components, ?MODULE).
+local_path(Components) ->
+ local_path(Components, ?MODULE).
diff --git a/deps/mochiweb-r88/priv/skel/src/skel_sup.erl b/deps/mochiweb-r88/priv/skel/src/skel_sup.erl
new file mode 100644
index 0000000..92e0cae
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/skel_sup.erl
@@ -0,0 +1,54 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Supervisor for the skel application.
+
+-module(skel_sup).
+-author('author <author@example.com>').
+
+-behaviour(supervisor).
+
+%% External exports
+-export([start_link/0, upgrade/0]).
+
+%% supervisor callbacks
+-export([init/1]).
+
+%% @spec start_link() -> ServerRet
+%% @doc API for starting the supervisor.
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%% @spec upgrade() -> ok
+%% @doc Add processes if necessary.
+upgrade() ->
+ {ok, {_, Specs}} = init([]),
+
+ Old = sets:from_list(
+ [Name || {Name, _, _, _} <- supervisor:which_children(?MODULE)]),
+ New = sets:from_list([Name || {Name, _, _, _, _, _} <- Specs]),
+ Kill = sets:subtract(Old, New),
+
+ sets:fold(fun (Id, ok) ->
+ supervisor:terminate_child(?MODULE, Id),
+ supervisor:delete_child(?MODULE, Id),
+ ok
+ end, ok, Kill),
+
+ [supervisor:start_child(?MODULE, Spec) || Spec <- Specs],
+ ok.
+
+%% @spec init([]) -> SupervisorTree
+%% @doc supervisor callback.
+init([]) ->
+ Ip = case os:getenv("MOCHIWEB_IP") of false -> "0.0.0.0"; Any -> Any end,
+ WebConfig = [
+ {ip, Ip},
+ {port, 8000},
+ {docroot, skel_deps:local_path(["priv", "www"])}],
+ Web = {skel_web,
+ {skel_web, start, [WebConfig]},
+ permanent, 5000, worker, dynamic},
+
+ Processes = [Web],
+ {ok, {{one_for_one, 10, 10}, Processes}}.
diff --git a/deps/mochiweb-r88/priv/skel/src/skel_web.erl b/deps/mochiweb-r88/priv/skel/src/skel_web.erl
new file mode 100644
index 0000000..3679670
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/src/skel_web.erl
@@ -0,0 +1,43 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Web server for skel.
+
+-module(skel_web).
+-author('author <author@example.com>').
+
+-export([start/1, stop/0, loop/2]).
+
+%% External API
+
+start(Options) ->
+ {DocRoot, Options1} = get_option(docroot, Options),
+ Loop = fun (Req) ->
+ ?MODULE:loop(Req, DocRoot)
+ end,
+ mochiweb_http:start([{name, ?MODULE}, {loop, Loop} | Options1]).
+
+stop() ->
+ mochiweb_http:stop(?MODULE).
+
+loop(Req, DocRoot) ->
+ "/" ++ Path = Req:get(path),
+ case Req:get(method) of
+ Method when Method =:= 'GET'; Method =:= 'HEAD' ->
+ case Path of
+ _ ->
+ Req:serve_file(Path, DocRoot)
+ end;
+ 'POST' ->
+ case Path of
+ _ ->
+ Req:not_found()
+ end;
+ _ ->
+ Req:respond({501, [], []})
+ end.
+
+%% Internal API
+
+get_option(Option, Options) ->
+ {proplists:get_value(Option, Options), proplists:delete(Option, Options)}.
diff --git a/deps/mochiweb-r88/priv/skel/start-dev.sh b/deps/mochiweb-r88/priv/skel/start-dev.sh
new file mode 100755
index 0000000..6f02962
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/start-dev.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+cd `dirname $0`
+exec erl -pa $PWD/ebin $PWD/deps/*/ebin -boot start_sasl -s reloader -s skel
diff --git a/deps/mochiweb-r88/priv/skel/start.sh b/deps/mochiweb-r88/priv/skel/start.sh
new file mode 100755
index 0000000..599f8e6
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/start.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+cd `dirname $0`
+exec erl -pa $PWD/ebin $PWD/deps/*/ebin -boot start_sasl -s skel
diff --git a/deps/mochiweb-r88/priv/skel/support/include.mk b/deps/mochiweb-r88/priv/skel/support/include.mk
new file mode 100644
index 0000000..64b50fe
--- /dev/null
+++ b/deps/mochiweb-r88/priv/skel/support/include.mk
@@ -0,0 +1,46 @@
+## -*- makefile -*-
+
+######################################################################
+## Erlang
+
+ERL := erl
+ERLC := $(ERL)c
+
+INCLUDE_DIRS := ../include $(wildcard ../deps/*/include)
+EBIN_DIRS := $(wildcard ../deps/*/ebin)
+ERLC_FLAGS := -W $(INCLUDE_DIRS:../%=-I ../%) $(EBIN_DIRS:%=-pa %)
+
+ifndef no_debug_info
+ ERLC_FLAGS += +debug_info
+endif
+
+ifdef debug
+ ERLC_FLAGS += -Ddebug
+endif
+
+EBIN_DIR := ../ebin
+DOC_DIR := ../doc
+EMULATOR := beam
+
+ERL_SOURCES := $(wildcard *.erl)
+ERL_HEADERS := $(wildcard *.hrl) $(wildcard ../include/*.hrl)
+ERL_OBJECTS := $(ERL_SOURCES:%.erl=$(EBIN_DIR)/%.$(EMULATOR))
+ERL_DOCUMENTS := $(ERL_SOURCES:%.erl=$(DOC_DIR)/%.html)
+ERL_OBJECTS_LOCAL := $(ERL_SOURCES:%.erl=./%.$(EMULATOR))
+APP_FILES := $(wildcard *.app)
+EBIN_FILES = $(ERL_OBJECTS) $(ERL_DOCUMENTS) $(APP_FILES:%.app=../ebin/%.app)
+EBIN_FILES_NO_DOCS = $(ERL_OBJECTS) $(APP_FILES:%.app=../ebin/%.app)
+MODULES = $(ERL_SOURCES:%.erl=%)
+
+../ebin/%.app: %.app
+ cp $< $@
+
+$(EBIN_DIR)/%.$(EMULATOR): %.erl
+ $(ERLC) $(ERLC_FLAGS) -o $(EBIN_DIR) $<
+
+./%.$(EMULATOR): %.erl
+ $(ERLC) $(ERLC_FLAGS) -o . $<
+
+$(DOC_DIR)/%.html: %.erl
+ $(ERL) -noshell -run edoc file $< -run init stop
+ mv *.html $(DOC_DIR)
diff --git a/deps/mochiweb-r88/scripts/new_mochiweb.erl b/deps/mochiweb-r88/scripts/new_mochiweb.erl
new file mode 100644
index 0000000..918f01e
--- /dev/null
+++ b/deps/mochiweb-r88/scripts/new_mochiweb.erl
@@ -0,0 +1,27 @@
+#!/usr/bin/env escript
+%% -*- mode: erlang -*-
+-export([main/1]).
+
+%% External API
+
+main([Name]) ->
+ main([Name, "."]);
+main([Name, Dest]) ->
+ ensure(),
+ DestDir = filename:absname(Dest),
+ ok = mochiweb_skel:skelcopy(DestDir, Name);
+main(_) ->
+ usage().
+
+%% Internal API
+
+ensure() ->
+ code:add_patha(filename:join(filename:dirname(escript:script_name()),
+ "../ebin")).
+
+usage() ->
+ io:format("usage: ~s name [destdir]~n",
+ [filename:basename(escript:script_name())]),
+ halt(1).
+
+
diff --git a/deps/mochiweb-r88/src/Makefile b/deps/mochiweb-r88/src/Makefile
new file mode 100644
index 0000000..af1e600
--- /dev/null
+++ b/deps/mochiweb-r88/src/Makefile
@@ -0,0 +1,12 @@
+include ../support/include.mk
+
+all: $(EBIN_FILES)
+
+debug:
+ $(MAKE) DEBUG=-DDEBUG
+
+clean:
+ rm -rf $(EBIN_FILES)
+
+test: all
+ $(ERL) -noshell -pa ../ebin -s mochiweb test -s init stop
diff --git a/deps/mochiweb-r88/src/mochifmt.erl b/deps/mochiweb-r88/src/mochifmt.erl
new file mode 100644
index 0000000..da0a133
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochifmt.erl
@@ -0,0 +1,426 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2008 Mochi Media, Inc.
+
+%% @doc String Formatting for Erlang, inspired by Python 2.6
+%% (<a href="http://www.python.org/dev/peps/pep-3101/">PEP 3101</a>).
+%%
+-module(mochifmt).
+-author('bob@mochimedia.com').
+-export([format/2, format_field/2, convert_field/2, get_value/2, get_field/2]).
+-export([tokenize/1, format/3, get_field/3, format_field/3]).
+-export([bformat/2, bformat/3]).
+-export([f/2, f/3]).
+-export([test/0]).
+
+-record(conversion, {length, precision, ctype, align, fill_char, sign}).
+
+%% @spec tokenize(S::string()) -> tokens()
+%% @doc Tokenize a format string into mochifmt's internal format.
+tokenize(S) ->
+ {?MODULE, tokenize(S, "", [])}.
+
+%% @spec convert_field(Arg, Conversion::conversion()) -> term()
+%% @doc Process Arg according to the given explicit conversion specifier.
+convert_field(Arg, "") ->
+ Arg;
+convert_field(Arg, "r") ->
+ repr(Arg);
+convert_field(Arg, "s") ->
+ str(Arg).
+
+%% @spec get_value(Key::string(), Args::args()) -> term()
+%% @doc Get the Key from Args. If Args is a tuple then convert Key to
+%% an integer and get element(1 + Key, Args). If Args is a list and Key
+%% can be parsed as an integer then use lists:nth(1 + Key, Args),
+%% otherwise try and look for Key in Args as a proplist, converting
+%% Key to an atom or binary if necessary.
+get_value(Key, Args) when is_tuple(Args) ->
+ element(1 + list_to_integer(Key), Args);
+get_value(Key, Args) when is_list(Args) ->
+ try lists:nth(1 + list_to_integer(Key), Args)
+ catch error:_ ->
+ {_K, V} = proplist_lookup(Key, Args),
+ V
+ end.
+
+%% @spec get_field(Key::string(), Args) -> term()
+%% @doc Consecutively call get_value/2 on parts of Key delimited by ".",
+%% replacing Args with the result of the previous get_value. This
+%% is used to implement formats such as {0.0}.
+get_field(Key, Args) ->
+ get_field(Key, Args, ?MODULE).
+
+%% @spec get_field(Key::string(), Args, Module) -> term()
+%% @doc Consecutively call Module:get_value/2 on parts of Key delimited by ".",
+%% replacing Args with the result of the previous get_value. This
+%% is used to implement formats such as {0.0}.
+get_field(Key, Args, Module) ->
+ {Name, Next} = lists:splitwith(fun (C) -> C =/= $. end, Key),
+ Res = try Module:get_value(Name, Args)
+ catch error:undef -> get_value(Name, Args) end,
+ case Next of
+ "" ->
+ Res;
+ "." ++ S1 ->
+ get_field(S1, Res, Module)
+ end.
+
+%% @spec format(Format::string(), Args) -> iolist()
+%% @doc Format Args with Format.
+format(Format, Args) ->
+ format(Format, Args, ?MODULE).
+
+%% @spec format(Format::string(), Args, Module) -> iolist()
+%% @doc Format Args with Format using Module.
+format({?MODULE, Parts}, Args, Module) ->
+ format2(Parts, Args, Module, []);
+format(S, Args, Module) ->
+ format(tokenize(S), Args, Module).
+
+%% @spec format_field(Arg, Format) -> iolist()
+%% @doc Format Arg with Format.
+format_field(Arg, Format) ->
+ format_field(Arg, Format, ?MODULE).
+
+%% @spec format_field(Arg, Format, _Module) -> iolist()
+%% @doc Format Arg with Format.
+format_field(Arg, Format, _Module) ->
+ F = default_ctype(Arg, parse_std_conversion(Format)),
+ fix_padding(fix_sign(convert2(Arg, F), F), F).
+
+%% @spec f(Format::string(), Args) -> string()
+%% @doc Format Args with Format and return a string().
+f(Format, Args) ->
+ f(Format, Args, ?MODULE).
+
+%% @spec f(Format::string(), Args, Module) -> string()
+%% @doc Format Args with Format using Module and return a string().
+f(Format, Args, Module) ->
+ case lists:member(${, Format) of
+ true ->
+ binary_to_list(bformat(Format, Args, Module));
+ false ->
+ Format
+ end.
+
+%% @spec bformat(Format::string(), Args) -> binary()
+%% @doc Format Args with Format and return a binary().
+bformat(Format, Args) ->
+ iolist_to_binary(format(Format, Args)).
+
+%% @spec bformat(Format::string(), Args, Module) -> binary()
+%% @doc Format Args with Format using Module and return a binary().
+bformat(Format, Args, Module) ->
+ iolist_to_binary(format(Format, Args, Module)).
+
+%% @spec test() -> ok
+%% @doc Run tests.
+test() ->
+ ok = test_tokenize(),
+ ok = test_format(),
+ ok = test_std(),
+ ok = test_records(),
+ ok.
+
+%% Internal API
+
+add_raw("", Acc) ->
+ Acc;
+add_raw(S, Acc) ->
+ [{raw, lists:reverse(S)} | Acc].
+
+tokenize([], S, Acc) ->
+ lists:reverse(add_raw(S, Acc));
+tokenize("{{" ++ Rest, S, Acc) ->
+ tokenize(Rest, "{" ++ S, Acc);
+tokenize("{" ++ Rest, S, Acc) ->
+ {Format, Rest1} = tokenize_format(Rest),
+ tokenize(Rest1, "", [{format, make_format(Format)} | add_raw(S, Acc)]);
+tokenize("}}" ++ Rest, S, Acc) ->
+ tokenize(Rest, "}" ++ S, Acc);
+tokenize([C | Rest], S, Acc) ->
+ tokenize(Rest, [C | S], Acc).
+
+tokenize_format(S) ->
+ tokenize_format(S, 1, []).
+
+tokenize_format("}" ++ Rest, 1, Acc) ->
+ {lists:reverse(Acc), Rest};
+tokenize_format("}" ++ Rest, N, Acc) ->
+ tokenize_format(Rest, N - 1, "}" ++ Acc);
+tokenize_format("{" ++ Rest, N, Acc) ->
+ tokenize_format(Rest, 1 + N, "{" ++ Acc);
+tokenize_format([C | Rest], N, Acc) ->
+ tokenize_format(Rest, N, [C | Acc]).
+
+make_format(S) ->
+ {Name0, Spec} = case lists:splitwith(fun (C) -> C =/= $: end, S) of
+ {_, ""} ->
+ {S, ""};
+ {SN, ":" ++ SS} ->
+ {SN, SS}
+ end,
+ {Name, Transform} = case lists:splitwith(fun (C) -> C =/= $! end, Name0) of
+ {_, ""} ->
+ {Name0, ""};
+ {TN, "!" ++ TT} ->
+ {TN, TT}
+ end,
+ {Name, Transform, Spec}.
+
+proplist_lookup(S, P) ->
+ A = try list_to_existing_atom(S)
+ catch error:_ -> make_ref() end,
+ B = try list_to_binary(S)
+ catch error:_ -> make_ref() end,
+ proplist_lookup2({S, A, B}, P).
+
+proplist_lookup2({KS, KA, KB}, [{K, V} | _])
+ when KS =:= K orelse KA =:= K orelse KB =:= K ->
+ {K, V};
+proplist_lookup2(Keys, [_ | Rest]) ->
+ proplist_lookup2(Keys, Rest).
+
+format2([], _Args, _Module, Acc) ->
+ lists:reverse(Acc);
+format2([{raw, S} | Rest], Args, Module, Acc) ->
+ format2(Rest, Args, Module, [S | Acc]);
+format2([{format, {Key, Convert, Format0}} | Rest], Args, Module, Acc) ->
+ Format = f(Format0, Args, Module),
+ V = case Module of
+ ?MODULE ->
+ V0 = get_field(Key, Args),
+ V1 = convert_field(V0, Convert),
+ format_field(V1, Format);
+ _ ->
+ V0 = try Module:get_field(Key, Args)
+ catch error:undef -> get_field(Key, Args, Module) end,
+ V1 = try Module:convert_field(V0, Convert)
+ catch error:undef -> convert_field(V0, Convert) end,
+ try Module:format_field(V1, Format)
+ catch error:undef -> format_field(V1, Format, Module) end
+ end,
+ format2(Rest, Args, Module, [V | Acc]).
+
+default_ctype(_Arg, C=#conversion{ctype=N}) when N =/= undefined ->
+ C;
+default_ctype(Arg, C) when is_integer(Arg) ->
+ C#conversion{ctype=decimal};
+default_ctype(Arg, C) when is_float(Arg) ->
+ C#conversion{ctype=general};
+default_ctype(_Arg, C) ->
+ C#conversion{ctype=string}.
+
+fix_padding(Arg, #conversion{length=undefined}) ->
+ Arg;
+fix_padding(Arg, F=#conversion{length=Length, fill_char=Fill0, align=Align0,
+ ctype=Type}) ->
+ Padding = Length - iolist_size(Arg),
+ Fill = case Fill0 of
+ undefined ->
+ $\s;
+ _ ->
+ Fill0
+ end,
+ Align = case Align0 of
+ undefined ->
+ case Type of
+ string ->
+ left;
+ _ ->
+ right
+ end;
+ _ ->
+ Align0
+ end,
+ case Padding > 0 of
+ true ->
+ do_padding(Arg, Padding, Fill, Align, F);
+ false ->
+ Arg
+ end.
+
+do_padding(Arg, Padding, Fill, right, _F) ->
+ [lists:duplicate(Padding, Fill), Arg];
+do_padding(Arg, Padding, Fill, center, _F) ->
+ LPadding = lists:duplicate(Padding div 2, Fill),
+ RPadding = case Padding band 1 of
+ 1 ->
+ [Fill | LPadding];
+ _ ->
+ LPadding
+ end,
+ [LPadding, Arg, RPadding];
+do_padding([$- | Arg], Padding, Fill, sign_right, _F) ->
+ [[$- | lists:duplicate(Padding, Fill)], Arg];
+do_padding(Arg, Padding, Fill, sign_right, #conversion{sign=$-}) ->
+ [lists:duplicate(Padding, Fill), Arg];
+do_padding([S | Arg], Padding, Fill, sign_right, #conversion{sign=S}) ->
+ [[S | lists:duplicate(Padding, Fill)], Arg];
+do_padding(Arg, Padding, Fill, sign_right, #conversion{sign=undefined}) ->
+ [lists:duplicate(Padding, Fill), Arg];
+do_padding(Arg, Padding, Fill, left, _F) ->
+ [Arg | lists:duplicate(Padding, Fill)].
+
+fix_sign(Arg, #conversion{sign=$+}) when Arg >= 0 ->
+ [$+, Arg];
+fix_sign(Arg, #conversion{sign=$\s}) when Arg >= 0 ->
+ [$\s, Arg];
+fix_sign(Arg, _F) ->
+ Arg.
+
+ctype($\%) -> percent;
+ctype($s) -> string;
+ctype($b) -> bin;
+ctype($o) -> oct;
+ctype($X) -> upper_hex;
+ctype($x) -> hex;
+ctype($c) -> char;
+ctype($d) -> decimal;
+ctype($g) -> general;
+ctype($f) -> fixed;
+ctype($e) -> exp.
+
+align($<) -> left;
+align($>) -> right;
+align($^) -> center;
+align($=) -> sign_right.
+
+convert2(Arg, F=#conversion{ctype=percent}) ->
+ [convert2(100.0 * Arg, F#conversion{ctype=fixed}), $\%];
+convert2(Arg, #conversion{ctype=string}) ->
+ str(Arg);
+convert2(Arg, #conversion{ctype=bin}) ->
+ erlang:integer_to_list(Arg, 2);
+convert2(Arg, #conversion{ctype=oct}) ->
+ erlang:integer_to_list(Arg, 8);
+convert2(Arg, #conversion{ctype=upper_hex}) ->
+ erlang:integer_to_list(Arg, 16);
+convert2(Arg, #conversion{ctype=hex}) ->
+ string:to_lower(erlang:integer_to_list(Arg, 16));
+convert2(Arg, #conversion{ctype=char}) when Arg < 16#80 ->
+ [Arg];
+convert2(Arg, #conversion{ctype=char}) ->
+ xmerl_ucs:to_utf8(Arg);
+convert2(Arg, #conversion{ctype=decimal}) ->
+ integer_to_list(Arg);
+convert2(Arg, #conversion{ctype=general, precision=undefined}) ->
+ try mochinum:digits(Arg)
+ catch error:undef -> io_lib:format("~g", [Arg]) end;
+convert2(Arg, #conversion{ctype=fixed, precision=undefined}) ->
+ io_lib:format("~f", [Arg]);
+convert2(Arg, #conversion{ctype=exp, precision=undefined}) ->
+ io_lib:format("~e", [Arg]);
+convert2(Arg, #conversion{ctype=general, precision=P}) ->
+ io_lib:format("~." ++ integer_to_list(P) ++ "g", [Arg]);
+convert2(Arg, #conversion{ctype=fixed, precision=P}) ->
+ io_lib:format("~." ++ integer_to_list(P) ++ "f", [Arg]);
+convert2(Arg, #conversion{ctype=exp, precision=P}) ->
+ io_lib:format("~." ++ integer_to_list(P) ++ "e", [Arg]).
+
+str(A) when is_atom(A) ->
+ atom_to_list(A);
+str(I) when is_integer(I) ->
+ integer_to_list(I);
+str(F) when is_float(F) ->
+ try mochinum:digits(F)
+ catch error:undef -> io_lib:format("~g", [F]) end;
+str(L) when is_list(L) ->
+ L;
+str(B) when is_binary(B) ->
+ B;
+str(P) ->
+ repr(P).
+
+repr(P) when is_float(P) ->
+ try mochinum:digits(P)
+ catch error:undef -> float_to_list(P) end;
+repr(P) ->
+ io_lib:format("~p", [P]).
+
+parse_std_conversion(S) ->
+ parse_std_conversion(S, #conversion{}).
+
+parse_std_conversion("", Acc) ->
+ Acc;
+parse_std_conversion([Fill, Align | Spec], Acc)
+ when Align =:= $< orelse Align =:= $> orelse Align =:= $= orelse Align =:= $^ ->
+ parse_std_conversion(Spec, Acc#conversion{fill_char=Fill,
+ align=align(Align)});
+parse_std_conversion([Align | Spec], Acc)
+ when Align =:= $< orelse Align =:= $> orelse Align =:= $= orelse Align =:= $^ ->
+ parse_std_conversion(Spec, Acc#conversion{align=align(Align)});
+parse_std_conversion([Sign | Spec], Acc)
+ when Sign =:= $+ orelse Sign =:= $- orelse Sign =:= $\s ->
+ parse_std_conversion(Spec, Acc#conversion{sign=Sign});
+parse_std_conversion("0" ++ Spec, Acc) ->
+ Align = case Acc#conversion.align of
+ undefined ->
+ sign_right;
+ A ->
+ A
+ end,
+ parse_std_conversion(Spec, Acc#conversion{fill_char=$0, align=Align});
+parse_std_conversion(Spec=[D|_], Acc) when D >= $0 andalso D =< $9 ->
+ {W, Spec1} = lists:splitwith(fun (C) -> C >= $0 andalso C =< $9 end, Spec),
+ parse_std_conversion(Spec1, Acc#conversion{length=list_to_integer(W)});
+parse_std_conversion([$. | Spec], Acc) ->
+ case lists:splitwith(fun (C) -> C >= $0 andalso C =< $9 end, Spec) of
+ {"", Spec1} ->
+ parse_std_conversion(Spec1, Acc);
+ {P, Spec1} ->
+ parse_std_conversion(Spec1,
+ Acc#conversion{precision=list_to_integer(P)})
+ end;
+parse_std_conversion([Type], Acc) ->
+ parse_std_conversion("", Acc#conversion{ctype=ctype(Type)}).
+
+test_tokenize() ->
+ {?MODULE, [{raw, "ABC"}]} = tokenize("ABC"),
+ {?MODULE, [{format, {"0", "", ""}}]} = tokenize("{0}"),
+ {?MODULE, [{raw, "ABC"}, {format, {"1", "", ""}}, {raw, "DEF"}]} =
+ tokenize("ABC{1}DEF"),
+ ok.
+
+test_format() ->
+ <<" -4">> = bformat("{0:4}", [-4]),
+ <<" 4">> = bformat("{0:4}", [4]),
+ <<" 4">> = bformat("{0:{0}}", [4]),
+ <<"4 ">> = bformat("{0:4}", ["4"]),
+ <<"4 ">> = bformat("{0:{0}}", ["4"]),
+ <<"1.2yoDEF">> = bformat("{2}{0}{1}{3}", {yo, "DE", 1.2, <<"F">>}),
+ <<"cafebabe">> = bformat("{0:x}", {16#cafebabe}),
+ <<"CAFEBABE">> = bformat("{0:X}", {16#cafebabe}),
+ <<"CAFEBABE">> = bformat("{0:X}", {16#cafebabe}),
+ <<"755">> = bformat("{0:o}", {8#755}),
+ <<"a">> = bformat("{0:c}", {97}),
+ %% Horizontal ellipsis
+ <<226, 128, 166>> = bformat("{0:c}", {16#2026}),
+ <<"11">> = bformat("{0:b}", {3}),
+ <<"11">> = bformat("{0:b}", [3]),
+ <<"11">> = bformat("{three:b}", [{three, 3}]),
+ <<"11">> = bformat("{three:b}", [{"three", 3}]),
+ <<"11">> = bformat("{three:b}", [{<<"three">>, 3}]),
+ <<"\"foo\"">> = bformat("{0!r}", {"foo"}),
+ <<"2008-5-4">> = bformat("{0.0}-{0.1}-{0.2}", {{2008,5,4}}),
+ <<"2008-05-04">> = bformat("{0.0:04}-{0.1:02}-{0.2:02}", {{2008,5,4}}),
+ <<"foo6bar-6">> = bformat("foo{1}{0}-{1}", {bar, 6}),
+ <<"-'atom test'-">> = bformat("-{arg!r}-", [{arg, 'atom test'}]),
+ <<"2008-05-04">> = bformat("{0.0:0{1.0}}-{0.1:0{1.1}}-{0.2:0{1.2}}",
+ {{2008,5,4}, {4, 2, 2}}),
+ ok.
+
+test_std() ->
+ M = mochifmt_std:new(),
+ <<"01">> = bformat("{0}{1}", [0, 1], M),
+ ok.
+
+test_records() ->
+ M = mochifmt_records:new([{conversion, record_info(fields, conversion)}]),
+ R = #conversion{length=long, precision=hard, sign=peace},
+ long = M:get_value("length", R),
+ hard = M:get_value("precision", R),
+ peace = M:get_value("sign", R),
+ <<"long hard">> = bformat("{length} {precision}", R, M),
+ <<"long hard">> = bformat("{0.length} {0.precision}", [R], M),
+ ok.
diff --git a/deps/mochiweb-r88/src/mochifmt_records.erl b/deps/mochiweb-r88/src/mochifmt_records.erl
new file mode 100644
index 0000000..94c7797
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochifmt_records.erl
@@ -0,0 +1,30 @@
+%% @author Bob Ippolito
+%% @copyright 2008 Mochi Media, Inc.
+
+%% @doc Formatter that understands records.
+%%
+%% Usage:
+%%
+%% 1> M = mochifmt_records:new([{rec, record_info(fields, rec)}]),
+%% M:format("{0.bar}", [#rec{bar=foo}]).
+%% foo
+
+-module(mochifmt_records, [Recs]).
+-author('bob@mochimedia.com').
+-export([get_value/2]).
+
+get_value(Key, Rec) when is_tuple(Rec) and is_atom(element(1, Rec)) ->
+ try begin
+ Atom = list_to_existing_atom(Key),
+ {_, Fields} = proplists:lookup(element(1, Rec), Recs),
+ element(get_rec_index(Atom, Fields, 2), Rec)
+ end
+ catch error:_ -> mochifmt:get_value(Key, Rec)
+ end;
+get_value(Key, Args) ->
+ mochifmt:get_value(Key, Args).
+
+get_rec_index(Atom, [Atom | _], Index) ->
+ Index;
+get_rec_index(Atom, [_ | Rest], Index) ->
+ get_rec_index(Atom, Rest, 1 + Index).
diff --git a/deps/mochiweb-r88/src/mochifmt_std.erl b/deps/mochiweb-r88/src/mochifmt_std.erl
new file mode 100644
index 0000000..9442016
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochifmt_std.erl
@@ -0,0 +1,23 @@
+%% @author Bob Ippolito
+%% @copyright 2008 Mochi Media, Inc.
+
+%% @doc Template module for a mochifmt formatter.
+
+-module(mochifmt_std, []).
+-author('bob@mochimedia.com').
+-export([format/2, get_value/2, format_field/2, get_field/2, convert_field/2]).
+
+format(Format, Args) ->
+ mochifmt:format(Format, Args, THIS).
+
+get_field(Key, Args) ->
+ mochifmt:get_field(Key, Args, THIS).
+
+convert_field(Key, Args) ->
+ mochifmt:convert_field(Key, Args).
+
+get_value(Key, Args) ->
+ mochifmt:get_value(Key, Args).
+
+format_field(Arg, Format) ->
+ mochifmt:format_field(Arg, Format, THIS).
diff --git a/deps/mochiweb-r88/src/mochihex.erl b/deps/mochiweb-r88/src/mochihex.erl
new file mode 100644
index 0000000..7fe6899
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochihex.erl
@@ -0,0 +1,75 @@
+%% @author Bob Ippolito
+%% @copyright 2006 Mochi Media, Inc.
+
+%% @doc Utilities for working with hexadecimal strings.
+
+-module(mochihex).
+-author('bob@mochimedia.com').
+
+-export([test/0, to_hex/1, to_bin/1, to_int/1, dehex/1, hexdigit/1]).
+
+%% @type iolist() = [char() | binary() | iolist()]
+%% @type iodata() = iolist() | binary()
+
+%% @spec to_hex(integer | iolist()) -> string()
+%% @doc Convert an iolist to a hexadecimal string.
+to_hex(0) ->
+ "0";
+to_hex(I) when is_integer(I), I > 0 ->
+ to_hex_int(I, []);
+to_hex(B) ->
+ to_hex(iolist_to_binary(B), []).
+
+%% @spec to_bin(string()) -> binary()
+%% @doc Convert a hexadecimal string to a binary.
+to_bin(L) ->
+ to_bin(L, []).
+
+%% @spec to_int(string()) -> integer()
+%% @doc Convert a hexadecimal string to an integer.
+to_int(L) ->
+ erlang:list_to_integer(L, 16).
+
+%% @spec dehex(char()) -> integer()
+%% @doc Convert a hex digit to its integer value.
+dehex(C) when C >= $0, C =< $9 ->
+ C - $0;
+dehex(C) when C >= $a, C =< $f ->
+ C - $a + 10;
+dehex(C) when C >= $A, C =< $F ->
+ C - $A + 10.
+
+%% @spec hexdigit(integer()) -> char()
+%% @doc Convert an integer less than 16 to a hex digit.
+hexdigit(C) when C >= 0, C =< 9 ->
+ C + $0;
+hexdigit(C) when C =< 15 ->
+ C + $a - 10.
+
+%% @spec test() -> ok
+%% @doc Test this module.
+test() ->
+ "ff000ff1" = to_hex([255, 0, 15, 241]),
+ <<255, 0, 15, 241>> = to_bin("ff000ff1"),
+ 16#ff000ff1 = to_int("ff000ff1"),
+ "ff000ff1" = to_hex(16#ff000ff1),
+ ok.
+
+
+%% Internal API
+
+to_hex(<<>>, Acc) ->
+ lists:reverse(Acc);
+to_hex(<<C1:4, C2:4, Rest/binary>>, Acc) ->
+ to_hex(Rest, [hexdigit(C2), hexdigit(C1) | Acc]).
+
+to_hex_int(0, Acc) ->
+ Acc;
+to_hex_int(I, Acc) ->
+ to_hex_int(I bsr 4, [hexdigit(I band 15) | Acc]).
+
+to_bin([], Acc) ->
+ iolist_to_binary(lists:reverse(Acc));
+to_bin([C1, C2 | Rest], Acc) ->
+ to_bin(Rest, [(dehex(C1) bsl 4) bor dehex(C2) | Acc]).
+
diff --git a/deps/mochiweb-r88/src/mochijson.erl b/deps/mochiweb-r88/src/mochijson.erl
new file mode 100644
index 0000000..0ce5b5e
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochijson.erl
@@ -0,0 +1,529 @@
+%% @author Bob Ippolito
+%% @copyright 2006 Mochi Media, Inc.
+
+%% @doc Yet another JSON (RFC 4627) library for Erlang.
+-module(mochijson).
+-author('bob@mochimedia.com').
+-export([encoder/1, encode/1]).
+-export([decoder/1, decode/1]).
+-export([binary_encoder/1, binary_encode/1]).
+-export([binary_decoder/1, binary_decode/1]).
+-export([test/0]).
+
+% This is a macro to placate syntax highlighters..
+-define(Q, $\").
+-define(ADV_COL(S, N), S#decoder{column=N+S#decoder.column}).
+-define(INC_COL(S), S#decoder{column=1+S#decoder.column}).
+-define(INC_LINE(S), S#decoder{column=1, line=1+S#decoder.line}).
+
+%% @type iolist() = [char() | binary() | iolist()]
+%% @type iodata() = iolist() | binary()
+%% @type json_string() = atom | string() | binary()
+%% @type json_number() = integer() | float()
+%% @type json_array() = {array, [json_term()]}
+%% @type json_object() = {struct, [{json_string(), json_term()}]}
+%% @type json_term() = json_string() | json_number() | json_array() |
+%% json_object()
+%% @type encoding() = utf8 | unicode
+%% @type encoder_option() = {input_encoding, encoding()} |
+%% {handler, function()}
+%% @type decoder_option() = {input_encoding, encoding()} |
+%% {object_hook, function()}
+%% @type bjson_string() = binary()
+%% @type bjson_number() = integer() | float()
+%% @type bjson_array() = [bjson_term()]
+%% @type bjson_object() = {struct, [{bjson_string(), bjson_term()}]}
+%% @type bjson_term() = bjson_string() | bjson_number() | bjson_array() |
+%% bjson_object()
+%% @type binary_encoder_option() = {handler, function()}
+%% @type binary_decoder_option() = {object_hook, function()}
+
+-record(encoder, {input_encoding=unicode,
+ handler=null}).
+
+-record(decoder, {input_encoding=utf8,
+ object_hook=null,
+ line=1,
+ column=1,
+ state=null}).
+
+%% @spec encoder([encoder_option()]) -> function()
+%% @doc Create an encoder/1 with the given options.
+encoder(Options) ->
+ State = parse_encoder_options(Options, #encoder{}),
+ fun (O) -> json_encode(O, State) end.
+
+%% @spec encode(json_term()) -> iolist()
+%% @doc Encode the given as JSON to an iolist.
+encode(Any) ->
+ json_encode(Any, #encoder{}).
+
+%% @spec decoder([decoder_option()]) -> function()
+%% @doc Create a decoder/1 with the given options.
+decoder(Options) ->
+ State = parse_decoder_options(Options, #decoder{}),
+ fun (O) -> json_decode(O, State) end.
+
+%% @spec decode(iolist()) -> json_term()
+%% @doc Decode the given iolist to Erlang terms.
+decode(S) ->
+ json_decode(S, #decoder{}).
+
+%% @spec binary_decoder([binary_decoder_option()]) -> function()
+%% @doc Create a binary_decoder/1 with the given options.
+binary_decoder(Options) ->
+ mochijson2:decoder(Options).
+
+%% @spec binary_encoder([binary_encoder_option()]) -> function()
+%% @doc Create a binary_encoder/1 with the given options.
+binary_encoder(Options) ->
+ mochijson2:encoder(Options).
+
+%% @spec binary_encode(bjson_term()) -> iolist()
+%% @doc Encode the given as JSON to an iolist, using lists for arrays and
+%% binaries for strings.
+binary_encode(Any) ->
+ mochijson2:encode(Any).
+
+%% @spec binary_decode(iolist()) -> bjson_term()
+%% @doc Decode the given iolist to Erlang terms, using lists for arrays and
+%% binaries for strings.
+binary_decode(S) ->
+ mochijson2:decode(S).
+
+test() ->
+ test_all(),
+ mochijson2:test().
+
+%% Internal API
+
+parse_encoder_options([], State) ->
+ State;
+parse_encoder_options([{input_encoding, Encoding} | Rest], State) ->
+ parse_encoder_options(Rest, State#encoder{input_encoding=Encoding});
+parse_encoder_options([{handler, Handler} | Rest], State) ->
+ parse_encoder_options(Rest, State#encoder{handler=Handler}).
+
+parse_decoder_options([], State) ->
+ State;
+parse_decoder_options([{input_encoding, Encoding} | Rest], State) ->
+ parse_decoder_options(Rest, State#decoder{input_encoding=Encoding});
+parse_decoder_options([{object_hook, Hook} | Rest], State) ->
+ parse_decoder_options(Rest, State#decoder{object_hook=Hook}).
+
+json_encode(true, _State) ->
+ "true";
+json_encode(false, _State) ->
+ "false";
+json_encode(null, _State) ->
+ "null";
+json_encode(I, _State) when is_integer(I) ->
+ integer_to_list(I);
+json_encode(F, _State) when is_float(F) ->
+ mochinum:digits(F);
+json_encode(L, State) when is_list(L); is_binary(L); is_atom(L) ->
+ json_encode_string(L, State);
+json_encode({array, Props}, State) when is_list(Props) ->
+ json_encode_array(Props, State);
+json_encode({struct, Props}, State) when is_list(Props) ->
+ json_encode_proplist(Props, State);
+json_encode(Bad, #encoder{handler=null}) ->
+ exit({json_encode, {bad_term, Bad}});
+json_encode(Bad, State=#encoder{handler=Handler}) ->
+ json_encode(Handler(Bad), State).
+
+json_encode_array([], _State) ->
+ "[]";
+json_encode_array(L, State) ->
+ F = fun (O, Acc) ->
+ [$,, json_encode(O, State) | Acc]
+ end,
+ [$, | Acc1] = lists:foldl(F, "[", L),
+ lists:reverse([$\] | Acc1]).
+
+json_encode_proplist([], _State) ->
+ "{}";
+json_encode_proplist(Props, State) ->
+ F = fun ({K, V}, Acc) ->
+ KS = case K of
+ K when is_atom(K) ->
+ json_encode_string_utf8(atom_to_list(K));
+ K when is_integer(K) ->
+ json_encode_string(integer_to_list(K), State);
+ K when is_list(K); is_binary(K) ->
+ json_encode_string(K, State)
+ end,
+ VS = json_encode(V, State),
+ [$,, VS, $:, KS | Acc]
+ end,
+ [$, | Acc1] = lists:foldl(F, "{", Props),
+ lists:reverse([$\} | Acc1]).
+
+json_encode_string(A, _State) when is_atom(A) ->
+ json_encode_string_unicode(xmerl_ucs:from_utf8(atom_to_list(A)));
+json_encode_string(B, _State) when is_binary(B) ->
+ json_encode_string_unicode(xmerl_ucs:from_utf8(B));
+json_encode_string(S, #encoder{input_encoding=utf8}) ->
+ json_encode_string_utf8(S);
+json_encode_string(S, #encoder{input_encoding=unicode}) ->
+ json_encode_string_unicode(S).
+
+json_encode_string_utf8(S) ->
+ [?Q | json_encode_string_utf8_1(S)].
+
+json_encode_string_utf8_1([C | Cs]) when C >= 0, C =< 16#7f ->
+ NewC = case C of
+ $\\ -> "\\\\";
+ ?Q -> "\\\"";
+ _ when C >= $\s, C < 16#7f -> C;
+ $\t -> "\\t";
+ $\n -> "\\n";
+ $\r -> "\\r";
+ $\f -> "\\f";
+ $\b -> "\\b";
+ _ when C >= 0, C =< 16#7f -> unihex(C);
+ _ -> exit({json_encode, {bad_char, C}})
+ end,
+ [NewC | json_encode_string_utf8_1(Cs)];
+json_encode_string_utf8_1(All=[C | _]) when C >= 16#80, C =< 16#10FFFF ->
+ json_encode_string_unicode(xmerl_ucs:from_utf8(All));
+json_encode_string_utf8_1([]) ->
+ "\"".
+
+json_encode_string_unicode(S) ->
+ [?Q | json_encode_string_unicode_1(S)].
+
+json_encode_string_unicode_1([C | Cs]) ->
+ NewC = case C of
+ $\\ -> "\\\\";
+ ?Q -> "\\\"";
+ _ when C >= $\s, C < 16#7f -> C;
+ $\t -> "\\t";
+ $\n -> "\\n";
+ $\r -> "\\r";
+ $\f -> "\\f";
+ $\b -> "\\b";
+ _ when C >= 0, C =< 16#10FFFF -> unihex(C);
+ _ -> exit({json_encode, {bad_char, C}})
+ end,
+ [NewC | json_encode_string_unicode_1(Cs)];
+json_encode_string_unicode_1([]) ->
+ "\"".
+
+dehex(C) when C >= $0, C =< $9 ->
+ C - $0;
+dehex(C) when C >= $a, C =< $f ->
+ C - $a + 10;
+dehex(C) when C >= $A, C =< $F ->
+ C - $A + 10.
+
+hexdigit(C) when C >= 0, C =< 9 ->
+ C + $0;
+hexdigit(C) when C =< 15 ->
+ C + $a - 10.
+
+unihex(C) when C < 16#10000 ->
+    <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
+ Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
+ [$\\, $u | Digits];
+unihex(C) when C =< 16#10FFFF ->
+ N = C - 16#10000,
+ S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
+ S2 = 16#dc00 bor (N band 16#3ff),
+ [unihex(S1), unihex(S2)].
+
+json_decode(B, S) when is_binary(B) ->
+ json_decode(binary_to_list(B), S);
+json_decode(L, S) ->
+ {Res, L1, S1} = decode1(L, S),
+ {eof, [], _} = tokenize(L1, S1#decoder{state=trim}),
+ Res.
+
+decode1(L, S=#decoder{state=null}) ->
+ case tokenize(L, S#decoder{state=any}) of
+ {{const, C}, L1, S1} ->
+ {C, L1, S1};
+ {start_array, L1, S1} ->
+ decode_array(L1, S1#decoder{state=any}, []);
+ {start_object, L1, S1} ->
+ decode_object(L1, S1#decoder{state=key}, [])
+ end.
+
+make_object(V, #decoder{object_hook=null}) ->
+ V;
+make_object(V, #decoder{object_hook=Hook}) ->
+ Hook(V).
+
+decode_object(L, S=#decoder{state=key}, Acc) ->
+ case tokenize(L, S) of
+ {end_object, Rest, S1} ->
+ V = make_object({struct, lists:reverse(Acc)}, S1),
+ {V, Rest, S1#decoder{state=null}};
+ {{const, K}, Rest, S1} when is_list(K) ->
+ {colon, L2, S2} = tokenize(Rest, S1),
+ {V, L3, S3} = decode1(L2, S2#decoder{state=null}),
+ decode_object(L3, S3#decoder{state=comma}, [{K, V} | Acc])
+ end;
+decode_object(L, S=#decoder{state=comma}, Acc) ->
+ case tokenize(L, S) of
+ {end_object, Rest, S1} ->
+ V = make_object({struct, lists:reverse(Acc)}, S1),
+ {V, Rest, S1#decoder{state=null}};
+ {comma, Rest, S1} ->
+ decode_object(Rest, S1#decoder{state=key}, Acc)
+ end.
+
+decode_array(L, S=#decoder{state=any}, Acc) ->
+ case tokenize(L, S) of
+ {end_array, Rest, S1} ->
+ {{array, lists:reverse(Acc)}, Rest, S1#decoder{state=null}};
+ {start_array, Rest, S1} ->
+ {Array, Rest1, S2} = decode_array(Rest, S1#decoder{state=any}, []),
+ decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
+ {start_object, Rest, S1} ->
+ {Array, Rest1, S2} = decode_object(Rest, S1#decoder{state=key}, []),
+ decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
+ {{const, Const}, Rest, S1} ->
+ decode_array(Rest, S1#decoder{state=comma}, [Const | Acc])
+ end;
+decode_array(L, S=#decoder{state=comma}, Acc) ->
+ case tokenize(L, S) of
+ {end_array, Rest, S1} ->
+ {{array, lists:reverse(Acc)}, Rest, S1#decoder{state=null}};
+ {comma, Rest, S1} ->
+ decode_array(Rest, S1#decoder{state=any}, Acc)
+ end.
+
+tokenize_string(IoList=[C | _], S=#decoder{input_encoding=utf8}, Acc)
+ when is_list(C); is_binary(C); C >= 16#7f ->
+ List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
+ tokenize_string(List, S#decoder{input_encoding=unicode}, Acc);
+tokenize_string("\"" ++ Rest, S, Acc) ->
+ {lists:reverse(Acc), Rest, ?INC_COL(S)};
+tokenize_string("\\\"" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\" | Acc]);
+tokenize_string("\\\\" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\\ | Acc]);
+tokenize_string("\\/" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$/ | Acc]);
+tokenize_string("\\b" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\b | Acc]);
+tokenize_string("\\f" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\f | Acc]);
+tokenize_string("\\n" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\n | Acc]);
+tokenize_string("\\r" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\r | Acc]);
+tokenize_string("\\t" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\t | Acc]);
+tokenize_string([$\\, $u, C3, C2, C1, C0 | Rest], S, Acc) ->
+ % coalesce UTF-16 surrogate pair?
+ C = dehex(C0) bor
+ (dehex(C1) bsl 4) bor
+ (dehex(C2) bsl 8) bor
+ (dehex(C3) bsl 12),
+ tokenize_string(Rest, ?ADV_COL(S, 6), [C | Acc]);
+tokenize_string([C | Rest], S, Acc) when C >= $\s; C < 16#10FFFF ->
+ tokenize_string(Rest, ?ADV_COL(S, 1), [C | Acc]).
+
+tokenize_number(IoList=[C | _], Mode, S=#decoder{input_encoding=utf8}, Acc)
+ when is_list(C); is_binary(C); C >= 16#7f ->
+ List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
+ tokenize_number(List, Mode, S#decoder{input_encoding=unicode}, Acc);
+tokenize_number([$- | Rest], sign, S, []) ->
+ tokenize_number(Rest, int, ?INC_COL(S), [$-]);
+tokenize_number(Rest, sign, S, []) ->
+ tokenize_number(Rest, int, S, []);
+tokenize_number([$0 | Rest], int, S, Acc) ->
+ tokenize_number(Rest, frac, ?INC_COL(S), [$0 | Acc]);
+tokenize_number([C | Rest], int, S, Acc) when C >= $1, C =< $9 ->
+ tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
+tokenize_number([C | Rest], int1, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
+tokenize_number(Rest, int1, S, Acc) ->
+ tokenize_number(Rest, frac, S, Acc);
+tokenize_number([$., C | Rest], frac, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
+tokenize_number([E | Rest], frac, S, Acc) when E == $e; E == $E ->
+ tokenize_number(Rest, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
+tokenize_number(Rest, frac, S, Acc) ->
+ {{int, lists:reverse(Acc)}, Rest, S};
+tokenize_number([C | Rest], frac1, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, frac1, ?INC_COL(S), [C | Acc]);
+tokenize_number([E | Rest], frac1, S, Acc) when E == $e; E == $E ->
+ tokenize_number(Rest, esign, ?INC_COL(S), [$e | Acc]);
+tokenize_number(Rest, frac1, S, Acc) ->
+ {{float, lists:reverse(Acc)}, Rest, S};
+tokenize_number([C | Rest], esign, S, Acc) when C == $-; C == $+ ->
+ tokenize_number(Rest, eint, ?INC_COL(S), [C | Acc]);
+tokenize_number(Rest, esign, S, Acc) ->
+ tokenize_number(Rest, eint, S, Acc);
+tokenize_number([C | Rest], eint, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
+tokenize_number([C | Rest], eint1, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
+tokenize_number(Rest, eint1, S, Acc) ->
+ {{float, lists:reverse(Acc)}, Rest, S}.
+
+tokenize([], S=#decoder{state=trim}) ->
+ {eof, [], S};
+tokenize([L | Rest], S) when is_list(L) ->
+ tokenize(L ++ Rest, S);
+tokenize([B | Rest], S) when is_binary(B) ->
+ tokenize(xmerl_ucs:from_utf8(B) ++ Rest, S);
+tokenize("\r\n" ++ Rest, S) ->
+ tokenize(Rest, ?INC_LINE(S));
+tokenize("\n" ++ Rest, S) ->
+ tokenize(Rest, ?INC_LINE(S));
+tokenize([C | Rest], S) when C == $\s; C == $\t ->
+ tokenize(Rest, ?INC_COL(S));
+tokenize("{" ++ Rest, S) ->
+ {start_object, Rest, ?INC_COL(S)};
+tokenize("}" ++ Rest, S) ->
+ {end_object, Rest, ?INC_COL(S)};
+tokenize("[" ++ Rest, S) ->
+ {start_array, Rest, ?INC_COL(S)};
+tokenize("]" ++ Rest, S) ->
+ {end_array, Rest, ?INC_COL(S)};
+tokenize("," ++ Rest, S) ->
+ {comma, Rest, ?INC_COL(S)};
+tokenize(":" ++ Rest, S) ->
+ {colon, Rest, ?INC_COL(S)};
+tokenize("null" ++ Rest, S) ->
+ {{const, null}, Rest, ?ADV_COL(S, 4)};
+tokenize("true" ++ Rest, S) ->
+ {{const, true}, Rest, ?ADV_COL(S, 4)};
+tokenize("false" ++ Rest, S) ->
+ {{const, false}, Rest, ?ADV_COL(S, 5)};
+tokenize("\"" ++ Rest, S) ->
+ {String, Rest1, S1} = tokenize_string(Rest, ?INC_COL(S), []),
+ {{const, String}, Rest1, S1};
+tokenize(L=[C | _], S) when C >= $0, C =< $9; C == $- ->
+ case tokenize_number(L, sign, S, []) of
+ {{int, Int}, Rest, S1} ->
+ {{const, list_to_integer(Int)}, Rest, S1};
+ {{float, Float}, Rest, S1} ->
+ {{const, list_to_float(Float)}, Rest, S1}
+ end.
+
+%% testing constructs borrowed from the Yaws JSON implementation.
+
+%% Create an object from a list of Key/Value pairs.
+
+obj_new() ->
+ {struct, []}.
+
+is_obj({struct, Props}) ->
+ F = fun ({K, _}) when is_list(K) ->
+ true;
+ (_) ->
+ false
+ end,
+ lists:all(F, Props).
+
+obj_from_list(Props) ->
+ Obj = {struct, Props},
+ case is_obj(Obj) of
+ true -> Obj;
+ false -> exit(json_bad_object)
+ end.
+
+%% Test for equivalence of Erlang terms.
+%% Due to arbitrary order of construction, equivalent objects might
+%% compare unequal as erlang terms, so we need to carefully recurse
+%% through aggregates (tuples and objects).
+
+equiv({struct, Props1}, {struct, Props2}) ->
+ equiv_object(Props1, Props2);
+equiv({array, L1}, {array, L2}) ->
+ equiv_list(L1, L2);
+equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
+equiv(S1, S2) when is_list(S1), is_list(S2) -> S1 == S2;
+equiv(true, true) -> true;
+equiv(false, false) -> true;
+equiv(null, null) -> true.
+
+%% Object representation and traversal order is unknown.
+%% Use the sledgehammer and sort property lists.
+
+equiv_object(Props1, Props2) ->
+ L1 = lists:keysort(1, Props1),
+ L2 = lists:keysort(1, Props2),
+ Pairs = lists:zip(L1, L2),
+ true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
+ equiv(K1, K2) and equiv(V1, V2)
+ end, Pairs).
+
+%% Recursively compare tuple elements for equivalence.
+
+equiv_list([], []) ->
+ true;
+equiv_list([V1 | L1], [V2 | L2]) ->
+ case equiv(V1, V2) of
+ true ->
+ equiv_list(L1, L2);
+ false ->
+ false
+ end.
+
+test_all() ->
+ test_one(e2j_test_vec(utf8), 1).
+
+test_one([], _N) ->
+ %% io:format("~p tests passed~n", [N-1]),
+ ok;
+test_one([{E, J} | Rest], N) ->
+ %% io:format("[~p] ~p ~p~n", [N, E, J]),
+ true = equiv(E, decode(J)),
+ true = equiv(E, decode(encode(E))),
+ test_one(Rest, 1+N).
+
+e2j_test_vec(unicode) ->
+ [
+ {"foo" ++ [500] ++ "bar", [$", $f, $o, $o, 500, $b, $a, $r, $"]}
+ ];
+e2j_test_vec(utf8) ->
+ [
+ {1, "1"},
+ {3.1416, "3.14160"}, % text representation may truncate, trail zeroes
+ {-1, "-1"},
+ {-3.1416, "-3.14160"},
+ {12.0e10, "1.20000e+11"},
+ {1.234E+10, "1.23400e+10"},
+ {-1.234E-10, "-1.23400e-10"},
+ {10.0, "1.0e+01"},
+ {123.456, "1.23456E+2"},
+ {10.0, "1e1"},
+ {"foo", "\"foo\""},
+ {"foo" ++ [5] ++ "bar", "\"foo\\u0005bar\""},
+ {"", "\"\""},
+ {"\"", "\"\\\"\""},
+ {"\n\n\n", "\"\\n\\n\\n\""},
+ {"\\", "\"\\\\\""},
+ {"\" \b\f\r\n\t\"", "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
+ {obj_new(), "{}"},
+ {obj_from_list([{"foo", "bar"}]), "{\"foo\":\"bar\"}"},
+ {obj_from_list([{"foo", "bar"}, {"baz", 123}]),
+ "{\"foo\":\"bar\",\"baz\":123}"},
+ {{array, []}, "[]"},
+ {{array, [{array, []}]}, "[[]]"},
+ {{array, [1, "foo"]}, "[1,\"foo\"]"},
+
+ % json array in a json object
+ {obj_from_list([{"foo", {array, [123]}}]),
+ "{\"foo\":[123]}"},
+
+ % json object in a json object
+ {obj_from_list([{"foo", obj_from_list([{"bar", true}])}]),
+ "{\"foo\":{\"bar\":true}}"},
+
+ % fold evaluation order
+ {obj_from_list([{"foo", {array, []}},
+ {"bar", obj_from_list([{"baz", true}])},
+ {"alice", "bob"}]),
+ "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
+
+ % json object in a json array
+ {{array, [-123, "foo", obj_from_list([{"bar", {array, []}}]), null]},
+ "[-123,\"foo\",{\"bar\":[]},null]"}
+ ].
diff --git a/deps/mochiweb-r88/src/mochijson2.erl b/deps/mochiweb-r88/src/mochijson2.erl
new file mode 100644
index 0000000..97961c8
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochijson2.erl
@@ -0,0 +1,597 @@
+%% @author Bob Ippolito
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works
+%% with binaries as strings, arrays as lists (without an {array, _})
+%% wrapper and it only knows how to decode UTF-8 (and ASCII).
+
+-module(mochijson2).
+-author('bob@mochimedia.com').
+-export([encoder/1, encode/1]).
+-export([decoder/1, decode/1]).
+-export([test/0]).
+
+% This is a macro to placate syntax highlighters..
+-define(Q, $\").
+-define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset,
+ column=N+S#decoder.column}).
+-define(INC_COL(S), S#decoder{offset=1+S#decoder.offset,
+ column=1+S#decoder.column}).
+-define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset,
+ column=1,
+ line=1+S#decoder.line}).
+-define(INC_CHAR(S, C),
+ case C of
+ $\n ->
+ S#decoder{column=1,
+ line=1+S#decoder.line,
+ offset=1+S#decoder.offset};
+ _ ->
+ S#decoder{column=1+S#decoder.column,
+ offset=1+S#decoder.offset}
+ end).
+-define(IS_WHITESPACE(C),
+ (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
+
+%% @type iolist() = [char() | binary() | iolist()]
+%% @type iodata() = iolist() | binary()
+%% @type json_string() = atom | binary()
+%% @type json_number() = integer() | float()
+%% @type json_array() = [json_term()]
+%% @type json_object() = {struct, [{json_string(), json_term()}]}
+%% @type json_term() = json_string() | json_number() | json_array() |
+%% json_object()
+
+-record(encoder, {handler=null}).
+
+-record(decoder, {object_hook=null,
+ offset=0,
+ line=1,
+ column=1,
+ state=null}).
+
+%% @spec encoder([encoder_option()]) -> function()
+%% @doc Create an encoder/1 with the given options.
+encoder(Options) ->
+ State = parse_encoder_options(Options, #encoder{}),
+ fun (O) -> json_encode(O, State) end.
+
+%% @spec encode(json_term()) -> iolist()
+%% @doc Encode the given as JSON to an iolist.
+encode(Any) ->
+ json_encode(Any, #encoder{}).
+
+%% @spec decoder([decoder_option()]) -> function()
+%% @doc Create a decoder/1 with the given options.
+decoder(Options) ->
+ State = parse_decoder_options(Options, #decoder{}),
+ fun (O) -> json_decode(O, State) end.
+
+%% @spec decode(iolist()) -> json_term()
+%% @doc Decode the given iolist to Erlang terms.
+decode(S) ->
+ json_decode(S, #decoder{}).
+
+test() ->
+ test_all().
+
+%% Internal API
+
+parse_encoder_options([], State) ->
+ State;
+parse_encoder_options([{handler, Handler} | Rest], State) ->
+ parse_encoder_options(Rest, State#encoder{handler=Handler}).
+
+parse_decoder_options([], State) ->
+ State;
+parse_decoder_options([{object_hook, Hook} | Rest], State) ->
+ parse_decoder_options(Rest, State#decoder{object_hook=Hook}).
+
+json_encode(true, _State) ->
+ <<"true">>;
+json_encode(false, _State) ->
+ <<"false">>;
+json_encode(null, _State) ->
+ <<"null">>;
+json_encode(I, _State) when is_integer(I) andalso I >= -2147483648 andalso I =< 2147483647 ->
+ %% Anything outside of 32-bit integers should be encoded as a float
+ integer_to_list(I);
+json_encode(I, _State) when is_integer(I) ->
+ mochinum:digits(float(I));
+json_encode(F, _State) when is_float(F) ->
+ mochinum:digits(F);
+json_encode(S, State) when is_binary(S); is_atom(S) ->
+ json_encode_string(S, State);
+json_encode(Array, State) when is_list(Array) ->
+ json_encode_array(Array, State);
+json_encode({struct, Props}, State) when is_list(Props) ->
+ json_encode_proplist(Props, State);
+json_encode(Bad, #encoder{handler=null}) ->
+ exit({json_encode, {bad_term, Bad}});
+json_encode(Bad, State=#encoder{handler=Handler}) ->
+ json_encode(Handler(Bad), State).
+
+json_encode_array([], _State) ->
+ <<"[]">>;
+json_encode_array(L, State) ->
+ F = fun (O, Acc) ->
+ [$,, json_encode(O, State) | Acc]
+ end,
+ [$, | Acc1] = lists:foldl(F, "[", L),
+ lists:reverse([$\] | Acc1]).
+
+json_encode_proplist([], _State) ->
+ <<"{}">>;
+json_encode_proplist(Props, State) ->
+ F = fun ({K, V}, Acc) ->
+ KS = json_encode_string(K, State),
+ VS = json_encode(V, State),
+ [$,, VS, $:, KS | Acc]
+ end,
+ [$, | Acc1] = lists:foldl(F, "{", Props),
+ lists:reverse([$\} | Acc1]).
+
+json_encode_string(A, _State) when is_atom(A) ->
+ L = atom_to_list(A),
+ case json_string_is_safe(L) of
+ true ->
+ [?Q, L, ?Q];
+ false ->
+ json_encode_string_unicode(xmerl_ucs:from_utf8(L), [?Q])
+ end;
+json_encode_string(B, _State) when is_binary(B) ->
+ case json_bin_is_safe(B) of
+ true ->
+ [?Q, B, ?Q];
+ false ->
+ json_encode_string_unicode(xmerl_ucs:from_utf8(B), [?Q])
+ end;
+json_encode_string(I, _State) when is_integer(I) ->
+ [?Q, integer_to_list(I), ?Q];
+json_encode_string(L, _State) when is_list(L) ->
+ case json_string_is_safe(L) of
+ true ->
+ [?Q, L, ?Q];
+ false ->
+ json_encode_string_unicode(L, [?Q])
+ end.
+
+json_string_is_safe([]) ->
+ true;
+json_string_is_safe([C | Rest]) ->
+ case C of
+ ?Q ->
+ false;
+ $\\ ->
+ false;
+ $\b ->
+ false;
+ $\f ->
+ false;
+ $\n ->
+ false;
+ $\r ->
+ false;
+ $\t ->
+ false;
+ C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+ false;
+ C when C < 16#7f ->
+ json_string_is_safe(Rest);
+ _ ->
+ false
+ end.
+
+json_bin_is_safe(<<>>) ->
+ true;
+json_bin_is_safe(<<C, Rest/binary>>) ->
+ case C of
+ ?Q ->
+ false;
+ $\\ ->
+ false;
+ $\b ->
+ false;
+ $\f ->
+ false;
+ $\n ->
+ false;
+ $\r ->
+ false;
+ $\t ->
+ false;
+ C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+ false;
+ C when C < 16#7f ->
+ json_bin_is_safe(Rest);
+ _ ->
+ false
+ end.
+
+json_encode_string_unicode([], Acc) ->
+ lists:reverse([$\" | Acc]);
+json_encode_string_unicode([C | Cs], Acc) ->
+ Acc1 = case C of
+ ?Q ->
+ [?Q, $\\ | Acc];
+ %% Escaping solidus is only useful when trying to protect
+            %% against "</script>" injection attacks which are only
+ %% possible when JSON is inserted into a HTML document
+ %% in-line. mochijson2 does not protect you from this, so
+ %% if you do insert directly into HTML then you need to
+ %% uncomment the following case or escape the output of encode.
+ %%
+ %% $/ ->
+ %% [$/, $\\ | Acc];
+ %%
+ $\\ ->
+ [$\\, $\\ | Acc];
+ $\b ->
+ [$b, $\\ | Acc];
+ $\f ->
+ [$f, $\\ | Acc];
+ $\n ->
+ [$n, $\\ | Acc];
+ $\r ->
+ [$r, $\\ | Acc];
+ $\t ->
+ [$t, $\\ | Acc];
+ C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+ [unihex(C) | Acc];
+ C when C < 16#7f ->
+ [C | Acc];
+ _ ->
+ exit({json_encode, {bad_char, C}})
+ end,
+ json_encode_string_unicode(Cs, Acc1).
+
+hexdigit(C) when C >= 0, C =< 9 ->
+ C + $0;
+hexdigit(C) when C =< 15 ->
+ C + $a - 10.
+
+unihex(C) when C < 16#10000 ->
+    <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
+ Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
+ [$\\, $u | Digits];
+unihex(C) when C =< 16#10FFFF ->
+ N = C - 16#10000,
+ S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
+ S2 = 16#dc00 bor (N band 16#3ff),
+ [unihex(S1), unihex(S2)].
+
+json_decode(L, S) when is_list(L) ->
+ json_decode(iolist_to_binary(L), S);
+json_decode(B, S) ->
+ {Res, S1} = decode1(B, S),
+ {eof, _} = tokenize(B, S1#decoder{state=trim}),
+ Res.
+
+decode1(B, S=#decoder{state=null}) ->
+ case tokenize(B, S#decoder{state=any}) of
+ {{const, C}, S1} ->
+ {C, S1};
+ {start_array, S1} ->
+ decode_array(B, S1);
+ {start_object, S1} ->
+ decode_object(B, S1)
+ end.
+
+make_object(V, #decoder{object_hook=null}) ->
+ V;
+make_object(V, #decoder{object_hook=Hook}) ->
+ Hook(V).
+
+decode_object(B, S) ->
+ decode_object(B, S#decoder{state=key}, []).
+
+decode_object(B, S=#decoder{state=key}, Acc) ->
+ case tokenize(B, S) of
+ {end_object, S1} ->
+ V = make_object({struct, lists:reverse(Acc)}, S1),
+ {V, S1#decoder{state=null}};
+ {{const, K}, S1} ->
+ {colon, S2} = tokenize(B, S1),
+ {V, S3} = decode1(B, S2#decoder{state=null}),
+ decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc])
+ end;
+decode_object(B, S=#decoder{state=comma}, Acc) ->
+ case tokenize(B, S) of
+ {end_object, S1} ->
+ V = make_object({struct, lists:reverse(Acc)}, S1),
+ {V, S1#decoder{state=null}};
+ {comma, S1} ->
+ decode_object(B, S1#decoder{state=key}, Acc)
+ end.
+
+decode_array(B, S) ->
+ decode_array(B, S#decoder{state=any}, []).
+
+decode_array(B, S=#decoder{state=any}, Acc) ->
+ case tokenize(B, S) of
+ {end_array, S1} ->
+ {lists:reverse(Acc), S1#decoder{state=null}};
+ {start_array, S1} ->
+ {Array, S2} = decode_array(B, S1),
+ decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
+ {start_object, S1} ->
+ {Array, S2} = decode_object(B, S1),
+ decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
+ {{const, Const}, S1} ->
+ decode_array(B, S1#decoder{state=comma}, [Const | Acc])
+ end;
+decode_array(B, S=#decoder{state=comma}, Acc) ->
+ case tokenize(B, S) of
+ {end_array, S1} ->
+ {lists:reverse(Acc), S1#decoder{state=null}};
+ {comma, S1} ->
+ decode_array(B, S1#decoder{state=any}, Acc)
+ end.
+
+tokenize_string(B, S=#decoder{offset=O}) ->
+ case tokenize_string_fast(B, O) of
+ {escape, O1} ->
+ Length = O1 - O,
+ S1 = ?ADV_COL(S, Length),
+ <<_:O/binary, Head:Length/binary, _/binary>> = B,
+ tokenize_string(B, S1, lists:reverse(binary_to_list(Head)));
+ O1 ->
+ Length = O1 - O,
+ <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B,
+ {{const, String}, ?ADV_COL(S, Length + 1)}
+ end.
+
+tokenize_string_fast(B, O) ->
+ case B of
+ <<_:O/binary, ?Q, _/binary>> ->
+ O;
+ <<_:O/binary, C, _/binary>> when C =/= $\\ ->
+ tokenize_string_fast(B, 1 + O);
+ _ ->
+ {escape, O}
+ end.
+
+tokenize_string(B, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, ?Q, _/binary>> ->
+ {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)};
+ <<_:O/binary, "\\\"", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]);
+ <<_:O/binary, "\\\\", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]);
+ <<_:O/binary, "\\/", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]);
+ <<_:O/binary, "\\b", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]);
+ <<_:O/binary, "\\f", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]);
+ <<_:O/binary, "\\n", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]);
+ <<_:O/binary, "\\r", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]);
+ <<_:O/binary, "\\t", _/binary>> ->
+ tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]);
+ <<_:O/binary, "\\u", C3, C2, C1, C0, _/binary>> ->
+ %% coalesce UTF-16 surrogate pair?
+ C = erlang:list_to_integer([C3, C2, C1, C0], 16),
+ Acc1 = lists:reverse(xmerl_ucs:to_utf8(C), Acc),
+ tokenize_string(B, ?ADV_COL(S, 6), Acc1);
+ <<_:O/binary, C, _/binary>> ->
+ tokenize_string(B, ?INC_CHAR(S, C), [C | Acc])
+ end.
+
+tokenize_number(B, S) ->
+ case tokenize_number(B, sign, S, []) of
+ {{int, Int}, S1} ->
+ {{const, list_to_integer(Int)}, S1};
+ {{float, Float}, S1} ->
+ {{const, list_to_float(Float)}, S1}
+ end.
+
+tokenize_number(B, sign, S=#decoder{offset=O}, []) ->
+ case B of
+ <<_:O/binary, $-, _/binary>> ->
+ tokenize_number(B, int, ?INC_COL(S), [$-]);
+ _ ->
+ tokenize_number(B, int, S, [])
+ end;
+tokenize_number(B, int, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, $0, _/binary>> ->
+ tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]);
+ <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 ->
+ tokenize_number(B, int1, ?INC_COL(S), [C | Acc])
+ end;
+tokenize_number(B, int1, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+ tokenize_number(B, int1, ?INC_COL(S), [C | Acc]);
+ _ ->
+ tokenize_number(B, frac, S, Acc)
+ end;
+tokenize_number(B, frac, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 ->
+ tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
+ <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
+ tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
+ _ ->
+ {{int, lists:reverse(Acc)}, S}
+ end;
+tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+ tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]);
+ <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
+ tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]);
+ _ ->
+ {{float, lists:reverse(Acc)}, S}
+ end;
+tokenize_number(B, esign, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ ->
+ tokenize_number(B, eint, ?INC_COL(S), [C | Acc]);
+ _ ->
+ tokenize_number(B, eint, S, Acc)
+ end;
+tokenize_number(B, eint, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+ tokenize_number(B, eint1, ?INC_COL(S), [C | Acc])
+ end;
+tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+ tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]);
+ _ ->
+ {{float, lists:reverse(Acc)}, S}
+ end.
+
+tokenize(B, S=#decoder{offset=O}) ->
+ case B of
+ <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
+ tokenize(B, ?INC_CHAR(S, C));
+ <<_:O/binary, "{", _/binary>> ->
+ {start_object, ?INC_COL(S)};
+ <<_:O/binary, "}", _/binary>> ->
+ {end_object, ?INC_COL(S)};
+ <<_:O/binary, "[", _/binary>> ->
+ {start_array, ?INC_COL(S)};
+ <<_:O/binary, "]", _/binary>> ->
+ {end_array, ?INC_COL(S)};
+ <<_:O/binary, ",", _/binary>> ->
+ {comma, ?INC_COL(S)};
+ <<_:O/binary, ":", _/binary>> ->
+ {colon, ?INC_COL(S)};
+ <<_:O/binary, "null", _/binary>> ->
+ {{const, null}, ?ADV_COL(S, 4)};
+ <<_:O/binary, "true", _/binary>> ->
+ {{const, true}, ?ADV_COL(S, 4)};
+ <<_:O/binary, "false", _/binary>> ->
+ {{const, false}, ?ADV_COL(S, 5)};
+ <<_:O/binary, "\"", _/binary>> ->
+ tokenize_string(B, ?INC_COL(S));
+ <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9)
+ orelse C =:= $- ->
+ tokenize_number(B, S);
+ <<_:O/binary>> ->
+ trim = S#decoder.state,
+ {eof, S}
+ end.
+
+%% testing constructs borrowed from the Yaws JSON implementation.
+
+%% Create an object from a list of Key/Value pairs.
+
+obj_new() ->
+ {struct, []}.
+
+is_obj({struct, Props}) ->
+ F = fun ({K, _}) when is_binary(K) ->
+ true;
+ (_) ->
+ false
+ end,
+ lists:all(F, Props).
+
+obj_from_list(Props) ->
+ Obj = {struct, Props},
+ case is_obj(Obj) of
+ true -> Obj;
+ false -> exit({json_bad_object, Obj})
+ end.
+
+%% Test for equivalence of Erlang terms.
+%% Due to arbitrary order of construction, equivalent objects might
+%% compare unequal as erlang terms, so we need to carefully recurse
+%% through aggregates (tuples and objects).
+
+equiv({struct, Props1}, {struct, Props2}) ->
+ equiv_object(Props1, Props2);
+equiv(L1, L2) when is_list(L1), is_list(L2) ->
+ equiv_list(L1, L2);
+equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
+equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2;
+equiv(true, true) -> true;
+equiv(false, false) -> true;
+equiv(null, null) -> true.
+
+%% Object representation and traversal order is unknown.
+%% Use the sledgehammer and sort property lists.
+
+equiv_object(Props1, Props2) ->
+ L1 = lists:keysort(1, Props1),
+ L2 = lists:keysort(1, Props2),
+ Pairs = lists:zip(L1, L2),
+ true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
+ equiv(K1, K2) and equiv(V1, V2)
+ end, Pairs).
+
+%% Recursively compare tuple elements for equivalence.
+
+equiv_list([], []) ->
+ true;
+equiv_list([V1 | L1], [V2 | L2]) ->
+ case equiv(V1, V2) of
+ true ->
+ equiv_list(L1, L2);
+ false ->
+ false
+ end.
+
+test_all() ->
+ [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>),
+ test_one(e2j_test_vec(utf8), 1).
+
+test_one([], _N) ->
+ %% io:format("~p tests passed~n", [N-1]),
+ ok;
+test_one([{E, J} | Rest], N) ->
+ %% io:format("[~p] ~p ~p~n", [N, E, J]),
+ true = equiv(E, decode(J)),
+ true = equiv(E, decode(encode(E))),
+ test_one(Rest, 1+N).
+
+e2j_test_vec(utf8) ->
+ [
+ {1, "1"},
+ {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes
+ {-1, "-1"},
+ {-3.1416, "-3.14160"},
+ {12.0e10, "1.20000e+11"},
+ {1.234E+10, "1.23400e+10"},
+ {-1.234E-10, "-1.23400e-10"},
+ {10.0, "1.0e+01"},
+ {123.456, "1.23456E+2"},
+ {10.0, "1e1"},
+ {<<"foo">>, "\"foo\""},
+ {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""},
+ {<<"">>, "\"\""},
+ {<<"\n\n\n">>, "\"\\n\\n\\n\""},
+ {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
+ {obj_new(), "{}"},
+ {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"},
+ {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]),
+ "{\"foo\":\"bar\",\"baz\":123}"},
+ {[], "[]"},
+ {[[]], "[[]]"},
+ {[1, <<"foo">>], "[1,\"foo\"]"},
+
+ %% json array in a json object
+ {obj_from_list([{<<"foo">>, [123]}]),
+ "{\"foo\":[123]}"},
+
+ %% json object in a json object
+ {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]),
+ "{\"foo\":{\"bar\":true}}"},
+
+ %% fold evaluation order
+ {obj_from_list([{<<"foo">>, []},
+ {<<"bar">>, obj_from_list([{<<"baz">>, true}])},
+ {<<"alice">>, <<"bob">>}]),
+ "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
+
+ %% json object in a json array
+ {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null],
+ "[-123,\"foo\",{\"bar\":[]},null]"}
+ ].
diff --git a/deps/mochiweb-r88/src/mochinum.erl b/deps/mochiweb-r88/src/mochinum.erl
new file mode 100644
index 0000000..4f88f9a
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochinum.erl
@@ -0,0 +1,289 @@
+%% @copyright 2007 Mochi Media, Inc.
+%% @author Bob Ippolito
+
+%% @doc Useful numeric algorithms for floats that cover some deficiencies
+%% in the math module. More interesting is digits/1, which implements
+%% the algorithm from:
+%% http://www.cs.indiana.edu/~burger/fp/index.html
+%% See also "Printing Floating-Point Numbers Quickly and Accurately"
+%% in Proceedings of the SIGPLAN '96 Conference on Programming Language
+%% Design and Implementation.
+
+-module(mochinum).
+-author("Bob Ippolito <bob@mochimedia.com>").
+-export([digits/1, frexp/1, int_pow/2, int_ceil/1, test/0]).
+
+%% IEEE 754 Float exponent bias
+-define(FLOAT_BIAS, 1022).
+-define(MIN_EXP, -1074).
+-define(BIG_POW, 4503599627370496).
+
+%% External API
+
+%% @spec digits(number()) -> string()
+%% @doc Returns a string that accurately represents the given integer or float
+%% using a conservative amount of digits. Great for generating
+%% human-readable output, or compact ASCII serializations for floats.
+digits(N) when is_integer(N) ->
+ integer_to_list(N);
+digits(0.0) ->
+ "0.0";
+digits(Float) ->
+ {Frac, Exp} = frexp(Float),
+ Exp1 = Exp - 53,
+ Frac1 = trunc(abs(Frac) * (1 bsl 53)),
+ [Place | Digits] = digits1(Float, Exp1, Frac1),
+ R = insert_decimal(Place, [$0 + D || D <- Digits]),
+ case Float < 0 of
+ true ->
+ [$- | R];
+ _ ->
+ R
+ end.
+
+%% @spec frexp(F::float()) -> {Frac::float(), Exp::float()}
+%% @doc Return the fractional and exponent part of an IEEE 754 double,
+%% equivalent to the libc function of the same name.
+%% F = Frac * pow(2, Exp).
+frexp(F) ->
+ frexp1(unpack(F)).
+
+%% @spec int_pow(X::integer(), N::integer()) -> Y::integer()
+%% @doc Moderately efficient way to exponentiate integers.
+%% int_pow(10, 2) = 100.
+int_pow(_X, 0) ->
+ 1;
+int_pow(X, N) when N > 0 ->
+ int_pow(X, N, 1).
+
+%% @spec int_ceil(F::float()) -> integer()
+%% @doc Return the ceiling of F as an integer. The ceiling is defined as
+%% F when F == trunc(F);
+%% trunc(F) when F < 0;
+%% trunc(F) + 1 when F > 0.
+int_ceil(X) ->
+ T = trunc(X),
+ case (X - T) of
+ Neg when Neg < 0 -> T;
+ Pos when Pos > 0 -> T + 1;
+ _ -> T
+ end.
+
+
+%% Internal API
+
+int_pow(X, N, R) when N < 2 ->
+ R * X;
+int_pow(X, N, R) ->
+ int_pow(X * X, N bsr 1, case N band 1 of 1 -> R * X; 0 -> R end).
+
+insert_decimal(0, S) ->
+ "0." ++ S;
+insert_decimal(Place, S) when Place > 0 ->
+ L = length(S),
+ case Place - L of
+ 0 ->
+ S ++ ".0";
+ N when N < 0 ->
+ {S0, S1} = lists:split(L + N, S),
+ S0 ++ "." ++ S1;
+ N when N < 6 ->
+ %% More places than digits
+ S ++ lists:duplicate(N, $0) ++ ".0";
+ _ ->
+ insert_decimal_exp(Place, S)
+ end;
+insert_decimal(Place, S) when Place > -6 ->
+ "0." ++ lists:duplicate(abs(Place), $0) ++ S;
+insert_decimal(Place, S) ->
+ insert_decimal_exp(Place, S).
+
+insert_decimal_exp(Place, S) ->
+ [C | S0] = S,
+ S1 = case S0 of
+ [] ->
+ "0";
+ _ ->
+ S0
+ end,
+ Exp = case Place < 0 of
+ true ->
+ "e-";
+ false ->
+ "e+"
+ end,
+ [C] ++ "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)).
+
+
+digits1(Float, Exp, Frac) ->
+ Round = ((Frac band 1) =:= 0),
+ case Exp >= 0 of
+ true ->
+ BExp = 1 bsl Exp,
+ case (Frac /= ?BIG_POW) of
+ true ->
+ scale((Frac * BExp * 2), 2, BExp, BExp,
+ Round, Round, Float);
+ false ->
+ scale((Frac * BExp * 4), 4, (BExp * 2), BExp,
+ Round, Round, Float)
+ end;
+ false ->
+ case (Exp == ?MIN_EXP) orelse (Frac /= ?BIG_POW) of
+ true ->
+ scale((Frac * 2), 1 bsl (1 - Exp), 1, 1,
+ Round, Round, Float);
+ false ->
+ scale((Frac * 4), 1 bsl (2 - Exp), 2, 1,
+ Round, Round, Float)
+ end
+ end.
+
+scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) ->
+ Est = int_ceil(math:log10(abs(Float)) - 1.0e-10),
+ %% Note that the scheme implementation uses a 326 element look-up table
+ %% for int_pow(10, N) where we do not.
+ case Est >= 0 of
+ true ->
+ fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est,
+ LowOk, HighOk);
+ false ->
+ Scale = int_pow(10, -Est),
+ fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est,
+ LowOk, HighOk)
+ end.
+
+fixup(R, S, MPlus, MMinus, K, LowOk, HighOk) ->
+ TooLow = case HighOk of
+ true ->
+ (R + MPlus) >= S;
+ false ->
+ (R + MPlus) > S
+ end,
+ case TooLow of
+ true ->
+ [(K + 1) | generate(R, S, MPlus, MMinus, LowOk, HighOk)];
+ false ->
+ [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, HighOk)]
+ end.
+
+generate(R0, S, MPlus, MMinus, LowOk, HighOk) ->
+ D = R0 div S,
+ R = R0 rem S,
+ TC1 = case LowOk of
+ true ->
+ R =< MMinus;
+ false ->
+ R < MMinus
+ end,
+ TC2 = case HighOk of
+ true ->
+ (R + MPlus) >= S;
+ false ->
+ (R + MPlus) > S
+ end,
+ case TC1 of
+ false ->
+ case TC2 of
+ false ->
+ [D | generate(R * 10, S, MPlus * 10, MMinus * 10,
+ LowOk, HighOk)];
+ true ->
+ [D + 1]
+ end;
+ true ->
+ case TC2 of
+ false ->
+ [D];
+ true ->
+ case R * 2 < S of
+ true ->
+ [D];
+ false ->
+ [D + 1]
+ end
+ end
+ end.
+
+unpack(Float) ->
+    <<Sign:1, Exp:11, Frac:52>> = <<Float:64/float>>,
+ {Sign, Exp, Frac}.
+
+frexp1({_Sign, 0, 0}) ->
+ {0.0, 0};
+frexp1({Sign, 0, Frac}) ->
+ Exp = log2floor(Frac),
+    <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, (Frac-1):52>>,
+ {Frac1, -(?FLOAT_BIAS) - 52 + Exp};
+frexp1({Sign, Exp, Frac}) ->
+    <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, Frac:52>>,
+ {Frac1, Exp - ?FLOAT_BIAS}.
+
+log2floor(Int) ->
+ log2floor(Int, 0).
+
+log2floor(0, N) ->
+ N;
+log2floor(Int, N) ->
+ log2floor(Int bsr 1, 1 + N).
+
+
+test() ->
+ ok = test_frexp(),
+ ok = test_int_ceil(),
+ ok = test_int_pow(),
+ ok = test_digits(),
+ ok.
+
+test_int_ceil() ->
+ 1 = int_ceil(0.0001),
+ 0 = int_ceil(0.0),
+ 1 = int_ceil(0.99),
+ 1 = int_ceil(1.0),
+ -1 = int_ceil(-1.5),
+ -2 = int_ceil(-2.0),
+ ok.
+
+test_int_pow() ->
+ 1 = int_pow(1, 1),
+ 1 = int_pow(1, 0),
+ 1 = int_pow(10, 0),
+ 10 = int_pow(10, 1),
+ 100 = int_pow(10, 2),
+ 1000 = int_pow(10, 3),
+ ok.
+
+test_digits() ->
+ "0" = digits(0),
+ "0.0" = digits(0.0),
+ "1.0" = digits(1.0),
+ "-1.0" = digits(-1.0),
+ "0.1" = digits(0.1),
+ "0.01" = digits(0.01),
+ "0.001" = digits(0.001),
+ ok.
+
+test_frexp() ->
+ %% zero
+ {0.0, 0} = frexp(0.0),
+ %% one
+ {0.5, 1} = frexp(1.0),
+ %% negative one
+ {-0.5, 1} = frexp(-1.0),
+ %% small denormalized number
+ %% 4.94065645841246544177e-324
+    <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
+ {0.5, -1073} = frexp(SmallDenorm),
+ %% large denormalized number
+ %% 2.22507385850720088902e-308
+    <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
+ {0.99999999999999978, -1022} = frexp(BigDenorm),
+ %% small normalized number
+ %% 2.22507385850720138309e-308
+    <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
+ {0.5, -1021} = frexp(SmallNorm),
+ %% large normalized number
+ %% 1.79769313486231570815e+308
+    <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
+ {0.99999999999999989, 1024} = frexp(LargeNorm),
+ ok.
diff --git a/deps/mochiweb-r88/src/mochiweb.app b/deps/mochiweb-r88/src/mochiweb.app
new file mode 100644
index 0000000..cd8dbb2
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb.app
@@ -0,0 +1,32 @@
+{application, mochiweb,
+ [{description, "MochiMedia Web Server"},
+ {vsn, "0.01"},
+ {modules, [
+ mochihex,
+ mochijson,
+ mochijson2,
+ mochinum,
+ mochiweb,
+ mochiweb_app,
+ mochiweb_charref,
+ mochiweb_cookies,
+ mochiweb_echo,
+ mochiweb_headers,
+ mochiweb_html,
+ mochiweb_http,
+ mochiweb_multipart,
+ mochiweb_request,
+ mochiweb_response,
+ mochiweb_skel,
+ mochiweb_socket_server,
+ mochiweb_sup,
+ mochiweb_util,
+ reloader,
+ mochifmt,
+ mochifmt_std,
+ mochifmt_records
+ ]},
+ {registered, []},
+ {mod, {mochiweb_app, []}},
+ {env, []},
+ {applications, [kernel, stdlib]}]}.
diff --git a/deps/mochiweb-r88/src/mochiweb.erl b/deps/mochiweb-r88/src/mochiweb.erl
new file mode 100644
index 0000000..0f4d52a
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb.erl
@@ -0,0 +1,110 @@
+%% @author Bob Ippolito
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Start and stop the MochiWeb server.
+
+-module(mochiweb).
+-author('bob@mochimedia.com').
+
+-export([start/0, stop/0]).
+-export([new_request/1, new_response/1]).
+-export([all_loaded/0, all_loaded/1, reload/0]).
+-export([test/0]).
+
+%% @spec start() -> ok
+%% @doc Start the MochiWeb server.
+start() ->
+ ensure_started(crypto),
+ application:start(mochiweb).
+
+%% @spec stop() -> ok
+%% @doc Stop the MochiWeb server.
+stop() ->
+ Res = application:stop(mochiweb),
+ application:stop(crypto),
+ Res.
+
+%% @spec test() -> ok
+%% @doc Run all of the tests for MochiWeb.
+test() ->
+ mochiweb_util:test(),
+ mochiweb_headers:test(),
+ mochiweb_cookies:test(),
+ mochihex:test(),
+ mochinum:test(),
+ mochijson:test(),
+ mochiweb_charref:test(),
+ mochiweb_html:test(),
+ mochifmt:test(),
+ test_request(),
+ ok.
+
+reload() ->
+ [c:l(Module) || Module <- all_loaded()].
+
+all_loaded() ->
+ all_loaded(filename:dirname(code:which(?MODULE))).
+
+all_loaded(Base) when is_atom(Base) ->
+ [];
+all_loaded(Base) ->
+ FullBase = Base ++ "/",
+ F = fun ({_Module, Loaded}, Acc) when is_atom(Loaded) ->
+ Acc;
+ ({Module, Loaded}, Acc) ->
+ case lists:prefix(FullBase, Loaded) of
+ true ->
+ [Module | Acc];
+ false ->
+ Acc
+ end
+ end,
+ lists:foldl(F, [], code:all_loaded()).
+
+
+%% @spec new_request({Socket, Request, Headers}) -> MochiWebRequest
+%% @doc Return a mochiweb_request data structure.
+new_request({Socket, {Method, {abs_path, Uri}, Version}, Headers}) ->
+ mochiweb_request:new(Socket,
+ Method,
+ Uri,
+ Version,
+ mochiweb_headers:make(Headers));
+% this case probably doesn't "exist".
+new_request({Socket, {Method, {absoluteURI, _Protocol, _Host, _Port, Uri},
+ Version}, Headers}) ->
+ mochiweb_request:new(Socket,
+ Method,
+ Uri,
+ Version,
+ mochiweb_headers:make(Headers));
+%% Request-URI is "*"
+%% From http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
+new_request({Socket, {Method, '*'=Uri, Version}, Headers}) ->
+ mochiweb_request:new(Socket,
+ Method,
+ Uri,
+ Version,
+ mochiweb_headers:make(Headers)).
+
+%% @spec new_response({Request, integer(), Headers}) -> MochiWebResponse
+%% @doc Return a mochiweb_response data structure.
+new_response({Request, Code, Headers}) ->
+ mochiweb_response:new(Request,
+ Code,
+ mochiweb_headers:make(Headers)).
+
+%% Internal API
+
+test_request() ->
+ R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []),
+ "/foo/bar/baz wibble quux" = R:get(path),
+ ok.
+
+ensure_started(App) ->
+ case application:start(App) of
+ ok ->
+ ok;
+ {error, {already_started, App}} ->
+ ok
+ end.
diff --git a/deps/mochiweb-r88/src/mochiweb_app.erl b/deps/mochiweb-r88/src/mochiweb_app.erl
new file mode 100644
index 0000000..2b437f6
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb_app.erl
@@ -0,0 +1,20 @@
+%% @author Bob Ippolito
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Callbacks for the mochiweb application.
+
+-module(mochiweb_app).
+-author('bob@mochimedia.com').
+
+-behaviour(application).
+-export([start/2,stop/1]).
+
+%% @spec start(_Type, _StartArgs) -> ServerRet
+%% @doc application start callback for mochiweb.
+start(_Type, _StartArgs) ->
+ mochiweb_sup:start_link().
+
+%% @spec stop(_State) -> ServerRet
+%% @doc application stop callback for mochiweb.
+stop(_State) ->
+ ok.
diff --git a/deps/mochiweb-r88/src/mochiweb_charref.erl b/deps/mochiweb-r88/src/mochiweb_charref.erl
new file mode 100644
index 0000000..59fd4a4
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb_charref.erl
@@ -0,0 +1,295 @@
+%% @author Bob Ippolito
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Converts HTML 4 charrefs and entities to codepoints.
+-module(mochiweb_charref).
+-export([charref/1, test/0]).
+
+%% External API.
+
+%% @spec charref(S) -> integer() | undefined
+%% @doc Convert a decimal charref, hex charref, or html entity to a unicode
+%% codepoint, or return undefined on failure.
+%% The input should not include an ampersand or semicolon.
+%% charref("#38") = 38, charref("#x26") = 38, charref("amp") = 38.
+charref(B) when is_binary(B) ->
+ charref(binary_to_list(B));
+charref([$#, C | L]) when C =:= $x orelse C =:= $X ->
+ try erlang:list_to_integer(L, 16)
+ catch
+ error:badarg -> undefined
+ end;
+charref([$# | L]) ->
+ try list_to_integer(L)
+ catch
+ error:badarg -> undefined
+ end;
+charref(L) ->
+ entity(L).
+
+%% @spec test() -> ok
+%% @doc Run tests for mochiweb_charref.
+test() ->
+ 1234 = charref("#1234"),
+ 255 = charref("#xfF"),
+ 255 = charref("#XFf"),
+ 38 = charref("amp"),
+ undefined = charref("not_an_entity"),
+ ok.
+
+%% Internal API.
+
+entity("nbsp") -> 160;
+entity("iexcl") -> 161;
+entity("cent") -> 162;
+entity("pound") -> 163;
+entity("curren") -> 164;
+entity("yen") -> 165;
+entity("brvbar") -> 166;
+entity("sect") -> 167;
+entity("uml") -> 168;
+entity("copy") -> 169;
+entity("ordf") -> 170;
+entity("laquo") -> 171;
+entity("not") -> 172;
+entity("shy") -> 173;
+entity("reg") -> 174;
+entity("macr") -> 175;
+entity("deg") -> 176;
+entity("plusmn") -> 177;
+entity("sup2") -> 178;
+entity("sup3") -> 179;
+entity("acute") -> 180;
+entity("micro") -> 181;
+entity("para") -> 182;
+entity("middot") -> 183;
+entity("cedil") -> 184;
+entity("sup1") -> 185;
+entity("ordm") -> 186;
+entity("raquo") -> 187;
+entity("frac14") -> 188;
+entity("frac12") -> 189;
+entity("frac34") -> 190;
+entity("iquest") -> 191;
+entity("Agrave") -> 192;
+entity("Aacute") -> 193;
+entity("Acirc") -> 194;
+entity("Atilde") -> 195;
+entity("Auml") -> 196;
+entity("Aring") -> 197;
+entity("AElig") -> 198;
+entity("Ccedil") -> 199;
+entity("Egrave") -> 200;
+entity("Eacute") -> 201;
+entity("Ecirc") -> 202;
+entity("Euml") -> 203;
+entity("Igrave") -> 204;
+entity("Iacute") -> 205;
+entity("Icirc") -> 206;
+entity("Iuml") -> 207;
+entity("ETH") -> 208;
+entity("Ntilde") -> 209;
+entity("Ograve") -> 210;
+entity("Oacute") -> 211;
+entity("Ocirc") -> 212;
+entity("Otilde") -> 213;
+entity("Ouml") -> 214;
+entity("times") -> 215;
+entity("Oslash") -> 216;
+entity("Ugrave") -> 217;
+entity("Uacute") -> 218;
+entity("Ucirc") -> 219;
+entity("Uuml") -> 220;
+entity("Yacute") -> 221;
+entity("THORN") -> 222;
+entity("szlig") -> 223;
+entity("agrave") -> 224;
+entity("aacute") -> 225;
+entity("acirc") -> 226;
+entity("atilde") -> 227;
+entity("auml") -> 228;
+entity("aring") -> 229;
+entity("aelig") -> 230;
+entity("ccedil") -> 231;
+entity("egrave") -> 232;
+entity("eacute") -> 233;
+entity("ecirc") -> 234;
+entity("euml") -> 235;
+entity("igrave") -> 236;
+entity("iacute") -> 237;
+entity("icirc") -> 238;
+entity("iuml") -> 239;
+entity("eth") -> 240;
+entity("ntilde") -> 241;
+entity("ograve") -> 242;
+entity("oacute") -> 243;
+entity("ocirc") -> 244;
+entity("otilde") -> 245;
+entity("ouml") -> 246;
+entity("divide") -> 247;
+entity("oslash") -> 248;
+entity("ugrave") -> 249;
+entity("uacute") -> 250;
+entity("ucirc") -> 251;
+entity("uuml") -> 252;
+entity("yacute") -> 253;
+entity("thorn") -> 254;
+entity("yuml") -> 255;
+entity("fnof") -> 402;
+entity("Alpha") -> 913;
+entity("Beta") -> 914;
+entity("Gamma") -> 915;
+entity("Delta") -> 916;
+entity("Epsilon") -> 917;
+entity("Zeta") -> 918;
+entity("Eta") -> 919;
+entity("Theta") -> 920;
+entity("Iota") -> 921;
+entity("Kappa") -> 922;
+entity("Lambda") -> 923;
+entity("Mu") -> 924;
+entity("Nu") -> 925;
+entity("Xi") -> 926;
+entity("Omicron") -> 927;
+entity("Pi") -> 928;
+entity("Rho") -> 929;
+entity("Sigma") -> 931;
+entity("Tau") -> 932;
+entity("Upsilon") -> 933;
+entity("Phi") -> 934;
+entity("Chi") -> 935;
+entity("Psi") -> 936;
+entity("Omega") -> 937;
+entity("alpha") -> 945;
+entity("beta") -> 946;
+entity("gamma") -> 947;
+entity("delta") -> 948;
+entity("epsilon") -> 949;
+entity("zeta") -> 950;
+entity("eta") -> 951;
+entity("theta") -> 952;
+entity("iota") -> 953;
+entity("kappa") -> 954;
+entity("lambda") -> 955;
+entity("mu") -> 956;
+entity("nu") -> 957;
+entity("xi") -> 958;
+entity("omicron") -> 959;
+entity("pi") -> 960;
+entity("rho") -> 961;
+entity("sigmaf") -> 962;
+entity("sigma") -> 963;
+entity("tau") -> 964;
+entity("upsilon") -> 965;
+entity("phi") -> 966;
+entity("chi") -> 967;
+entity("psi") -> 968;
+entity("omega") -> 969;
+entity("thetasym") -> 977;
+entity("upsih") -> 978;
+entity("piv") -> 982;
+entity("bull") -> 8226;
+entity("hellip") -> 8230;
+entity("prime") -> 8242;
+entity("Prime") -> 8243;
+entity("oline") -> 8254;
+entity("frasl") -> 8260;
+entity("weierp") -> 8472;
+entity("image") -> 8465;
+entity("real") -> 8476;
+entity("trade") -> 8482;
+entity("alefsym") -> 8501;
+entity("larr") -> 8592;
+entity("uarr") -> 8593;
+entity("rarr") -> 8594;
+entity("darr") -> 8595;
+entity("harr") -> 8596;
+entity("crarr") -> 8629;
+entity("lArr") -> 8656;
+entity("uArr") -> 8657;
+entity("rArr") -> 8658;
+entity("dArr") -> 8659;
+entity("hArr") -> 8660;
+entity("forall") -> 8704;
+entity("part") -> 8706;
+entity("exist") -> 8707;
+entity("empty") -> 8709;
+entity("nabla") -> 8711;
+entity("isin") -> 8712;
+entity("notin") -> 8713;
+entity("ni") -> 8715;
+entity("prod") -> 8719;
+entity("sum") -> 8721;
+entity("minus") -> 8722;
+entity("lowast") -> 8727;
+entity("radic") -> 8730;
+entity("prop") -> 8733;
+entity("infin") -> 8734;
+entity("ang") -> 8736;
+entity("and") -> 8743;
+entity("or") -> 8744;
+entity("cap") -> 8745;
+entity("cup") -> 8746;
+entity("int") -> 8747;
+entity("there4") -> 8756;
+entity("sim") -> 8764;
+entity("cong") -> 8773;
+entity("asymp") -> 8776;
+entity("ne") -> 8800;
+entity("equiv") -> 8801;
+entity("le") -> 8804;
+entity("ge") -> 8805;
+entity("sub") -> 8834;
+entity("sup") -> 8835;
+entity("nsub") -> 8836;
+entity("sube") -> 8838;
+entity("supe") -> 8839;
+entity("oplus") -> 8853;
+entity("otimes") -> 8855;
+entity("perp") -> 8869;
+entity("sdot") -> 8901;
+entity("lceil") -> 8968;
+entity("rceil") -> 8969;
+entity("lfloor") -> 8970;
+entity("rfloor") -> 8971;
+entity("lang") -> 9001;
+entity("rang") -> 9002;
+entity("loz") -> 9674;
+entity("spades") -> 9824;
+entity("clubs") -> 9827;
+entity("hearts") -> 9829;
+entity("diams") -> 9830;
+entity("quot") -> 34;
+entity("amp") -> 38;
+entity("lt") -> 60;
+entity("gt") -> 62;
+entity("OElig") -> 338;
+entity("oelig") -> 339;
+entity("Scaron") -> 352;
+entity("scaron") -> 353;
+entity("Yuml") -> 376;
+entity("circ") -> 710;
+entity("tilde") -> 732;
+entity("ensp") -> 8194;
+entity("emsp") -> 8195;
+entity("thinsp") -> 8201;
+entity("zwnj") -> 8204;
+entity("zwj") -> 8205;
+entity("lrm") -> 8206;
+entity("rlm") -> 8207;
+entity("ndash") -> 8211;
+entity("mdash") -> 8212;
+entity("lsquo") -> 8216;
+entity("rsquo") -> 8217;
+entity("sbquo") -> 8218;
+entity("ldquo") -> 8220;
+entity("rdquo") -> 8221;
+entity("bdquo") -> 8222;
+entity("dagger") -> 8224;
+entity("Dagger") -> 8225;
+entity("permil") -> 8240;
+entity("lsaquo") -> 8249;
+entity("rsaquo") -> 8250;
+entity("euro") -> 8364;
+entity(_) -> undefined.
+
diff --git a/deps/mochiweb-r88/src/mochiweb_cookies.erl b/deps/mochiweb-r88/src/mochiweb_cookies.erl
new file mode 100644
index 0000000..acea12a
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb_cookies.erl
@@ -0,0 +1,250 @@
+%% @author Emad El-Haraty
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc HTTP Cookie parsing and generating (RFC 2109, RFC 2965).
+
+-module(mochiweb_cookies).
+-export([parse_cookie/1, cookie/3, cookie/2, test/0]).
+
+-define(QUOTE, $\").
+
+-define(IS_WHITESPACE(C),
+ (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
+
+%% RFC 2616 separators (called tspecials in RFC 2068)
+-define(IS_SEPARATOR(C),
+ (C < 32 orelse
+ C =:= $\s orelse C =:= $\t orelse
+ C =:= $( orelse C =:= $) orelse C =:= $< orelse C =:= $> orelse
+ C =:= $@ orelse C =:= $, orelse C =:= $; orelse C =:= $: orelse
+ C =:= $\\ orelse C =:= $\" orelse C =:= $/ orelse
+ C =:= $[ orelse C =:= $] orelse C =:= $? orelse C =:= $= orelse
+ C =:= ${ orelse C =:= $})).
+
+%% @type proplist() = [{Key::string(), Value::string()}].
+%% @type header() = {Name::string(), Value::string()}.
+
+%% @spec cookie(Key::string(), Value::string()) -> header()
+%% @doc Short-hand for cookie(Key, Value, []).
+cookie(Key, Value) ->
+ cookie(Key, Value, []).
+
+%% @spec cookie(Key::string(), Value::string(), Options::[Option]) -> header()
+%% where Option = {max_age, integer()} | {local_time, {date(), time()}}
+%% | {domain, string()} | {path, string()}
+%% | {secure, true | false}
+%%
+%% @doc Generate a Set-Cookie header field tuple.
+cookie(Key, Value, Options) ->
+ Cookie = [any_to_list(Key), "=", quote(Value), "; Version=1"],
+ %% Set-Cookie:
+ %% Comment, Domain, Max-Age, Path, Secure, Version
+ %% Set-Cookie2:
+ %% Comment, CommentURL, Discard, Domain, Max-Age, Path, Port, Secure,
+ %% Version
+ ExpiresPart =
+ case proplists:get_value(max_age, Options) of
+ undefined ->
+ "";
+ RawAge ->
+ When = case proplists:get_value(local_time, Options) of
+ undefined ->
+ calendar:local_time();
+ LocalTime ->
+ LocalTime
+ end,
+ Age = case RawAge < 0 of
+ true ->
+ 0;
+ false ->
+ RawAge
+ end,
+ ["; Expires=", age_to_cookie_date(Age, When),
+ "; Max-Age=", quote(Age)]
+ end,
+ SecurePart =
+ case proplists:get_value(secure, Options) of
+ true ->
+ "; Secure";
+ _ ->
+ ""
+ end,
+ DomainPart =
+ case proplists:get_value(domain, Options) of
+ undefined ->
+ "";
+ Domain ->
+ ["; Domain=", quote(Domain)]
+ end,
+ PathPart =
+ case proplists:get_value(path, Options) of
+ undefined ->
+ "";
+ Path ->
+ ["; Path=", quote(Path)]
+ end,
+ CookieParts = [Cookie, ExpiresPart, SecurePart, DomainPart, PathPart],
+ {"Set-Cookie", lists:flatten(CookieParts)}.
+
+
+%% Every major browser incorrectly handles quoted strings in a
+%% different and (worse) incompatible manner. Instead of wasting time
+%% writing redundant code for each browser, we restrict cookies to
+%% only contain characters that browsers handle compatibly.
+%%
+%% By replacing the definition of quote with this, we generate
+%% RFC-compliant cookies:
+%%
+%% quote(V) ->
+%% Fun = fun(?QUOTE, Acc) -> [$\\, ?QUOTE | Acc];
+%% (Ch, Acc) -> [Ch | Acc]
+%% end,
+%% [?QUOTE | lists:foldr(Fun, [?QUOTE], V)].
+
+%% Convert to a string and raise an error if quoting is required.
+quote(V0) ->
+ V = any_to_list(V0),
+ lists:all(fun(Ch) -> Ch =:= $/ orelse not ?IS_SEPARATOR(Ch) end, V)
+ orelse erlang:error({cookie_quoting_required, V}),
+ V.
+
+add_seconds(Secs, LocalTime) ->
+ Greg = calendar:datetime_to_gregorian_seconds(LocalTime),
+ calendar:gregorian_seconds_to_datetime(Greg + Secs).
+
+age_to_cookie_date(Age, LocalTime) ->
+ httpd_util:rfc1123_date(add_seconds(Age, LocalTime)).
+
+%% @spec parse_cookie(string()) -> [{K::string(), V::string()}]
+%% @doc Parse the contents of a Cookie header field, ignoring cookie
+%% attributes, and return a simple property list.
+parse_cookie("") ->
+ [];
+parse_cookie(Cookie) ->
+ parse_cookie(Cookie, []).
+
+%% @spec test() -> ok
+%% @doc Run tests for mochiweb_cookies.
+test() ->
+ parse_cookie_test(),
+ cookie_test(),
+ ok.
+
+%% Internal API
+
+parse_cookie([], Acc) ->
+ lists:reverse(Acc);
+parse_cookie(String, Acc) ->
+ {{Token, Value}, Rest} = read_pair(String),
+ Acc1 = case Token of
+ "" ->
+ Acc;
+ "$" ++ _ ->
+ Acc;
+ _ ->
+ [{Token, Value} | Acc]
+ end,
+ parse_cookie(Rest, Acc1).
+
+read_pair(String) ->
+ {Token, Rest} = read_token(skip_whitespace(String)),
+ {Value, Rest1} = read_value(skip_whitespace(Rest)),
+ {{Token, Value}, skip_past_separator(Rest1)}.
+
+read_value([$= | Value]) ->
+ Value1 = skip_whitespace(Value),
+ case Value1 of
+ [?QUOTE | _] ->
+ read_quoted(Value1);
+ _ ->
+ read_token(Value1)
+ end;
+read_value(String) ->
+ {"", String}.
+
+read_quoted([?QUOTE | String]) ->
+ read_quoted(String, []).
+
+read_quoted([], Acc) ->
+ {lists:reverse(Acc), []};
+read_quoted([?QUOTE | Rest], Acc) ->
+ {lists:reverse(Acc), Rest};
+read_quoted([$\\, Any | Rest], Acc) ->
+ read_quoted(Rest, [Any | Acc]);
+read_quoted([C | Rest], Acc) ->
+ read_quoted(Rest, [C | Acc]).
+
+skip_whitespace(String) ->
+ F = fun (C) -> ?IS_WHITESPACE(C) end,
+ lists:dropwhile(F, String).
+
+read_token(String) ->
+ F = fun (C) -> not ?IS_SEPARATOR(C) end,
+ lists:splitwith(F, String).
+
+skip_past_separator([]) ->
+ [];
+skip_past_separator([$; | Rest]) ->
+ Rest;
+skip_past_separator([$, | Rest]) ->
+ Rest;
+skip_past_separator([_ | Rest]) ->
+ skip_past_separator(Rest).
+
+parse_cookie_test() ->
+ %% RFC example
+ C1 = "$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\";
+ Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\";
+ Shipping=\"FedEx\"; $Path=\"/acme\"",
+ [
+ {"Customer","WILE_E_COYOTE"},
+ {"Part_Number","Rocket_Launcher_0001"},
+ {"Shipping","FedEx"}
+ ] = parse_cookie(C1),
+ %% Potential edge cases
+ [{"foo", "x"}] = parse_cookie("foo=\"\\x\""),
+ [] = parse_cookie("="),
+ [{"foo", ""}, {"bar", ""}] = parse_cookie(" foo ; bar "),
+ [{"foo", ""}, {"bar", ""}] = parse_cookie("foo=;bar="),
+ [{"foo", "\";"}, {"bar", ""}] = parse_cookie("foo = \"\\\";\";bar "),
+ [{"foo", "\";bar"}] = parse_cookie("foo=\"\\\";bar").
+
+any_to_list(V) when is_list(V) ->
+ V;
+any_to_list(V) when is_atom(V) ->
+ atom_to_list(V);
+any_to_list(V) when is_binary(V) ->
+ binary_to_list(V);
+any_to_list(V) when is_integer(V) ->
+ integer_to_list(V).
+
+
+cookie_test() ->
+ C1 = {"Set-Cookie",
+ "Customer=WILE_E_COYOTE; "
+ "Version=1; "
+ "Path=/acme"},
+ C1 = cookie("Customer", "WILE_E_COYOTE", [{path, "/acme"}]),
+ C1 = cookie("Customer", "WILE_E_COYOTE",
+ [{path, "/acme"}, {badoption, "negatory"}]),
+ C1 = cookie('Customer', 'WILE_E_COYOTE', [{path, '/acme'}]),
+ C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, [{path, <<"/acme">>}]),
+
+ {"Set-Cookie","=NoKey; Version=1"} = cookie("", "NoKey", []),
+
+ LocalTime = calendar:universal_time_to_local_time({{2007, 5, 15}, {13, 45, 33}}),
+ C2 = {"Set-Cookie",
+ "Customer=WILE_E_COYOTE; "
+ "Version=1; "
+ "Expires=Tue, 15 May 2007 13:45:33 GMT; "
+ "Max-Age=0"},
+ C2 = cookie("Customer", "WILE_E_COYOTE",
+ [{max_age, -111}, {local_time, LocalTime}]),
+ C3 = {"Set-Cookie",
+ "Customer=WILE_E_COYOTE; "
+ "Version=1; "
+ "Expires=Wed, 16 May 2007 13:45:50 GMT; "
+ "Max-Age=86417"},
+ C3 = cookie("Customer", "WILE_E_COYOTE",
+ [{max_age, 86417}, {local_time, LocalTime}]),
+ ok.
diff --git a/deps/mochiweb-r88/src/mochiweb_echo.erl b/deps/mochiweb-r88/src/mochiweb_echo.erl
new file mode 100644
index 0000000..f0c455a
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb_echo.erl
@@ -0,0 +1,31 @@
+%% @author Bob Ippolito
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Simple and stupid echo server to demo mochiweb_socket_server.
+
+-module(mochiweb_echo).
+-author('bob@mochimedia.com').
+-export([start/0, stop/0, loop/1]).
+
+%% @doc Stop the echo server registered under this module's name.
+stop() ->
+ mochiweb_socket_server:stop(?MODULE).
+
+%% @doc Start the echo server on 127.0.0.1:6789 with at most one
+%% concurrent connection; each accepted socket is handled by loop/1.
+start() ->
+ mochiweb_socket_server:start([{name, ?MODULE},
+ {port, 6789},
+ {ip, "127.0.0.1"},
+ {max, 1},
+ {loop, {?MODULE, loop}}]).
+
+%% @doc Echo loop: wait up to 30s for data on Socket, write it back
+%% verbatim, and repeat. Exits normally on receive timeout, a closed
+%% socket, or a send failure.
+loop(Socket) ->
+    case gen_tcp:recv(Socket, 0, 30000) of
+        {ok, Data} ->
+            echo_back(Socket, Data);
+        _RecvError ->
+            exit(normal)
+    end.
+
+%% Write Data back to the peer; on success continue the receive loop.
+echo_back(Socket, Data) ->
+    case gen_tcp:send(Socket, Data) of
+        ok ->
+            loop(Socket);
+        _SendError ->
+            exit(normal)
+    end.
diff --git a/deps/mochiweb-r88/src/mochiweb_headers.erl b/deps/mochiweb-r88/src/mochiweb_headers.erl
new file mode 100644
index 0000000..df67c7d
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb_headers.erl
@@ -0,0 +1,178 @@
+%% @author Bob Ippolito
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Case preserving (but case insensitive) HTTP Header dictionary.
+
+-module(mochiweb_headers).
+-author('bob@mochimedia.com').
+-export([empty/0, from_list/1, insert/3, enter/3, get_value/2, lookup/2]).
+-export([get_primary_value/2]).
+-export([default/3, enter_from_list/2, default_from_list/2]).
+-export([to_list/1, make/1]).
+-export([test/0]).
+
+%% @type headers().
+%% @type key() = atom() | binary() | string().
+%% @type value() = atom() | binary() | string() | integer().
+
+%% @spec test() -> ok
+%% @doc Run tests for this module.
+test() ->
+ H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
+ %% Case-insensitive duplicate keys merge; values join with ", ".
+ [{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
+ H1 = ?MODULE:insert(taco, grande, H),
+ [{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
+ H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
+ [{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
+ %% Set-Cookie is special-cased: each insert keeps a separate entry.
+ H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
+ [{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
+ "foo, bar" = ?MODULE:get_value("set-cookie", H3),
+ {value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
+ undefined = ?MODULE:get_value("shibby", H3),
+ none = ?MODULE:lookup("shibby", H3),
+ H4 = ?MODULE:insert("content-type",
+ "application/x-www-form-urlencoded; charset=utf8",
+ H3),
+ %% get_primary_value/2 truncates at the first semicolon.
+ "application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
+ "content-type", H4),
+ ok.
+
+%% @spec empty() -> headers()
+%% @doc Create an empty headers structure.
+%% Internally headers are a gb_tree keyed by the lower-cased header name.
+empty() ->
+ gb_trees:empty().
+
+%% @spec make(headers() | [{key(), value()}]) -> headers()
+%% @doc Construct a headers() from the given list.
+make(L) when is_list(L) ->
+ from_list(L);
+%% assume a tuple is already mochiweb_headers.
+%% (gb_trees are tuples at runtime, so no further validation is possible.)
+make(T) when is_tuple(T) ->
+ T.
+
+%% @spec from_list([{key(), value()}]) -> headers()
+%% @doc Build a headers() by inserting (merging) each pair into an empty
+%% structure, in list order.
+from_list(Pairs) ->
+    Add = fun ({Key, Value}, Headers) -> insert(Key, Value, Headers) end,
+    lists:foldl(Add, empty(), Pairs).
+
+%% @spec enter_from_list([{key(), value()}], headers()) -> headers()
+%% @doc Enter each pair into the headers, replacing any existing value
+%% for the same (case-insensitive) key.
+enter_from_list(Pairs, Headers) ->
+    Replace = fun ({Key, Value}, Acc) -> enter(Key, Value, Acc) end,
+    lists:foldl(Replace, Headers, Pairs).
+
+%% @spec default_from_list([{key(), value()}], headers()) -> headers()
+%% @doc Insert each pair only when its key is not already present.
+default_from_list(Pairs, Headers) ->
+    AddMissing = fun ({Key, Value}, Acc) -> default(Key, Value, Acc) end,
+    lists:foldl(AddMissing, Headers, Pairs).
+
+%% @spec to_list(headers()) -> [{key(), string()}]
+%% @doc Return the contents of the headers. The keys will be the exact key
+%% that was first inserted (e.g. may be an atom or binary, case is
+%% preserved).
+to_list(T) ->
+ F = fun ({K, {array, L}}, Acc) ->
+ %% Multi-valued entries (Set-Cookie) are stored reversed as
+ %% {array, L}; emit one {K, V} pair per value, insertion order.
+ L1 = lists:reverse(L),
+ lists:foldl(fun (V, Acc1) -> [{K, V} | Acc1] end, Acc, L1);
+ (Pair, Acc) ->
+ [Pair | Acc]
+ end,
+ lists:reverse(lists:foldl(F, [], gb_trees:values(T))).
+
+%% @spec get_value(key(), headers()) -> string() | undefined
+%% @doc Case-insensitive fetch of a header value; undefined when the key
+%% is not present.
+get_value(Key, Headers) ->
+    unwrap_value(lookup(Key, Headers)).
+
+%% Strip the {value, {Key, Value}} wrapper produced by lookup/2.
+unwrap_value({value, {_CasedKey, Value}}) ->
+    expand(Value);
+unwrap_value(none) ->
+    undefined.
+
+%% @spec get_primary_value(key(), headers()) -> string() | undefined
+%% @doc Case-insensitive fetch of a header value truncated at the first
+%% semicolon (i.e. without parameters); undefined when absent.
+get_primary_value(Key, Headers) ->
+    case get_value(Key, Headers) of
+        undefined ->
+            undefined;
+        Value ->
+            drop_params(Value)
+    end.
+
+%% Keep the characters preceding the first $; of a header value.
+drop_params(Value) ->
+    lists:takewhile(fun (C) -> C =/= $; end, Value).
+
+%% @spec lookup(key(), headers()) -> {value, {key(), string()}} | none
+%% @doc Return the case preserved key and value for the given header using
+%% a case insensitive search. none will be returned for keys that are
+%% not present.
+lookup(K, T) ->
+ case gb_trees:lookup(normalize(K), T) of
+ {value, {K0, V}} ->
+ %% K0 is the key exactly as first inserted; V may be a stored
+ %% {array, _} of Set-Cookie values, flattened by expand/1.
+ {value, {K0, expand(V)}};
+ none ->
+ none
+ end.
+
+%% @spec default(key(), value(), headers()) -> headers()
+%% @doc Insert the pair into the headers if it does not already exist.
+default(K, V, T) ->
+ K1 = normalize(K),
+ V1 = any_to_list(V),
+ %% gb_trees:insert/3 raises {key_exists, _} on duplicates; in that
+ %% case the existing entry wins and T is returned unchanged.
+ try gb_trees:insert(K1, {K, V1}, T)
+ catch
+ error:{key_exists, _} ->
+ T
+ end.
+
+%% @spec enter(key(), value(), headers()) -> headers()
+%% @doc Insert the pair into the headers, replacing any pre-existing key.
+%% The original (case-preserved) key is stored next to the value.
+enter(Key, Value, Headers) ->
+    gb_trees:enter(normalize(Key), {Key, any_to_list(Value)}, Headers).
+
+%% @spec insert(key(), value(), headers()) -> headers()
+%% @doc Insert the pair into the headers, merging with any pre-existing key.
+%% A merge is done with Value = V0 ++ ", " ++ V1.
+insert(K, V, T) ->
+ K1 = normalize(K),
+ V1 = any_to_list(V),
+ %% Optimistic insert; on a duplicate key, merge the new value with
+ %% the stored one (Set-Cookie accumulates, others comma-join).
+ try gb_trees:insert(K1, {K, V1}, T)
+ catch
+ error:{key_exists, _} ->
+ {K0, V0} = gb_trees:get(K1, T),
+ V2 = merge(K1, V1, V0),
+ gb_trees:update(K1, {K0, V2}, T)
+ end.
+
+%% Internal API
+
+%% Flatten a stored value for presentation: multi-valued entries are kept
+%% reversed as {array, Values} internally and are joined with ", " here;
+%% plain values pass through untouched.
+expand({array, Values}) ->
+    mochiweb_util:join(lists:reverse(Values), ", ");
+expand(Value) ->
+    Value.
+
+%% Merge a new value V1 into the existing value for normalized key K.
+%% "set-cookie" values accumulate in a reversed {array, _} so each cookie
+%% stays a separate header line; all other headers are comma-joined.
+merge("set-cookie", V1, {array, L}) ->
+ {array, [V1 | L]};
+merge("set-cookie", V1, V0) ->
+ {array, [V1, V0]};
+merge(_, V1, V0) ->
+ V0 ++ ", " ++ V1.
+
+%% Normalize a header key to its canonical lookup form: a lower-cased
+%% string. Atoms and binaries are converted to lists first.
+normalize(Key) when is_atom(Key) ->
+    normalize(atom_to_list(Key));
+normalize(Key) when is_binary(Key) ->
+    normalize(binary_to_list(Key));
+normalize(Key) when is_list(Key) ->
+    string:to_lower(Key).
+
+%% Coerce a header key or value to its list (string) representation.
+%% Lists are returned unchanged; atoms, binaries and integers convert.
+any_to_list(Term) when is_atom(Term) ->
+    atom_to_list(Term);
+any_to_list(Term) when is_binary(Term) ->
+    binary_to_list(Term);
+any_to_list(Term) when is_integer(Term) ->
+    integer_to_list(Term);
+any_to_list(Term) when is_list(Term) ->
+    Term.
+
+
diff --git a/deps/mochiweb-r88/src/mochiweb_html.erl b/deps/mochiweb-r88/src/mochiweb_html.erl
new file mode 100644
index 0000000..ec1ae9f
--- /dev/null
+++ b/deps/mochiweb-r88/src/mochiweb_html.erl
@@ -0,0 +1,880 @@
+%% @author Bob Ippolito
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Loosely tokenizes and generates parse trees for HTML 4.
+-module(mochiweb_html).
+-export([tokens/1, parse/1, parse_tokens/1, to_tokens/1, escape/1,
+ escape_attr/1, to_html/1, test/0]).
+
+% This is a macro to placate syntax highlighters..
+-define(QUOTE, $\").
+-define(SQUOTE, $\').
+%% Position-tracking helpers over the #decoder record:
+%% ADV_COL advances column/offset by N columns (same line).
+-define(ADV_COL(S, N),
+ S#decoder{column=N+S#decoder.column,
+ offset=N+S#decoder.offset}).
+%% INC_COL advances past one non-newline character.
+-define(INC_COL(S),
+ S#decoder{column=1+S#decoder.column,
+ offset=1+S#decoder.offset}).
+%% INC_LINE advances past a newline: next line, column reset to 1.
+-define(INC_LINE(S),
+ S#decoder{column=1,
+ line=1+S#decoder.line,
+ offset=1+S#decoder.offset}).
+%% INC_CHAR advances past C, choosing line vs. column bookkeeping.
+-define(INC_CHAR(S, C),
+ case C of
+ $\n ->
+ S#decoder{column=1,
+ line=1+S#decoder.line,
+ offset=1+S#decoder.offset};
+ _ ->
+ S#decoder{column=1+S#decoder.column,
+ offset=1+S#decoder.offset}
+ end).
+
+-define(IS_WHITESPACE(C),
+ (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
+%% "Literal safe" = ASCII alphanumeric; such doctype words need no quoting.
+-define(IS_LITERAL_SAFE(C),
+ ((C >= $A andalso C =< $Z) orelse (C >= $a andalso C =< $z)
+ orelse (C >= $0 andalso C =< $9))).
+
+%% Tokenizer state: 1-based line/column plus a 0-based byte offset into
+%% the input binary.
+-record(decoder, {line=1,
+ column=1,
+ offset=0}).
+
+%% @type html_node() = {string(), [html_attr()], [html_node() | string()]}
+%% @type html_attr() = {string(), string()}
+%% @type html_token() = html_data() | start_tag() | end_tag() | inline_html() | html_comment() | html_doctype()
+%% @type html_data() = {data, string(), Whitespace::boolean()}
+%% @type start_tag() = {start_tag, Name, [html_attr()], Singleton::boolean()}
+%% @type end_tag() = {end_tag, Name}
+%% @type html_comment() = {comment, Comment}
+%% @type html_doctype() = {doctype, [Doctype]}
+%% @type inline_html() = {'=', iolist()}
+
+%% External API.
+
+%% @spec parse(string() | binary()) -> html_node()
+%% @doc tokenize and then transform the token stream into a HTML tree.
+%% Convenience wrapper around tokens/1 and parse_tokens/1.
+parse(Input) ->
+ parse_tokens(tokens(Input)).
+
+%% @spec parse_tokens([html_token()]) -> html_node()
+%% @doc Transform the output of tokens(Doc) into a HTML tree.
+parse_tokens(Tokens) when is_list(Tokens) ->
+ %% Skip over doctype, processing instructions
+ %% (dropwhile removes everything up to the first non-singleton
+ %% start tag, which becomes the root of the tree).
+ F = fun (X) ->
+ case X of
+ {start_tag, _, _, false} ->
+ false;
+ _ ->
+ true
+ end
+ end,
+ [{start_tag, Tag, Attrs, false} | Rest] = lists:dropwhile(F, Tokens),
+ {Tree, _} = tree(Rest, [norm({Tag, Attrs})]),
+ Tree.
+
+%% @spec tokens(StringOrBinary) -> [html_token()]
+%% @doc Transform the input UTF-8 HTML into a token stream.
+%% The input iolist is flattened once up front; tokenizing then walks
+%% the binary by offset via #decoder{}.
+tokens(Input) ->
+ tokens(iolist_to_binary(Input), #decoder{}, []).
+
+%% @spec to_tokens(html_node()) -> [html_token()]
+%% @doc Convert a html_node() tree to a list of tokens.
+%% Accepts the shorthand forms {Tag} and {Tag, Children} in addition
+%% to the full {Tag, Attrs, Children}; '='/doctype/comment tuples pass
+%% through as single tokens.
+to_tokens({Tag0}) ->
+ to_tokens({Tag0, [], []});
+to_tokens(T={'=', _}) ->
+ [T];
+to_tokens(T={doctype, _}) ->
+ [T];
+to_tokens(T={comment, _}) ->
+ [T];
+to_tokens({Tag0, Acc}) ->
+ to_tokens({Tag0, [], Acc});
+to_tokens({Tag0, Attrs, Acc}) ->
+ Tag = to_tag(Tag0),
+ to_tokens([{Tag, Acc}], [{start_tag, Tag, Attrs, is_singleton(Tag)}]).
+
+%% @spec to_html([html_token()] | html_node()) -> iolist()
+%% @doc Convert a list of html_token() to a HTML document.
+%% A tuple argument is treated as a tree node and tokenized first.
+to_html(Node) when is_tuple(Node) ->
+ to_html(to_tokens(Node));
+to_html(Tokens) when is_list(Tokens) ->
+ to_html(Tokens, []).
+
+%% @spec escape(string() | atom() | binary()) -> binary()
+%% @doc Escape a string such that it's safe for HTML (amp; lt; gt;).
+escape(Atom) when is_atom(Atom) ->
+    escape(atom_to_list(Atom), []);
+escape(Bin) when is_binary(Bin) ->
+    escape(binary_to_list(Bin), []);
+escape(Str) when is_list(Str) ->
+    escape(Str, []).
+
+%% @spec escape_attr(string() | binary() | atom() | integer() | float()) -> binary()
+%% @doc Escape a value for use inside an HTML attribute
+%% (amp; lt; gt; quot;); numbers are stringified first.
+escape_attr(Bin) when is_binary(Bin) ->
+    escape_attr(binary_to_list(Bin), []);
+escape_attr(Atom) when is_atom(Atom) ->
+    escape_attr(atom_to_list(Atom), []);
+escape_attr(Int) when is_integer(Int) ->
+    escape_attr(integer_to_list(Int), []);
+escape_attr(Float) when is_float(Float) ->
+    escape_attr(mochinum:digits(Float), []);
+escape_attr(Str) when is_list(Str) ->
+    escape_attr(Str, []).
+
+%% @spec test() -> ok
+%% @doc Run tests for mochiweb_html. Each sub-test crashes on failure,
+%% so reaching the final ok means all passed.
+test() ->
+    Tests = [fun test_destack/0,
+             fun test_tokens/0,
+             fun test_tokens2/0,
+             fun test_parse/0,
+             fun test_parse2/0,
+             fun test_parse_tokens/0,
+             fun test_escape/0,
+             fun test_escape_attr/0,
+             fun test_to_html/0],
+    lists:foreach(fun (T) -> T() end, Tests),
+    ok.
+
+
+%% Internal API
+
+%% Verify to_html/1 output for a nested tree and for a doctype token.
+%% NOTE(review): the markup inside these binary literals was stripped by
+%% an HTML-unaware export; reconstructed from the to_html/1 inputs below —
+%% confirm against the upstream mochiweb source.
+test_to_html() ->
+    Expect = <<"<html><head><title>hey!</title></head><body><p class=\"foo\">what's up<br /></p><div>sucka</div><!-- comment! --></body></html>">>,
+    Expect = iolist_to_binary(
+               to_html({html, [],
+                        [{<<"head">>, [],
+                          [{title, <<"hey!">>}]},
+                         {body, [],
+                          [{p, [{class, foo}], [<<"what's">>, <<" up">>, {br}]},
+                           {'div', <<"sucka">>},
+                           {comment, <<" comment! ">>}]}]})),
+    Expect1 = <<"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">">>,
+    Expect1 = iolist_to_binary(
+                to_html({doctype,
+                         [<<"html">>, <<"PUBLIC">>,
+                          <<"-//W3C//DTD XHTML 1.0 Transitional//EN">>,
+                          <<"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">>]})),
+    ok.
+%% Token-stream serializer behind to_html/1. Accumulates iolist chunks in
+%% reverse. The markup literals ("<?", "<!--", "-->", "<!DOCTYPE", "</")
+%% were stripped from this file by an HTML-unaware export and are restored
+%% here, along with the dropped Comment element in the comment clause.
+to_html([], Acc) ->
+    lists:reverse(Acc);
+to_html([{'=', Content} | Rest], Acc) ->
+    %% Inline HTML is emitted verbatim, unescaped.
+    to_html(Rest, [Content | Acc]);
+to_html([{pi, Tag, Attrs} | Rest], Acc) ->
+    Open = [<<"<?">>,
+            Tag,
+            attrs_to_html(Attrs, []),
+            <<"?>">>],
+    to_html(Rest, [Open | Acc]);
+to_html([{comment, Comment} | Rest], Acc) ->
+    to_html(Rest, [[<<"<!--">>, Comment, <<"-->">>] | Acc]);
+to_html([{doctype, Parts} | Rest], Acc) ->
+    Inside = doctype_to_html(Parts, Acc),
+    to_html(Rest, [[<<"<!DOCTYPE">>, Inside, <<">">>] | Acc]);
+to_html([{data, Data, _Whitespace} | Rest], Acc) ->
+    to_html(Rest, [escape(Data) | Acc]);
+to_html([{start_tag, Tag, Attrs, Singleton} | Rest], Acc) ->
+    Open = [<<"<">>,
+            Tag,
+            attrs_to_html(Attrs, []),
+            case Singleton of
+                true -> <<" />">>;
+                false -> <<">">>
+            end],
+    to_html(Rest, [Open | Acc]);
+to_html([{end_tag, Tag} | Rest], Acc) ->
+    to_html(Rest, [[<<"</">>, Tag, <<">">>] | Acc]).
+
+%% Render doctype words as an iolist: plain ASCII-alphanumeric words are
+%% emitted bare, anything else is double-quoted and attribute-escaped.
+doctype_to_html([], Acc) ->
+ lists:reverse(Acc);
+doctype_to_html([Word | Rest], Acc) ->
+ case lists:all(fun (C) -> ?IS_LITERAL_SAFE(C) end,
+ binary_to_list(iolist_to_binary(Word))) of
+ true ->
+ doctype_to_html(Rest, [[<<" ">>, Word] | Acc]);
+ false ->
+ doctype_to_html(Rest, [[<<" \"">>, escape_attr(Word), ?QUOTE] | Acc])
+ end.
+
+%% Serialize attribute pairs to iolist chunks of the form ' name="value"',
+%% escaping both the name and the value.
+attrs_to_html([], Acc) ->
+    lists:reverse(Acc);
+attrs_to_html([{Name, Value} | Rest], Acc) ->
+    Attr = [<<" ">>, escape(Name), <<"=\"">>,
+            escape_attr(Value), <<"\"">>],
+    attrs_to_html(Rest, [Attr | Acc]).
+
+%% escape/1 must entity-encode &, < and > (quotes are left alone).
+%% NOTE(review): literals reconstructed — the original "&..." entity
+%% sequences were decoded by an HTML-unaware export; confirm upstream.
+test_escape() ->
+    <<"&amp;quot;\"word &lt;&lt;up!&amp;quot;">> =
+        escape(<<"&quot;\"word <<up!&quot;">>),
+    ok.
+
+%% escape_attr/1 must additionally entity-encode double quotes.
+%% NOTE(review): literals reconstructed — the original "&..." entity
+%% sequences were decoded by an HTML-unaware export; confirm upstream.
+test_escape_attr() ->
+    <<"&amp;quot;&quot;word &lt;&lt;up!&amp;quot;">> =
+        escape_attr(<<"&quot;\"word <<up!&quot;">>),
+    ok.
+
+%% Accumulator worker behind escape/1. Walks the character list,
+%% entity-encoding <, > and &; the reversed accumulator is flattened to a
+%% binary at the end. The entity strings here ("&lt;" etc.) had been
+%% decoded back to raw characters by an HTML-unaware export, which made
+%% this function a no-op; restored.
+escape([], Acc) ->
+    list_to_binary(lists:reverse(Acc));
+escape("<" ++ Rest, Acc) ->
+    escape(Rest, lists:reverse("&lt;", Acc));
+escape(">" ++ Rest, Acc) ->
+    escape(Rest, lists:reverse("&gt;", Acc));
+escape("&" ++ Rest, Acc) ->
+    escape(Rest, lists:reverse("&amp;", Acc));
+escape([C | Rest], Acc) ->
+    escape(Rest, [C | Acc]).
+
+%% Accumulator worker behind escape_attr/1: like escape/2 but also
+%% entity-encodes the double quote (?QUOTE). The entity strings had been
+%% decoded to raw characters by an HTML-unaware export; restored.
+escape_attr([], Acc) ->
+    list_to_binary(lists:reverse(Acc));
+escape_attr("<" ++ Rest, Acc) ->
+    escape_attr(Rest, lists:reverse("&lt;", Acc));
+escape_attr(">" ++ Rest, Acc) ->
+    escape_attr(Rest, lists:reverse("&gt;", Acc));
+escape_attr("&" ++ Rest, Acc) ->
+    escape_attr(Rest, lists:reverse("&amp;", Acc));
+escape_attr([?QUOTE | Rest], Acc) ->
+    escape_attr(Rest, lists:reverse("&quot;", Acc));
+escape_attr([C | Rest], Acc) ->
+    escape_attr(Rest, [C | Acc]).
+
+%% Normalize a tag name (atom or list/binary) via norm/1; atoms are
+%% first converted to their string form.
+to_tag(A) when is_atom(A) ->
+ norm(atom_to_list(A));
+to_tag(L) ->
+ norm(L).
+
+%% Worker behind to_tokens/1: a stack of {Tag, RemainingChildren} frames
+%% is unwound into a flat token list. The clause cascade normalizes the
+%% many shorthand child forms ({br}, {'=', _}, {comment, _}, {Tag, Attrs},
+%% {Tag, Content}, bare strings/binaries) into the canonical
+%% {Tag, Attrs, Children} shape before emitting tokens.
+to_tokens([], Acc) ->
+ lists:reverse(Acc);
+to_tokens([{Tag, []} | Rest], Acc) ->
+ %% Frame exhausted: close the element.
+ to_tokens(Rest, [{end_tag, to_tag(Tag)} | Acc]);
+to_tokens([{Tag0, [{T0} | R1]} | Rest], Acc) ->
+ %% Allow {br}
+ to_tokens([{Tag0, [{T0, [], []} | R1]} | Rest], Acc);
+to_tokens([{Tag0, [T0={'=', _C0} | R1]} | Rest], Acc) ->
+ %% Allow {'=', iolist()}
+ to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
+to_tokens([{Tag0, [T0={comment, _C0} | R1]} | Rest], Acc) ->
+ %% Allow {comment, iolist()}
+ to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
+to_tokens([{Tag0, [{T0, A0=[{_, _} | _]} | R1]} | Rest], Acc) ->
+ %% Allow {p, [{"class", "foo"}]}
+ to_tokens([{Tag0, [{T0, A0, []} | R1]} | Rest], Acc);
+to_tokens([{Tag0, [{T0, C0} | R1]} | Rest], Acc) ->
+ %% Allow {p, "content"} and {p, <<"content">>}
+ to_tokens([{Tag0, [{T0, [], C0} | R1]} | Rest], Acc);
+to_tokens([{Tag0, [{T0, A1, C0} | R1]} | Rest], Acc) when is_binary(C0) ->
+ %% Allow {"p", [{"class", "foo"}], <<"content">>}
+ to_tokens([{Tag0, [{T0, A1, binary_to_list(C0)} | R1]} | Rest], Acc);
+to_tokens([{Tag0, [{T0, A1, C0=[C | _]} | R1]} | Rest], Acc)
+ when is_integer(C) ->
+ %% Allow {"p", [{"class", "foo"}], "content"}
+ to_tokens([{Tag0, [{T0, A1, [C0]} | R1]} | Rest], Acc);
+to_tokens([{Tag0, [{T0, A1, C1} | R1]} | Rest], Acc) ->
+ %% Native {"p", [{"class", "foo"}], ["content"]}
+ Tag = to_tag(Tag0),
+ T1 = to_tag(T0),
+ case is_singleton(norm(T1)) of
+ true ->
+ to_tokens([{Tag, R1} | Rest], [{start_tag, T1, A1, true} | Acc]);
+ false ->
+ %% Push a frame for the child so its content is emitted
+ %% before the parent's remaining children.
+ to_tokens([{T1, C1}, {Tag, R1} | Rest],
+ [{start_tag, T1, A1, false} | Acc])
+ end;
+to_tokens([{Tag0, [L | R1]} | Rest], Acc) when is_list(L) ->
+ %% List text
+ Tag = to_tag(Tag0),
+ to_tokens([{Tag, R1} | Rest], [{data, iolist_to_binary(L), false} | Acc]);
+to_tokens([{Tag0, [B | R1]} | Rest], Acc) when is_binary(B) ->
+ %% Binary text
+ Tag = to_tag(Tag0),
+ to_tokens([{Tag, R1} | Rest], [{data, B, false} | Acc]).
+
+%% Tokenizer tests: attribute quoting styles, conditional comments, and
+%% the raw-text handling of script/textarea content.
+%% NOTE(review): the markup inside these literals was stripped by an
+%% HTML-unaware export; reconstructed from the surviving token terms and
+%% the upstream mochiweb test — verify against the original source.
+test_tokens() ->
+    [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
+                             {<<"wibble">>, <<"wibble">>},
+                             {<<"alice">>, <<"bob">>}], true}] =
+        tokens(<<"<foo bar=baz wibble='wibble' alice=\"bob\"/>">>),
+    [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
+                             {<<"wibble">>, <<"wibble">>},
+                             {<<"alice">>, <<"bob">>}], true}] =
+        tokens(<<"<foo bar=baz wibble='wibble' alice=bob/>">>),
+    [{comment, <<"[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]">>}] =
+        tokens(<<"<!--[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]-->">>),
+    [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+     {data, <<" A= B <= C ">>, false},
+     {end_tag, <<"script">>}] =
+        tokens(<<"<script type=\"text/javascript\"> A= B <= C </script>">>),
+    [{start_tag, <<"textarea">>, [], false},
+     {data, <<"<html></body>">>, false},
+     {end_tag, <<"textarea">>}] =
+        tokens(<<"<textarea><html></body></textarea>">>),
+    ok.
+
+%% Main tokenizer loop: consume tokens from binary B starting at the
+%% decoder offset until the offset reaches the end of the binary.
+%% script/textarea start tags switch to a raw-text sub-tokenizer so their
+%% content is emitted as a single data token.
+tokens(B, S=#decoder{offset=O}, Acc) ->
+ case B of
+ <<_:O/binary>> ->
+ %% Offset is at the end of the input: done.
+ lists:reverse(Acc);
+ _ ->
+ {Tag, S1} = tokenize(B, S),
+ case parse_flag(Tag) of
+ script ->
+ {Tag2, S2} = tokenize_script(B, S1),
+ tokens(B, S2, [Tag2, Tag | Acc]);
+ textarea ->
+ {Tag2, S2} = tokenize_textarea(B, S1),
+ tokens(B, S2, [Tag2, Tag | Acc]);
+ none ->
+ tokens(B, S1, [Tag | Acc])
+ end
+ end.
+
+%% Decide whether a token switches the tokenizer into raw-text mode:
+%% non-singleton <script> and <textarea> start tags do (case-insensitive
+%% tag match); every other token does not.
+parse_flag({start_tag, Name, _, false}) ->
+    flag_for(string:to_lower(binary_to_list(Name)));
+parse_flag(_) ->
+    none.
+
+%% Map a lower-cased tag name to its raw-text flag.
+flag_for("script") -> script;
+flag_for("textarea") -> textarea;
+flag_for(_) -> none.
+
+tokenize(B, S=#decoder{offset=O}) ->
+ case B of
+ <<_:O/binary, "
+
+
+
+ CDATA>>]]>
+