author    | Calvin Morrison <calvin@pobox.com> | 2025-09-03 21:15:36 -0400
committer | Calvin Morrison <calvin@pobox.com> | 2025-09-03 21:15:36 -0400
commit    | 49fa5aa2a127bdf8924d02bf77e5086b39c7a447 (patch)
tree      | 61d86a7705dacc9fddccc29fa79d075d83ab8059 /server/_build/default/plugins
Diffstat (limited to 'server/_build/default/plugins')
38 files changed, 6428 insertions, 0 deletions
diff --git a/server/_build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag b/server/_build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag Binary files differnew file mode 100644 index 0000000..1c3fc99 --- /dev/null +++ b/server/_build/default/plugins/.rebar3/rebar_compiler_erl/source_apps.dag diff --git a/server/_build/default/plugins/coveralls/README.md b/server/_build/default/plugins/coveralls/README.md new file mode 100644 index 0000000..eecfbb0 --- /dev/null +++ b/server/_build/default/plugins/coveralls/README.md @@ -0,0 +1,126 @@ +coveralls-erl +============= +[](https://travis-ci.org/markusn/coveralls-erl) +[](https://coveralls.io/r/markusn/coveralls-erl?branch=master) +[](https://hex.pm/packages/coveralls) + +Erlang module to convert and send cover data to coveralls. Available as a hex package on https://hex.pm/packages/coveralls. + +## Example usage: rebar3 and Travis CI +In order to use coveralls-erl + Travis CI in your project you will need to add the following lines to your +`rebar.config.script`: + +```erlang +case os:getenv("TRAVIS") of + "true" -> + JobId = os:getenv("TRAVIS_JOB_ID"), + lists:keystore(coveralls_service_job_id, 1, CONFIG, {coveralls_service_job_id, JobId}); + _ -> + CONFIG +end. +``` + +This will ensure that the rebar coveralls plugin will have access to the needed JobId and that the plugin is only run from Travis CI. + +You will also need to add the following lines to your `rebar.config`: +```erlang +{plugins , [coveralls]}. % use hex package +{cover_enabled , true}. +{cover_export_enabled , true}. +{coveralls_coverdata , "_build/test/cover/eunit.coverdata"}. % or a string with wildcards or a list of files +{coveralls_service_name , "travis-ci"}. % use "travis-pro" when using with travis-ci.com +``` +When using with travis-ci.com coveralls repo token also has to be added as `{coveralls_repo_token, "token_goes_here"}` + +These changes will add `coveralls-erl` as a dependency, tell `rebar3` where to find the plugin, make sure that the coverage data is produced and exported and configure `coveralls-erl` to use this data and the service `travis-ci`. + +And you send the coverdata to coveralls by issuing: `rebar3 as test coveralls send` + +**Note:** +If you have dependencies specific to the test profile, or if you only add the coveralls dependency or any of its' configuration variables to the test profile you need to run coveralls using: `rebar3 as test coveralls send` + +## Example: rebar3 and CircleCI +Example `rebar.config.script`: + +```erlang +case {os:getenv("CIRCLECI"), os:getenv("COVERALLS_REPO_TOKEN")} of + {"true", Token} when is_list(Token) -> + JobId = os:getenv("CIRCLE_BUILD_NUM"), + CONFIG1 = lists:keystore(coveralls_service_job_id, 1, CONFIG, {coveralls_service_job_id, JobId}), + lists:keystore(coveralls_repo_token, 1, CONFIG1, {coveralls_repo_token, Token}); + _ -> + CONFIG +end. +``` + +Example `rebar.config`: + +```erlang + +{plugins , [coveralls]}. % use hex package +{cover_enabled , true}. +{cover_export_enabled , true}. +{coveralls_coverdata , "_build/test/cover/ct.coverdata"}. +{coveralls_service_name , "circle-ci"}. +``` + +Note that you'll need to set `COVERALLS_REPO_TOKEN` in your CircleCI environment variables! 
+ +## Example usage: rebar3 and GitHub Actions + +In order to use coveralls-erl + GitHub Actions in your project, you will need to add the following lines to your +`rebar.config.script`: + +```erlang +case {os:getenv("GITHUB_ACTIONS"), os:getenv("GITHUB_TOKEN")} of + {"true", Token} when is_list(Token) -> + CONFIG1 = [{coveralls_repo_token, Token}, + {coveralls_service_job_id, os:getenv("GITHUB_RUN_ID")}, + {coveralls_commit_sha, os:getenv("GITHUB_SHA")}, + {coveralls_service_number, os:getenv("GITHUB_RUN_NUMBER")} | CONFIG], + case os:getenv("GITHUB_EVENT_NAME") =:= "pull_request" + andalso string:tokens(os:getenv("GITHUB_REF"), "/") of + [_, "pull", PRNO, _] -> + [{coveralls_service_pull_request, PRNO} | CONFIG1]; + _ -> + CONFIG1 + end; + _ -> + CONFIG +end. +``` + +This will ensure that the rebar coveralls plugin will have access to the needed JobId and that the plugin is only run from GitHub Actions. + +You will also need to add the following lines to your `rebar.config`: +```erlang +{plugins , [coveralls]}. % use hex package +{cover_enabled , true}. +{cover_export_enabled , true}. +{coveralls_coverdata , "_build/test/cover/eunit.coverdata"}. % or a string with wildcards or a list of files +{coveralls_service_name , "github"}. +``` + +These changes will add `coveralls-erl` as a dependency, tell `rebar3` where to find the plugin, make sure that the coverage data is produced and exported and configure `coveralls-erl` to use this data and the service `github`. + +And you send the coverdata to coveralls by adding a step like: + +``` +- name: Coveralls + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: rebar3 as test coveralls send +``` + +Other available GitHub Actions Environment Variables are available [here](https://help.github.com/en/actions/configuring-and-managing-workflows/using-environment-variables) + +## Optional settings + +The pluging also support the `coveralls_service_pull_request` and `coveralls_parallel` settings. +See the Coveralls documentation for the meaning of those. + +## Author +Markus Ekholm (markus at botten dot org). + +## License +3-clause BSD. For details see `COPYING`. diff --git a/server/_build/default/plugins/coveralls/ebin/coveralls.app b/server/_build/default/plugins/coveralls/ebin/coveralls.app new file mode 100644 index 0000000..c77ac5c --- /dev/null +++ b/server/_build/default/plugins/coveralls/ebin/coveralls.app @@ -0,0 +1,12 @@ +{application,coveralls, + [{registered,[]}, + {description,"Coveralls for Erlang"}, + {vsn,"2.2.0"}, + {licenses,["BSD"]}, + {modules,[coveralls,rebar3_coveralls]}, + {registred,[]}, + {applications,[kernel,stdlib]}, + {env,[{providers,[rebar3_coveralls]}]}, + {maintainers,["Markus Ekholm"]}, + {links,[{"Github", + "https://github.com/markusn/coveralls-erl"}]}]}. 
diff --git a/server/_build/default/plugins/coveralls/ebin/coveralls.beam b/server/_build/default/plugins/coveralls/ebin/coveralls.beam Binary files differnew file mode 100644 index 0000000..3664b34 --- /dev/null +++ b/server/_build/default/plugins/coveralls/ebin/coveralls.beam diff --git a/server/_build/default/plugins/coveralls/ebin/rebar3_coveralls.beam b/server/_build/default/plugins/coveralls/ebin/rebar3_coveralls.beam Binary files differnew file mode 100644 index 0000000..1ffa9b2 --- /dev/null +++ b/server/_build/default/plugins/coveralls/ebin/rebar3_coveralls.beam diff --git a/server/_build/default/plugins/coveralls/hex_metadata.config b/server/_build/default/plugins/coveralls/hex_metadata.config new file mode 100644 index 0000000..c8a4f91 --- /dev/null +++ b/server/_build/default/plugins/coveralls/hex_metadata.config @@ -0,0 +1,17 @@ +{<<"name">>,<<"coveralls">>}. +{<<"version">>,<<"2.2.0">>}. +{<<"requirements">>, + #{<<"jsx">> => + #{<<"app">> => <<"jsx">>,<<"optional">> => false, + <<"requirement">> => <<"2.10.0">>}}}. +{<<"app">>,<<"coveralls">>}. +{<<"maintainers">>,[<<"Markus Ekholm">>]}. +{<<"precompiled">>,false}. +{<<"description">>,<<"Coveralls for Erlang">>}. +{<<"files">>, + [<<"src/coveralls.app.src">>,<<"README.md">>,<<"rebar.config">>, + <<"rebar.config.script">>,<<"rebar.lock">>,<<"src/coveralls.erl">>, + <<"src/rebar3_coveralls.erl">>]}. +{<<"licenses">>,[<<"BSD">>]}. +{<<"links">>,[{<<"Github">>,<<"https://github.com/markusn/coveralls-erl">>}]}. +{<<"build_tools">>,[<<"rebar3">>]}. diff --git a/server/_build/default/plugins/coveralls/rebar.config b/server/_build/default/plugins/coveralls/rebar.config new file mode 100644 index 0000000..e23984e --- /dev/null +++ b/server/_build/default/plugins/coveralls/rebar.config @@ -0,0 +1,7 @@ +{deps, [{jsx, "2.10.0"}]}. +{profiles, [{test, [{plugins, [{coveralls, {git, "https://github.com/markusn/coveralls-erl", {branch, "master"}}}]}]}]}. +{cover_enabled , true}. +{cover_export_enabled , true}. +{coveralls_coverdata , "_build/test/cover/eunit.coverdata"}. % or a string with wildcards or a list of files +{coveralls_service_name , "travis-ci"}. +{coveralls_parallel, true}. diff --git a/server/_build/default/plugins/coveralls/rebar.config.script b/server/_build/default/plugins/coveralls/rebar.config.script new file mode 100644 index 0000000..8886d94 --- /dev/null +++ b/server/_build/default/plugins/coveralls/rebar.config.script @@ -0,0 +1,7 @@ +case os:getenv("TRAVIS") of + "true" -> + JobId = os:getenv("TRAVIS_JOB_ID"), + lists:keystore(coveralls_service_job_id, 1, CONFIG, {coveralls_service_job_id, JobId}); + _ -> + CONFIG +end.
\ No newline at end of file diff --git a/server/_build/default/plugins/coveralls/rebar.lock b/server/_build/default/plugins/coveralls/rebar.lock new file mode 100644 index 0000000..82f478c --- /dev/null +++ b/server/_build/default/plugins/coveralls/rebar.lock @@ -0,0 +1,6 @@ +{"1.1.0", +[{<<"jsx">>,{pkg,<<"jsx">>,<<"2.10.0">>},0}]}. +[ +{pkg_hash,[ + {<<"jsx">>, <<"77760560D6AC2B8C51FD4C980E9E19B784016AA70BE354CE746472C33BEB0B1C">>}]} +]. diff --git a/server/_build/default/plugins/coveralls/src/coveralls.app.src b/server/_build/default/plugins/coveralls/src/coveralls.app.src new file mode 100644 index 0000000..85a0d8e --- /dev/null +++ b/server/_build/default/plugins/coveralls/src/coveralls.app.src @@ -0,0 +1,11 @@ +{application,coveralls, + [{description,"Coveralls for Erlang"}, + {vsn,"2.2.0"}, + {licenses,["BSD"]}, + {modules,[]}, + {registred,[]}, + {applications,[kernel,stdlib]}, + {env,[{providers,[rebar3_coveralls]}]}, + {maintainers,["Markus Ekholm"]}, + {links,[{"Github", + "https://github.com/markusn/coveralls-erl"}]}]}. diff --git a/server/_build/default/plugins/coveralls/src/coveralls.erl b/server/_build/default/plugins/coveralls/src/coveralls.erl new file mode 100644 index 0000000..90954c6 --- /dev/null +++ b/server/_build/default/plugins/coveralls/src/coveralls.erl @@ -0,0 +1,499 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%%% Copyright (c) 2013-2016, Markus Ekholm +%%% All rights reserved. +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions are met: +%%% * Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. +%%% * Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. +%%% * Neither the name of the <organization> nor the +%%% names of its contributors may be used to endorse or promote products +%%% derived from this software without specific prior written permission. +%%% +%%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +%%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL MARKUS EKHOLM BE LIABLE FOR ANY +%%% DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +%%% (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +%%% LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +%%% ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +%%% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +%%% THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +%%% +%%% @copyright 2013-2016 (c) Markus Ekholm <markus@botten.org> +%%% @author Markus Ekholm <markus@botten.org> +%%% @doc coveralls +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +%%============================================================================= +%% Module declaration + +-module(coveralls). + +%%============================================================================= +%% Exports + +-export([ convert_file/2 + , convert_and_send_file/2 + ]). 
+ +%%============================================================================= +%% Records + +-record(s, { importer = fun cover:import/1 + , module_lister = fun cover:imported_modules/0 + , mod_info = fun module_info_compile/1 + , file_reader = fun file:read_file/1 + , wildcard_reader = fun filelib:wildcard/1 + , analyser = fun cover:analyse/3 + , poster = fun httpc:request/4 + , poster_init = start_wrapper([fun ssl:start/0, fun inets:start/0]) + }). + +%%============================================================================= +%% Defines + +-define(COVERALLS_URL, "https://coveralls.io/api/v1/jobs"). +%%-define(COVERALLS_URL, "http://127.0.0.1:8080"). + +-ifdef(random_only). +-define(random, random). +-else. +-define(random, rand). +-endif. + +%%============================================================================= +%% API functions + +%% @doc Import and convert cover file(s) `Filenames' to a json string +%% representation suitable to post to coveralls. +%% +%% Note that this function will crash if the modules mentioned in +%% any of the `Filenames' are not availabe on the node. +%% @end +-spec convert_file(string() | [string()], map()) -> + string(). +convert_file(Filenames, Report) -> + convert_file(Filenames, Report, #s{}). + +%% @doc Import and convert cover files `Filenames' to a json string and send the +%% json to coveralls. +%% @end +-spec convert_and_send_file(string() | [string()], map()) -> ok. +convert_and_send_file(Filenames, Report) -> + convert_and_send_file(Filenames, Report, #s{}). + +%%============================================================================= +%% Internal functions + +convert_file([L|_]=Filename, Report, S) when is_integer(L) -> + %% single file or wildcard was specified + WildcardReader = S#s.wildcard_reader, + Filenames = WildcardReader(Filename), + convert_file(Filenames, Report, S); +convert_file([[_|_]|_]=Filenames, Report, S) -> + ok = lists:foreach( + fun(Filename) -> ok = import(S, Filename) end, + Filenames), + ConvertedModules = convert_modules(S), + jsx:encode(Report#{source_files => ConvertedModules}, []). + +convert_and_send_file(Filenames, Report, S) -> + send(convert_file(Filenames, Report, S), S). + +send(Json, #s{poster=Poster, poster_init=Init}) -> + ok = Init(), + Boundary = ["----------", integer_to_list(?random:uniform(1000))], + Type = "multipart/form-data; boundary=" ++ Boundary, + Body = to_body(Json, Boundary), + R = Poster(post, {?COVERALLS_URL, [], Type, Body}, [], []), + {ok, {{_, ReturnCode, _}, _, Message}} = R, + case ReturnCode of + 200 -> ok; + ErrCode -> throw({error, {ErrCode, Message}}) + end. + +%%----------------------------------------------------------------------------- +%% HTTP helpers + +to_body(Json, Boundary) -> + iolist_to_binary(["--", Boundary, "\r\n", + "Content-Disposition: form-data; name=\"json_file\"; " + "filename=\"json_file.json\" \r\n" + "Content-Type: application/json\r\n\r\n", + Json, "\r\n", "--", Boundary, "--", "\r\n"]). + +%%----------------------------------------------------------------------------- +%% Callback mockery + +import(#s{importer=F}, File) -> F(File). + +imported_modules(#s{module_lister=F}) -> F(). + +analyze(#s{analyser=F}, Mod) -> F(Mod, calls, line). + +compile_info(#s{mod_info=F}, Mod) -> F(Mod). + +-ifdef(TEST). +module_info_compile(Mod) -> Mod:module_info(compile). +-else. +module_info_compile(Mod) -> + code:load_file(Mod), + case code:is_loaded(Mod) of + {file, _} -> Mod:module_info(compile); + _ -> [] + end. +-endif. 
+ +read_file(#s{file_reader=_F}, "") -> {ok, <<"">>}; +read_file(#s{file_reader=F}, SrcFile) -> F(SrcFile). + +start_wrapper(Funs) -> + fun() -> + lists:foreach(fun(F) -> ok = wrap_start(F) end, Funs) + end. + +wrap_start(StartFun) -> + case StartFun() of + {error,{already_started,_}} -> ok; + ok -> ok + end. + +digit(I) when I < 10 -> <<($0 + I):8>>; +digit(I) -> <<($a -10 + I):8>>. + +hex(<<>>) -> + <<>>; +hex(<<I:4, R/bitstring>>) -> + <<(digit(I))/binary, (hex(R))/binary>>. + +%%----------------------------------------------------------------------------- +%% Converting modules + +convert_modules(S) -> + F = fun(Mod, L) -> convert_module(Mod, S, L) end, + lists:foldr(F, [], imported_modules(S)). + +convert_module(Mod, S, L) -> + {ok, CoveredLines0} = analyze(S, Mod), + %% Remove strange 0 indexed line + FilterF = fun({{_, X}, _}) -> X =/= 0 end, + CoveredLines = lists:filter(FilterF, CoveredLines0), + case proplists:get_value(source, compile_info(S, Mod), "") of + "" -> L; + SrcFile -> + {ok, SrcBin} = read_file(S, SrcFile), + Src0 = lists:flatten(io_lib:format("~s", [SrcBin])), + SrcDigest = erlang:md5(SrcBin), + LinesCount = count_lines(Src0), + Cov = create_cov(CoveredLines, LinesCount), + [#{name => unicode:characters_to_binary(relative_to_cwd(SrcFile), utf8, utf8), + source_digest => hex(SrcDigest), + coverage => Cov} + | L] + end. + +expand(Path) -> expand(filename:split(Path), []). + +expand([], Acc) -> filename:join(lists:reverse(Acc)); +expand(["."|Tail], Acc) -> expand(Tail, Acc); +expand([".."|Tail], []) -> expand(Tail, []); +expand([".."|Tail], [_|Acc]) -> expand(Tail, Acc); +expand([Segment|Tail], Acc) -> expand(Tail, [Segment|Acc]). + +realpath(Path) -> realpath(filename:split(Path), "./"). + +realpath([], Acc) -> filename:absname(expand(Acc)); +realpath([Head | Tail], Acc) -> + NewAcc0 = filename:join([Acc, Head]), + NewAcc = case file:read_link(NewAcc0) of + {ok, Link} -> + case filename:pathtype(Link) of + absolute -> realpath(Link); + relative -> filename:join([Acc, Link]) + end; + _ -> NewAcc0 + end, + realpath(Tail, NewAcc). + +relative_to_cwd(Path) -> + case file:get_cwd() of + {ok, Base} -> relative_to(Path, Base); + _ -> Path + end. + +relative_to(Path, From) -> + Path1 = realpath(Path), + relative_to(filename:split(Path1), filename:split(From), Path). + +relative_to([H|T1], [H|T2], Original) -> relative_to(T1, T2, Original); +relative_to([_|_] = L1, [], _Original) -> filename:join(L1); +relative_to(_, _, Original) -> Original. + +create_cov(_CoveredLines, []) -> + []; +create_cov(CoveredLines, LinesCount) when is_integer(LinesCount) -> + create_cov(CoveredLines, lists:seq(1, LinesCount)); +create_cov([{{_,LineNo},Count}|CoveredLines], [LineNo|LineNos]) -> + [Count | create_cov(CoveredLines, LineNos)]; +create_cov(CoveredLines, [_|LineNos]) -> + [null | create_cov(CoveredLines, LineNos)]. + +%%----------------------------------------------------------------------------- +%% Generic helpers + +count_lines("") -> 1; +count_lines("\n") -> 1; +count_lines([$\n|S]) -> 1 + count_lines(S); +count_lines([_|S]) -> count_lines(S). + +%%============================================================================= +%% Tests + +-ifdef(TEST). +-define(DEBUG, true). +-include_lib("eunit/include/eunit.hrl"). + +normalize_json_str(Str) when is_binary(Str) -> + jsx:encode(jsx:decode(Str, [return_maps, {labels, existing_atom}])); +normalize_json_str(Str) when is_list(Str) -> + normalize_json_str(iolist_to_binary(Str)). 
+ +convert_file_test() -> + Expected = + jsx:decode( + <<"{\"service_job_id\": \"1234567890\"," + " \"service_name\": \"travis-ci\"," + " \"source_files\": [" + " {\"name\": \"example.rb\"," + " \"source_digest\": \"3feb892deff06e7accbe2457eec4cd8b\"," + " \"coverage\": [null,1,null]" + " }," + " {\"name\": \"two.rb\"," + " \"source_digest\": \"fce46ee19702bd262b2e4907a005aff4\"," + " \"coverage\": [null,1,0,null]" + " }" + " ]" + "}">>, [return_maps, {labels, existing_atom}]), + Report = #{service_job_id => <<"1234567890">>, + service_name => <<"travis-ci">>}, + Got = jsx:decode( + convert_file("example.rb", Report, mock_s()), + [return_maps, {labels, existing_atom}]), + ?assertEqual(Expected, Got). + +convert_and_send_file_test() -> + Expected = + normalize_json_str( + "{\"service_job_id\": \"1234567890\"," + " \"service_name\": \"travis-ci\"," + " \"source_files\": [" + " {\"name\": \"example.rb\"," + " \"source_digest\": \"3feb892deff06e7accbe2457eec4cd8b\"," + " \"coverage\": [null,1,null]" + " }," + " {\"name\": \"two.rb\"," + " \"source_digest\": \"fce46ee19702bd262b2e4907a005aff4\"," + " \"coverage\": [null,1,0,null]" + " }" + " ]" + "}"), + Report = #{service_job_id => <<"1234567890">>, + service_name => <<"travis-ci">>}, + ?assertEqual(ok, convert_and_send_file("example.rb", Report, mock_s(Expected))). + +send_test_() -> + Expected = + normalize_json_str( + "{\"service_job_id\": \"1234567890\",\n" + " \"service_name\": \"travis-ci\",\n" + " \"source_files\": [\n" + " {\"name\": \"example.rb\",\n" + " \"source_digest\": \"\tdef four\\n 4\\nend\",\n" + " \"coverage\": [null,1,null]\n" + " }" + " ]" + "}"), + [ ?_assertEqual(ok, send(Expected, mock_s(Expected))) + , ?_assertThrow({error, {_,_}}, send("foo", mock_s(<<"bar">>))) + ]. + +%%----------------------------------------------------------------------------- +%% Generic helpers tests + +count_lines_test_() -> + [ ?_assertEqual(1, count_lines("")) + , ?_assertEqual(1, count_lines("foo")) + , ?_assertEqual(1, count_lines("bar\n")) + , ?_assertEqual(2, count_lines("foo\nbar")) + , ?_assertEqual(3, count_lines("foo\n\nbar")) + , ?_assertEqual(2, count_lines("foo\nbar\n")) + ]. + +expand_test_() -> + [ ?_assertEqual("/a/b", expand(["/", "a", "b"], [])) + , ?_assertEqual("a/c" , expand(["a", "b", "..", ".", "c"], [])) + , ?_assertEqual("/" , expand(["..", ".", "/"], [])) + ]. 
+ +realpath_and_relative_test_() -> + {setup, + fun() -> %% setup + {ok, Cwd} = file:get_cwd(), + Root = string:strip( + os:cmd("mktemp -d -t coveralls_tests.XXX"), right, $\n), + ok = file:set_cwd(Root), + {Cwd, Root} + end, + fun({Cwd, _Root}) -> %% teardown + ok = file:set_cwd(Cwd) + end, + fun({_Cwd, Root}) -> %% tests + Filename = "file", + Dir1 = filename:join([Root, "_test_src", "dir1"]), + Dir2 = filename:join([Root, "_test_src", "dir2"]), + File1 = filename:join([Dir1, Filename]), + File2 = filename:join([Dir2, Filename]), + Link1 = filename:join([ Root + , "_test_build" + , "default" + , "lib" + , "mylib" + , "src" + , "dir1" + ]), + Link2 = filename:join([ Root + , "_test_build" + , "default" + , "lib" + , "mylib" + , "src" + , "dir2" + ]), + [ ?_assertEqual(ok, + filelib:ensure_dir(filename:join([Dir1, "dummy"]))) + , ?_assertEqual(ok, + filelib:ensure_dir(filename:join([Dir2, "dummy"]))) + , ?_assertEqual(ok, + file:write_file(File1, "data")) + , ?_assertEqual(ok, + file:write_file(File2, "data")) + , ?_assertEqual(ok, + filelib:ensure_dir(Link1)) + , ?_assertEqual(ok, + filelib:ensure_dir(Link2)) + , ?_assertEqual(ok, + file:make_symlink(Dir1, Link1)) + , ?_assertEqual(ok, + file:make_symlink(filename:join([ ".." + , ".." + , ".." + , ".." + , ".." + , "_test_src" + , "dir2" + ]) + , Link2)) + , ?_assertEqual(realpath(File1), + realpath(filename:join([Link1, Filename]))) + , ?_assertEqual(realpath(File2), + realpath(filename:join([Link2, Filename]))) + , ?_assertEqual(realpath(File1), + filename:absname( + relative_to_cwd( + filename:join([Link1, Filename])))) + , ?_assertEqual(realpath(File2), + filename:absname( + relative_to_cwd( + filename:join([Link2, Filename])))) + ] + end}. + +%%----------------------------------------------------------------------------- +%% Callback mockery tests +module_info_compile_test() -> + ?assert(is_tuple(lists:keyfind(source, 1, module_info_compile(?MODULE)))). + +start_wrapper_test_() -> + F = fun() -> ok end, + StartedF = fun() -> {error,{already_started,mod}} end, + ErrorF = fun() -> {error, {error, mod}} end, + [ ?_assertEqual(ok, (start_wrapper([F, StartedF]))()) + , ?_assertError(_, (start_wrapper([F, StartedF, ErrorF]))()) + ]. + +%%----------------------------------------------------------------------------- +%% Converting modules tests + +create_cov_test() -> + ?assertEqual([null, 3, null, 4, null], + create_cov([{{foo, 2}, 3}, {{foo, 4}, 4}], 5)). + +convert_module_test() -> + Expected = + [#{name => <<"example.rb">>, + source_digest => <<"3feb892deff06e7accbe2457eec4cd8b">>, + coverage => [null,1,null]}], + ?assertEqual(Expected, convert_module('example.rb', mock_s(), [])). + +convert_modules_test() -> + Expected = + [#{name => <<"example.rb">>, + source_digest => <<"3feb892deff06e7accbe2457eec4cd8b">>, + coverage => [null,1,null] + }, + #{name => <<"two.rb">>, + source_digest => <<"fce46ee19702bd262b2e4907a005aff4">>, + coverage => [null,1,0,null] + }], + ?assertEqual(Expected, + convert_modules(mock_s())). + +%%----------------------------------------------------------------------------- +%% Setup helpers + +mock_s() -> mock_s(""). 
+ +mock_s(Json) -> + #s{ importer = + fun(_) -> ok end + , module_lister = + fun() -> ['example.rb', 'two.rb'] end + , mod_info = + fun('example.rb') -> [{source,"example.rb"}]; + ('two.rb') -> [{source,"two.rb"}] + end + , file_reader = + fun("example.rb") -> + {ok, <<"def four\n 4\nend">>}; + ("two.rb") -> + {ok, <<"def seven\n eight\n nine\nend">>} + end + , wildcard_reader = fun(AnyFile) -> [AnyFile] end + , analyser = + fun('example.rb' , calls, line) -> {ok, [ {{'example.rb', 2}, 1} ]}; + ('two.rb' , calls, line) -> {ok, [ {{'two.rb', 2}, 1} + , {{'two.rb', 3}, 0} + ] + } + end + , poster_init = + fun() -> ok end + , poster = + fun(post, {_, _, _, Body}, _, _) -> + case binary:match(Body, Json) =/= nomatch of + true -> {ok, {{"", 200, ""}, "", ""}}; + false -> {ok, {{"", 666, ""}, "", "Not expected"}} + end + end + }. + +-endif. + +%%% Local Variables: +%%% allout-layout: t +%%% erlang-indent-level: 2 +%%% End: diff --git a/server/_build/default/plugins/coveralls/src/rebar3_coveralls.erl b/server/_build/default/plugins/coveralls/src/rebar3_coveralls.erl new file mode 100644 index 0000000..01084ee --- /dev/null +++ b/server/_build/default/plugins/coveralls/src/rebar3_coveralls.erl @@ -0,0 +1,220 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%%% Copyright (c) 2013-2016, Markus Ekholm +%%% All rights reserved. +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions are met: +%%% * Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. +%%% * Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. +%%% * Neither the name of the <organization> nor the +%%% names of its contributors may be used to endorse or promote products +%%% derived from this software without specific prior written permission. +%%% +%%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +%%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL MARKUS EKHOLM BE LIABLE FOR ANY +%%% DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +%%% (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +%%% LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +%%% ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +%%% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +%%% THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +%%% +%%% @copyright 2013-2016 (c) Yury Gargay <yury.gargay@gmail.com>, +%%% Markus Ekholm <markus@botten.org> +%%% @end +%%% @author Yury Gargay <yury.gargay@gmail.com> +%%% @author Markus Ekholm <markus@botten.org> +%%% @doc coveralls plugin for rebar3 +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +-module(rebar3_coveralls). +-behaviour(provider). + +-export([ init/1 + , do/1 + , format_error/1 + ]). + +-define(PROVIDER, send). +-define(DEPS, [{default, app_discovery}]). 
+ +%% =================================================================== +%% Public API +%% =================================================================== +-spec init(rebar_state:t()) -> {ok, rebar_state:t()}. +init(State) -> + Provider = providers:create([ {name, ?PROVIDER} + , {module, ?MODULE} + , {namespace, coveralls} + , {bare, true} + , {deps, ?DEPS} + , {example, "rebar3 coveralls send"} + , {short_desc, "Send coverdata to coveralls."} + , {desc, "Send coveralls to coveralls."} + , {opts, []} + ]), + {ok, rebar_state:add_provider(State, Provider)}. + +-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}. +do(State) -> + rebar_api:info("Running coveralls...", []), + ConvertAndSend = fun coveralls:convert_and_send_file/2, + Get = fun(Key, Def) -> rebar_state:get(State, Key, Def) end, + GetLocal = fun(Key, Def) -> rebar_state:get(State, Key, Def) end, + MaybeSkip = fun() -> ok end, + ok = cover_paths(State), + try + do_coveralls(ConvertAndSend, + Get, + GetLocal, + MaybeSkip, + 'send-coveralls'), + {ok, State} + catch throw:{error, {ErrCode, Msg}} -> + io:format("Failed sending coverdata to coveralls, ~p: ~p", + [ErrCode, Msg]), + {error, rebar_abort} + end. + +-spec format_error(any()) -> iolist(). +format_error(Reason) -> + io_lib:format("~p", [Reason]). + +cover_paths(State) -> + lists:foreach(fun(App) -> + AppDir = rebar_app_info:out_dir(App), + true = code:add_patha(filename:join([AppDir, "ebin"])), + _ = code:add_patha(filename:join([AppDir, "test"])) + end, + rebar_state:project_apps(State)), + _ = code:add_patha(filename:join([rebar_dir:base_dir(State), "test"])), + ok. + +%%============================================================================= +%% Internal functions + +to_binary(List) when is_list(List) -> + unicode:characters_to_binary(List, utf8, utf8); +to_binary(Atom) when is_atom(Atom) -> + atom_to_binary(Atom, utf8); +to_binary(Bin) when is_binary(Bin) -> + Bin. +to_boolean(true) -> true; +to_boolean(1) -> true; +to_boolean(_) -> false. 
+ +do_coveralls(ConvertAndSend, Get, GetLocal, MaybeSkip, Task) -> + File = GetLocal(coveralls_coverdata, undef), + ServiceName = to_binary(GetLocal(coveralls_service_name, undef)), + ServiceJobId = to_binary(GetLocal(coveralls_service_job_id, undef)), + F = fun(X) -> X =:= undef orelse X =:= false end, + CoverExport = Get(cover_export_enabled, false), + case lists:any(F, [File, ServiceName, ServiceJobId, CoverExport]) of + true -> + throw({error, + "need to specify coveralls_* and cover_export_enabled " + "in rebar.config"}); + false -> + ok + end, + + Report0 = + #{service_job_id => ServiceJobId, + service_name => ServiceName}, + Opts = [{coveralls_repo_token, repo_token, string}, + {coveralls_service_pull_request, service_pull_request, string}, + {coveralls_commit_sha, commit_sha, string}, + {coveralls_service_number, service_number, string}, + {coveralls_parallel, parallel, boolean}], + Report = + lists:foldl(fun({Cfg, Key, Conv}, R) -> + case GetLocal(Cfg, undef) of + undef -> R; + Value when Conv =:= string -> maps:put(Key, to_binary(Value), R); + Value when Conv =:= boolean -> maps:put(Key, to_boolean(Value), R); + Value -> maps:put(Key, Value, R) + end + end, Report0, Opts), + + DoCoveralls = (GetLocal(do_coveralls_after_ct, true) andalso Task == ct) + orelse (GetLocal(do_coveralls_after_eunit, true) andalso Task == eunit) + orelse Task == 'send-coveralls', + case DoCoveralls of + true -> + io:format("rebar_coveralls:" + "Exporting cover data " + "from ~s using service ~s and jobid ~s~n", + [File, ServiceName, ServiceJobId]), + ok = ConvertAndSend(File, Report); + _ -> MaybeSkip() + end. + + +%%============================================================================= +%% Tests + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +task_test_() -> + File = "foo", + ServiceJobId = "123", + ServiceName = "bar", + ConvertAndSend = fun("foo", #{service_job_id := <<"123">>, + service_name := <<"bar">>}) -> ok end, + ConvertWithOpts = fun("foo", #{service_job_id := <<"123">>, + service_name := <<"bar">>, + service_pull_request := <<"PR#1">>, + parallel := true}) -> ok + end, + Get = fun(cover_export_enabled, _) -> true end, + GetLocal = fun(coveralls_coverdata, _) -> File; + (coveralls_service_name, _) -> ServiceName; + (coveralls_service_job_id, _) -> ServiceJobId; + (do_coveralls_after_eunit, _) -> true; + (do_coveralls_after_ct, _) -> true; + (coveralls_repo_token, _) -> []; + (_, Default) -> Default + end, + GetLocalAllOpt = fun(coveralls_coverdata, _) -> File; + (coveralls_service_name, _) -> ServiceName; + (coveralls_service_job_id, _) -> ServiceJobId; + (coveralls_service_pull_request, _) -> "PR#1"; + (coveralls_parallel, _) -> true; + (do_coveralls_after_eunit, _) -> true; + (do_coveralls_after_ct, _) -> true; + (coveralls_repo_token, _) -> []; + (_, Default) -> Default + end, + GetLocalWithCoverallsTask + = fun(coveralls_coverdata, _) -> File; + (coveralls_service_name, _) -> ServiceName; + (coveralls_service_job_id, _) -> ServiceJobId; + (do_coveralls_after_eunit, _) -> false; + (do_coveralls_after_ct, _) -> false; + (coveralls_repo_token, _) -> []; + (_, Default) -> Default + end, + GetBroken = fun(cover_export_enabled, _) -> false end, + MaybeSkip = fun() -> skip end, + [ ?_assertEqual(ok, do_coveralls(ConvertAndSend, Get, GetLocal, MaybeSkip, eunit)) + , ?_assertEqual(ok, do_coveralls(ConvertAndSend, Get, GetLocal, MaybeSkip, ct)) + , ?_assertThrow({error, _}, do_coveralls(ConvertAndSend, GetBroken, GetLocal, MaybeSkip, eunit)) + , ?_assertThrow({error, _}, 
do_coveralls(ConvertAndSend, GetBroken, GetLocal, MaybeSkip, ct)) + , ?_assertEqual(skip, do_coveralls(ConvertAndSend, Get, GetLocalWithCoverallsTask, MaybeSkip, eunit)) + , ?_assertEqual(skip, do_coveralls(ConvertAndSend, Get, GetLocalWithCoverallsTask, MaybeSkip, ct)) + , ?_assertEqual(ok, do_coveralls(ConvertAndSend, Get, GetLocalWithCoverallsTask, MaybeSkip, 'send-coveralls')) + , ?_assertEqual(ok, do_coveralls(ConvertWithOpts, Get, GetLocalAllOpt, MaybeSkip, eunit)) + , ?_assertEqual(ok, do_coveralls(ConvertWithOpts, Get, GetLocalAllOpt, MaybeSkip, ct)) + ]. + +-endif. + +%%% Local Variables: +%%% allout-layout: t +%%% erlang-indent-level: 2 +%%% End: diff --git a/server/_build/default/plugins/jsx/CHANGES.md b/server/_build/default/plugins/jsx/CHANGES.md new file mode 100644 index 0000000..8f404be --- /dev/null +++ b/server/_build/default/plugins/jsx/CHANGES.md @@ -0,0 +1,214 @@ +v2.8.2 + +* enable `debug_info` for rebar3 + +v2.8.1 + +* enable `debug_info` when used via mix +* accept `erlang:timestamp` as input to the parser + + +v2.8.0 + +* add `JSX_FORCE_MAPS` env var for forcing decoding to maps rather than + attempting to autodetect + +v2.7.2 + +* fix an issue where tuples were assumed to be jsx ast and not checked +* mask a `function_clause` error in encoder with a `badarg` error for api unity + +v2.7.1 + +* support for milliseconds in datetimes + +v2.7.0 + +* `return_tail` option +* fixes for edoc generation + +v2.6.2 + +* ensure maps are always enabled when compiling via mix + +v2.6.1 + +* hex.pm maintenance release + +v2.6.0 + +* equivalent to v2.5.3 but created for semver reasons + +v2.5.3 + +* add a `mix.exs` to be buildable by both mix and rebar +* minor README updates + +v2.5.2 + +* fix regression parsing <<"-0e...">> (thanks @c-bik) + +v2.5.1 + +* assume all datetimes are UTC time and add `Z` designator to indicate +* fix parsing issue with datetimes in arrays + +v2.5.0 + +* `consult/2` function for reading a file directly to a json term +* `maps_always` build flag for always returning maps on platforms + that support them +* dialyzer fixes + +v2.4.0 + +* enough performance improvements to justify a new version. 2-3x + speedup depending on mode of operation + +v2.3.1 + +* fixes an issue where astral plane json escape sequences were + inadvertently being converted to the unicode replacement + character + +v2.3 + +* switched to a faster implementation of string parsing in both + the decoder and encoder +* expand `uescape` option to the decoder +* allow control codes in json passed to decoder (contrary to the spec, + yes) + +v2.2 + +* `return_maps` option +* `uescape` option for 7-bit clean output +* add `Makefile` for slightly better `erlang.mk` compatibility +* add `maps_support/0` call to determine whether `jsx` was compiled + with support for maps or not + +v2.1.1 + +* faster generation of json via iolists +* `repeat_keys` option + +v2.1 + +* force the end of streams with `end_json` in addition to `end_stream` +* support for encoding erlang datetime tuples to iso8601 format +* allow a single trailing comma in objects and arrays + +v2.0.4 + +* more typespec adjustments + +v2.0.3 + +* update some typespecs to make them more comprehensive + +v2.0.2 + +* fixes travis-ci spec + +v2.0.1 + +* fix regression in output of empty objects/arrays + +v2.0 + +* jsx is much more pragmatic by default; common json errors are silently + ignored (and fixed). 
stricter parsing must be enabled with options +* add support for encoding otp 17.0's new maps data type +* removed `pre_encode` and `post_decode` options in favour of making jsx + functions easier to wrap and customize +* streaming behavior is now disabled by default and must be requested explicitly +* removed deprecated function names (`to_json`, `to_term`, `term_to_json`, etc) +* expanded test coverage + + +v1.4.5 + +* various fixes to typespecs uncovered by dialyzer +* allow integer keys during encoding +* convert atoms (other than `true`, `false` and `null`) to strings during encoding + +v1.4.4 + +* typespec for `json_term/0` fixed +* incorrect boolean shortcircuiting fixed in multibyte escape processing + +v1.4.3 + +* add empty rebar.config for mix build tool +* add `attempt_atom` option for decoding json objects +* fix a bug related to multibyte codepoints and streaming input +* add a missing error state in the encoder + +v1.4.2 + +* build apparatus cleaned up and streamlined +* new `{raw, <<"json goes here">>}` intermediate form to support direct generation of json +* bugfixes involving inappropriate exceptions from jsx functions + +v1.4.1 + +* fixes a bug with interaction between `dirty_strings` and even numbers of escape characters +* performance enhancements + +v1.4 + +* radically refactored decoder +* `dirty_strings` now behaves intuitively in decoding. bad codepoints, bad utf8, illegal characters and escapes (except `"` and `'` if `single_quoted_strings` is enabled) are ignored completely +* `incomplete_handler` & `error_handler` are now available for use, see documentation in README + +v1.3.3 + +* `pre_encode` now orders input in the order you'd expect + +v1.3.2 + +* `pre_encode` is now able to handle tuples *correctly* + +v1.3.1 + +* `pre_encode` is now able to handle tuples + +v1.3 + +* introduces `prettify/1` and `minify/1`, shortcuts for `format/2` +* introduce `encode/1,2` and `decode/1,2` as primary interface to built in tokenizers. 
`to_json/1,2` and `to_term/1,2` remain accessible but not advertised +* new `parser/3` function exposes syntactic analysis stage for use with user defined tokenizers +* improved documentation + +v1.2.1 + +* fixes incorrect handling of escaped forward slashes, thanks bob ippolito + +v1.2 + +* rewritten handling of string escaping to improve performance +* `pre_encode` and `post_decode` hooks, see README +* `relax` option + +v1.1.2 + +* add `dirty_strings` option +* more fixes for invalid unicode in strings + +v1.1.1 + +* fixes bug regarding handling of invalid unicode in R14Bxx + +v1.1 + +* improvements to string escaping and json generation performance + +v1.0.2 + +* fixes to function specs +* rewritten README +* `comments` option + +v1.0.1 + +* rebar fix diff --git a/server/_build/default/plugins/jsx/LICENSE b/server/_build/default/plugins/jsx/LICENSE new file mode 100644 index 0000000..de1b470 --- /dev/null +++ b/server/_build/default/plugins/jsx/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca> + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/server/_build/default/plugins/jsx/ebin/jsx.app b/server/_build/default/plugins/jsx/ebin/jsx.app new file mode 100644 index 0000000..9640152 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx.app @@ -0,0 +1,11 @@ +{application,jsx, + [{description,"a streaming, evented json parsing toolkit"}, + {vsn,"2.10.0"}, + {modules,[jsx,jsx_config,jsx_consult,jsx_decoder,jsx_encoder, + jsx_parser,jsx_to_json,jsx_to_term,jsx_verify]}, + {registered,[]}, + {applications,[kernel,stdlib]}, + {env,[]}, + {files,["src","rebar.config","rebar.config.script", + "rebar.lockREADME.md","CHANGES.md","LICENSE"]}, + {licenses,["MIT"]}]}. 
diff --git a/server/_build/default/plugins/jsx/ebin/jsx.beam b/server/_build/default/plugins/jsx/ebin/jsx.beam Binary files differnew file mode 100644 index 0000000..4621425 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_config.beam b/server/_build/default/plugins/jsx/ebin/jsx_config.beam Binary files differnew file mode 100644 index 0000000..26bff71 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_config.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_consult.beam b/server/_build/default/plugins/jsx/ebin/jsx_consult.beam Binary files differnew file mode 100644 index 0000000..eee3fc3 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_consult.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_decoder.beam b/server/_build/default/plugins/jsx/ebin/jsx_decoder.beam Binary files differnew file mode 100644 index 0000000..da0efad --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_decoder.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_encoder.beam b/server/_build/default/plugins/jsx/ebin/jsx_encoder.beam Binary files differnew file mode 100644 index 0000000..8014a51 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_encoder.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_parser.beam b/server/_build/default/plugins/jsx/ebin/jsx_parser.beam Binary files differnew file mode 100644 index 0000000..91fe7b2 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_parser.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_to_json.beam b/server/_build/default/plugins/jsx/ebin/jsx_to_json.beam Binary files differnew file mode 100644 index 0000000..b5cbd7b --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_to_json.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_to_term.beam b/server/_build/default/plugins/jsx/ebin/jsx_to_term.beam Binary files differnew file mode 100644 index 0000000..5707d46 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_to_term.beam diff --git a/server/_build/default/plugins/jsx/ebin/jsx_verify.beam b/server/_build/default/plugins/jsx/ebin/jsx_verify.beam Binary files differnew file mode 100644 index 0000000..e2b0ca9 --- /dev/null +++ b/server/_build/default/plugins/jsx/ebin/jsx_verify.beam diff --git a/server/_build/default/plugins/jsx/hex_metadata.config b/server/_build/default/plugins/jsx/hex_metadata.config new file mode 100644 index 0000000..e4aa92d --- /dev/null +++ b/server/_build/default/plugins/jsx/hex_metadata.config @@ -0,0 +1,15 @@ +{<<"app">>,<<"jsx">>}. +{<<"build_tools">>,[<<"rebar3">>]}. +{<<"description">>,<<"a streaming, evented json parsing toolkit">>}. +{<<"files">>, + [<<"src/jsx.app.src">>,<<"CHANGES.md">>,<<"LICENSE">>,<<"rebar.config">>, + <<"rebar.config.script">>,<<"src/jsx.erl">>,<<"src/jsx_config.erl">>, + <<"src/jsx_config.hrl">>,<<"src/jsx_consult.erl">>, + <<"src/jsx_decoder.erl">>,<<"src/jsx_encoder.erl">>, + <<"src/jsx_parser.erl">>,<<"src/jsx_to_json.erl">>, + <<"src/jsx_to_term.erl">>,<<"src/jsx_verify.erl">>]}. +{<<"licenses">>,[<<"MIT">>]}. +{<<"links">>,[]}. +{<<"name">>,<<"jsx">>}. +{<<"requirements">>,[]}. +{<<"version">>,<<"2.10.0">>}. diff --git a/server/_build/default/plugins/jsx/rebar.config b/server/_build/default/plugins/jsx/rebar.config new file mode 100644 index 0000000..a647508 --- /dev/null +++ b/server/_build/default/plugins/jsx/rebar.config @@ -0,0 +1,2 @@ +{edoc_opts, [{preprocess, true}]}. 
+{erl_opts, [debug_info]}. diff --git a/server/_build/default/plugins/jsx/rebar.config.script b/server/_build/default/plugins/jsx/rebar.config.script new file mode 100644 index 0000000..5841b7d --- /dev/null +++ b/server/_build/default/plugins/jsx/rebar.config.script @@ -0,0 +1,15 @@ +Def0 = case erlang:is_builtin(erlang, binary_to_integer, 1) andalso + erlang:is_builtin(erlang, binary_to_float, 1) of + true -> []; + false -> [{d, no_binary_to_whatever}] + end, +Def1 = case erlang:is_builtin(erlang, is_map, 1) of + true -> [{d, maps_support}|Def0]; + false -> Def0 + end, +Defs = case os:getenv("JSX_FORCE_MAPS") of + false -> Def1; + _ -> [{d, maps_always}|Def1] + end, +lists:keystore(erl_opts, 1, CONFIG, + {erl_opts, proplists:get_value(erl_opts, CONFIG, []) ++ Defs}). diff --git a/server/_build/default/plugins/jsx/src/jsx.app.src b/server/_build/default/plugins/jsx/src/jsx.app.src new file mode 100644 index 0000000..b67880d --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx.app.src @@ -0,0 +1,11 @@ +{application,jsx, + [{description,"a streaming, evented json parsing toolkit"}, + {vsn,"2.10.0"}, + {modules,[jsx,jsx_encoder,jsx_decoder,jsx_parser,jsx_to_json, + jsx_to_term,jsx_config,jsx_verify]}, + {registered,[]}, + {applications,[kernel,stdlib]}, + {env,[]}, + {files,["src","rebar.config","rebar.config.script", + "rebar.lockREADME.md","CHANGES.md","LICENSE"]}, + {licenses,["MIT"]}]}. diff --git a/server/_build/default/plugins/jsx/src/jsx.erl b/server/_build/default/plugins/jsx/src/jsx.erl new file mode 100644 index 0000000..acdf420 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx.erl @@ -0,0 +1,527 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx). + +-export([encode/1, encode/2, decode/1, decode/2]). +-export([is_json/1, is_json/2, is_term/1, is_term/2]). +-export([format/1, format/2, minify/1, prettify/1]). +-export([consult/1, consult/2]). +-export([encoder/3, decoder/3, parser/3]). +-export([resume/3]). +-export([maps_support/0]). + +-export_type([json_term/0, json_text/0, token/0]). +-export_type([encoder/0, decoder/0, parser/0, internal_state/0]). +-export_type([config/0]). + + +-ifdef(TEST). +%% data and helper functions for tests +-export([test_cases/0, special_test_cases/0]). +-export([init/1, handle_event/2]). +-endif. + + +-ifndef(maps_support). +-type json_term() :: [{binary() | atom(), json_term()}] | [{},...] 
+ | [json_term()] | [] + | true | false | null + | integer() | float() + | binary() | atom() + | calendar:datetime(). +-endif. + +-ifdef(maps_support). +-type json_term() :: [{binary() | atom(), json_term()}] | [{},...] + | [json_term()] | [] + | map() + | true | false | null + | integer() | float() + | binary() | atom() + | calendar:datetime(). +-endif. + +-type json_text() :: binary(). + +-type config() :: jsx_config:config(). + +-spec encode(Source::json_term()) -> json_text(). + +encode(Source) -> encode(Source, []). + +-spec encode(Source::json_term(), Config::jsx_to_json:config()) -> json_text() | {incomplete, encoder()}. + +encode(Source, Config) -> jsx_to_json:to_json(Source, Config). + + +-spec decode(Source::json_text()) -> json_term(). + +decode(Source) -> decode(Source, []). + +-spec decode(Source::json_text(), Config::jsx_to_term:config()) -> json_term() | {incomplete, decoder()}. + +decode(Source, Config) -> jsx_to_term:to_term(Source, Config). + + +-spec format(Source::json_text()) -> json_text(). + +format(Source) -> format(Source, []). + +-spec format(Source::json_text(), Config::jsx_to_json:config()) -> json_text() | {incomplete, decoder()}. + +format(Source, Config) -> jsx_to_json:format(Source, Config). + + +-spec minify(Source::json_text()) -> json_text(). + +minify(Source) -> format(Source, []). + + +-spec prettify(Source::json_text()) -> json_text(). + +prettify(Source) -> format(Source, [space, {indent, 2}]). + + +-spec is_json(Source::any()) -> boolean(). + +is_json(Source) -> is_json(Source, []). + +-spec is_json(Source::any(), Config::jsx_verify:config()) -> boolean() | {incomplete, decoder()}. + +is_json(Source, Config) -> jsx_verify:is_json(Source, Config). + + +-spec is_term(Source::any()) -> boolean(). + +is_term(Source) -> is_term(Source, []). + +-spec is_term(Source::any(), Config::jsx_verify:config()) -> boolean() | {incomplete, encoder()}. + +is_term(Source, Config) -> jsx_verify:is_term(Source, Config). + + +-spec consult(File::file:name_all()) -> list(json_term()). + +consult(File) -> consult(File, []). + +-spec consult(File::file:name_all(), Config::jsx_to_term:config()) -> list(json_term()). + +consult(File, Config) -> jsx_consult:consult(File, Config). + + +-type decoder() :: fun((json_text() | end_stream | end_json) -> any()). + +-spec decoder(Handler::module(), State::any(), Config::list()) -> decoder(). + +decoder(Handler, State, Config) -> jsx_decoder:decoder(Handler, State, Config). + + +-type encoder() :: fun((json_term() | end_stream | end_json) -> any()). + +-spec encoder(Handler::module(), State::any(), Config::list()) -> encoder(). + +encoder(Handler, State, Config) -> jsx_encoder:encoder(Handler, State, Config). + + +-type token() :: [token()] + | start_object + | end_object + | start_array + | end_array + | {key, binary()} + | {string, binary()} + | binary() + | {number, integer() | float()} + | {integer, integer()} + | {float, float()} + | integer() + | float() + | {literal, true} + | {literal, false} + | {literal, null} + | true + | false + | null + | end_json. + + +-type parser() :: fun((token() | end_stream) -> any()). + +-spec parser(Handler::module(), State::any(), Config::list()) -> parser(). + +parser(Handler, State, Config) -> jsx_parser:parser(Handler, State, Config). + +-opaque internal_state() :: tuple(). + +-spec resume(Term::json_text() | token(), InternalState::internal_state(), Config::list()) -> any(). 
+ +resume(Term, {decoder, State, Handler, Acc, Stack}, Config) -> + jsx_decoder:resume(Term, State, Handler, Acc, Stack, jsx_config:parse_config(Config)); +resume(Term, {parser, State, Handler, Stack}, Config) -> + jsx_parser:resume(Term, State, Handler, Stack, jsx_config:parse_config(Config)). + + +-spec maps_support() -> boolean(). + +-ifndef(maps_support). +maps_support() -> false. +-endif. +-ifdef(maps_support). +maps_support() -> true. +-endif. + + +-ifdef(TEST). + +-include_lib("eunit/include/eunit.hrl"). + + +%% test handler +init([]) -> []. + +handle_event(end_json, State) -> lists:reverse([end_json] ++ State); +handle_event(Event, State) -> [Event] ++ State. + + +test_cases() -> + empty_array() + ++ nested_array() + ++ empty_object() + ++ nested_object() + ++ strings() + ++ literals() + ++ integers() + ++ floats() + ++ compound_object(). + +%% segregate these so we can skip them in `jsx_to_term` +special_test_cases() -> special_objects() ++ special_array(). + + +empty_array() -> [{"[]", <<"[]">>, [], [start_array, end_array]}]. + + +nested_array() -> + [{ + "[[[]]]", + <<"[[[]]]">>, + [[[]]], + [start_array, start_array, start_array, end_array, end_array, end_array] + }]. + + +empty_object() -> [{"{}", <<"{}">>, [{}], [start_object, end_object]}]. + + +nested_object() -> + [{ + "{\"key\":{\"key\":{}}}", + <<"{\"key\":{\"key\":{}}}">>, + [{<<"key">>, [{<<"key">>, [{}]}]}], + [ + start_object, + {key, <<"key">>}, + start_object, + {key, <<"key">>}, + start_object, + end_object, + end_object, + end_object + ] + }]. + + +naked_strings() -> + Raw = [ + "", + "hello world" + ], + [ + { + String, + <<"\"", (list_to_binary(String))/binary, "\"">>, + list_to_binary(String), + [{string, list_to_binary(String)}] + } + || String <- Raw + ]. + + +strings() -> + naked_strings() + ++ [ wrap_with_array(Test) || Test <- naked_strings() ] + ++ [ wrap_with_object(Test) || Test <- naked_strings() ]. + + +naked_integers() -> + Raw = [ + 1, 2, 3, + 127, 128, 129, + 255, 256, 257, + 65534, 65535, 65536, + 18446744073709551616, + 18446744073709551617 + ], + [ + { + integer_to_list(X), + list_to_binary(integer_to_list(X)), + X, + [{integer, X}] + } + || X <- Raw ++ [ -1 * Y || Y <- Raw ] ++ [0] + ]. + + +integers() -> + naked_integers() + ++ [ wrap_with_array(Test) || Test <- naked_integers() ] + ++ [ wrap_with_object(Test) || Test <- naked_integers() ]. + + +naked_floats() -> + Raw = [ + 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, + 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, + 1234567890.0987654321, + 0.0e0, + 1234567890.0987654321e16, + 0.1e0, 0.1e1, 0.1e2, 0.1e4, 0.1e8, 0.1e16, 0.1e308, + 1.0e0, 1.0e1, 1.0e2, 1.0e4, 1.0e8, 1.0e16, 1.0e308, + 2.2250738585072014e-308, %% min normalized float + 1.7976931348623157e308, %% max normalized float + 5.0e-324, %% min denormalized float + 2.225073858507201e-308 %% max denormalized float + ], + [ + { + sane_float_to_list(X), + list_to_binary(sane_float_to_list(X)), + X, + [{float, X}] + } + || X <- Raw ++ [ -1 * Y || Y <- Raw ] + ]. + + +floats() -> + naked_floats() + ++ [ wrap_with_array(Test) || Test <- naked_floats() ] + ++ [ wrap_with_object(Test) || Test <- naked_floats() ]. + + +naked_literals() -> + [ + { + atom_to_list(Literal), + atom_to_binary(Literal, unicode), + Literal, + [{literal, Literal}] + } + || Literal <- [true, false, null] + ]. + + +literals() -> + naked_literals() + ++ [ wrap_with_array(Test) || Test <- naked_literals() ] + ++ [ wrap_with_object(Test) || Test <- naked_literals() ]. 
+ + +compound_object() -> + [{ + "[{\"alpha\":[1,2,3],\"beta\":{\"alpha\":[1.0,2.0,3.0],\"beta\":[true,false]}},[{}]]", + <<"[{\"alpha\":[1,2,3],\"beta\":{\"alpha\":[1.0,2.0,3.0],\"beta\":[true,false]}},[{}]]">>, + [[{<<"alpha">>, [1, 2, 3]}, {<<"beta">>, [{<<"alpha">>, [1.0, 2.0, 3.0]}, {<<"beta">>, [true, false]}]}], [[{}]]], + [ + start_array, + start_object, + {key, <<"alpha">>}, + start_array, + {integer, 1}, + {integer, 2}, + {integer, 3}, + end_array, + {key, <<"beta">>}, + start_object, + {key, <<"alpha">>}, + start_array, + {float, 1.0}, + {float, 2.0}, + {float, 3.0}, + end_array, + {key, <<"beta">>}, + start_array, + {literal, true}, + {literal, false}, + end_array, + end_object, + end_object, + start_array, + start_object, + end_object, + end_array, + end_array + ] + }]. + + +special_objects() -> + [ + { + "[{key, atom}]", + <<"{\"key\":\"atom\"}">>, + [{key, atom}], + [start_object, {key, <<"key">>}, {string, <<"atom">>}, end_object] + }, + { + "[{1, true}]", + <<"{\"1\":true}">>, + [{1, true}], + [start_object, {key, <<"1">>}, {literal, true}, end_object] + } + ]. + + +special_array() -> + [ + { + "[foo, bar]", + <<"[\"foo\",\"bar\"]">>, + [foo, bar], + [start_array, {string, <<"foo">>}, {string, <<"bar">>}, end_array] + } + ]. + + +wrap_with_array({Title, JSON, Term, Events}) -> + { + "[" ++ Title ++ "]", + <<"[", JSON/binary, "]">>, + [Term], + [start_array] ++ Events ++ [end_array] + }. + + +wrap_with_object({Title, JSON, Term, Events}) -> + { + "{\"key\":" ++ Title ++ "}", + <<"{\"key\":", JSON/binary, "}">>, + [{<<"key">>, Term}], + [start_object, {key, <<"key">>}] ++ Events ++ [end_object] + }. + + +sane_float_to_list(X) -> + [Output] = io_lib:format("~p", [X]), + Output. + + +incremental_decode(JSON) -> + Final = lists:foldl( + fun(Byte, Decoder) -> {incomplete, F} = Decoder(Byte), F end, + decoder(jsx, [], [stream]), + json_to_bytes(JSON) + ), + Final(end_stream). + + +incremental_parse(Events) -> + Final = lists:foldl( + fun(Event, Parser) -> {incomplete, F} = Parser(Event), F end, + parser(?MODULE, [], [stream]), + lists:map(fun(X) -> [X] end, Events) + ), + Final(end_stream). + + +%% used to convert a json text into a list of codepoints to be incrementally +%% parsed +json_to_bytes(JSON) -> json_to_bytes(JSON, []). + +json_to_bytes(<<>>, Acc) -> [<<>>] ++ lists:reverse(Acc); +json_to_bytes(<<X, Rest/binary>>, Acc) -> json_to_bytes(Rest, [<<X>>] ++ Acc). + + +%% actual tests! +decode_test_() -> + Data = test_cases(), + [{Title, ?_assertEqual(Events ++ [end_json], (decoder(?MODULE, [], []))(JSON))} + || {Title, JSON, _, Events} <- Data + ] ++ + [{Title ++ " (incremental)", ?_assertEqual(Events ++ [end_json], incremental_decode(JSON))} + || {Title, JSON, _, Events} <- Data + ]. + + +parse_test_() -> + Data = test_cases(), + [{Title, ?_assertEqual(Events ++ [end_json], (parser(?MODULE, [], []))(Events ++ [end_json]))} + || {Title, _, _, Events} <- Data + ] ++ + [{Title ++ " (incremental)", ?_assertEqual(Events ++ [end_json], incremental_parse(Events))} + || {Title, _, _, Events} <- Data + ]. + + +encode_test_() -> + Data = test_cases(), + [ + { + Title, ?_assertEqual( + Events ++ [end_json], + (jsx:encoder(jsx, [], []))(Term) + ) + } || {Title, _, Term, Events} <- Data + ]. 
+ +end_stream_test_() -> + Tokens = [start_object, end_object, end_json], + [ + {"encoder end_stream", ?_assertEqual( + Tokens, + begin + {incomplete, F} = (jsx:parser(jsx, [], [stream]))([start_object, end_object]), + F(end_stream) + end + )}, + {"encoder end_json", ?_assertEqual( + Tokens, + begin + {incomplete, F} = (jsx:parser(jsx, [], [stream]))([start_object, end_object]), + F(end_json) + end + )}, + {"decoder end_stream", ?_assertEqual( + Tokens, + begin {incomplete, F} = (jsx:decoder(jsx, [], [stream]))(<<"{}">>), F(end_stream) end + )}, + {"decoder end_json", ?_assertEqual( + Tokens, + begin {incomplete, F} = (jsx:decoder(jsx, [], [stream]))(<<"{}">>), F(end_json) end + )} + ]. + + +-endif. diff --git a/server/_build/default/plugins/jsx/src/jsx_config.erl b/server/_build/default/plugins/jsx/src/jsx_config.erl new file mode 100644 index 0000000..47cbcf7 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_config.erl @@ -0,0 +1,346 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_config). + +-export([parse_config/1]). +-export([config_to_list/1]). +-export([extract_config/1, valid_flags/0]). + +-ifdef(TEST). +-export([fake_error_handler/3]). +-endif. + +-include("jsx_config.hrl"). + +-type handler_type(Handler) :: + fun((jsx:json_text() | end_stream | + jsx:json_term(), + {decoder, any(), module(), null | list(), list()} | + {parser, any(), module(), list()} | + {encoder, any(), module()}, + list({pre_encode, fun((any()) -> any())} | + {error_handler, Handler} | + {incomplete_handler, Handler} | + atom())) -> any()). +-type handler() :: handler_type(handler()). +-export_type([handler/0]). + +-type config() :: #config{}. +-export_type([config/0]). + +%% parsing of jsx config +-spec parse_config(Config::proplists:proplist()) -> config(). + +parse_config(Config) -> parse_config(Config, #config{}). 
+ +parse_config([], Config) -> Config; +parse_config([escaped_forward_slashes|Rest], Config) -> + parse_config(Rest, Config#config{escaped_forward_slashes=true}); +parse_config([escaped_strings|Rest], Config) -> + parse_config(Rest, Config#config{escaped_strings=true}); +parse_config([unescaped_jsonp|Rest], Config) -> + parse_config(Rest, Config#config{unescaped_jsonp=true}); +parse_config([dirty_strings|Rest], Config) -> + parse_config(Rest, Config#config{dirty_strings=true}); +parse_config([multi_term|Rest], Config) -> + parse_config(Rest, Config#config{multi_term=true}); +parse_config([return_tail|Rest], Config) -> + parse_config(Rest, Config#config{return_tail=true}); +%% retained for backwards compat, now does nothing however +parse_config([repeat_keys|Rest], Config) -> + parse_config(Rest, Config); +parse_config([uescape|Rest], Config) -> + parse_config(Rest, Config#config{uescape=true}); +parse_config([strict|Rest], Config) -> + parse_config(Rest, Config#config{ + strict_comments=true, + strict_commas=true, + strict_utf8=true, + strict_single_quotes=true, + strict_escapes=true, + strict_control_codes=true + }); +parse_config([{strict, Strict}|Rest], Config) -> + parse_strict(Strict, Rest, Config); +parse_config([stream|Rest], Config) -> + parse_config(Rest, Config#config{stream=true}); +parse_config([{error_handler, ErrorHandler}|Rest] = Options, Config) when is_function(ErrorHandler, 3) -> + case Config#config.error_handler of + false -> parse_config(Rest, Config#config{error_handler=ErrorHandler}) + ; _ -> erlang:error(badarg, [Options, Config]) + end; +parse_config([{incomplete_handler, IncompleteHandler}|Rest] = Options, Config) when is_function(IncompleteHandler, 3) -> + case Config#config.incomplete_handler of + false -> parse_config(Rest, Config#config{incomplete_handler=IncompleteHandler}) + ; _ -> erlang:error(badarg, [Options, Config]) + end; +parse_config(_Options, _Config) -> erlang:error(badarg). + + +parse_strict([], Rest, Config) -> parse_config(Rest, Config); +parse_strict([comments|Strict], Rest, Config) -> + parse_strict(Strict, Rest, Config#config{strict_comments=true}); +parse_strict([trailing_commas|Strict], Rest, Config) -> + parse_strict(Strict, Rest, Config#config{strict_commas=true}); +parse_strict([utf8|Strict], Rest, Config) -> + parse_strict(Strict, Rest, Config#config{strict_utf8=true}); +parse_strict([single_quotes|Strict], Rest, Config) -> + parse_strict(Strict, Rest, Config#config{strict_single_quotes=true}); +parse_strict([escapes|Strict], Rest, Config) -> + parse_strict(Strict, Rest, Config#config{strict_escapes=true}); +parse_strict([control_codes|Strict], Rest, Config) -> + parse_strict(Strict, Rest, Config#config{strict_control_codes=true}); +parse_strict(_Strict, _Rest, _Config) -> + erlang:error(badarg). + + + +-spec config_to_list(Config::config()) -> proplists:proplist(). + +config_to_list(Config) -> + reduce_config(lists:map( + fun ({error_handler, F}) -> {error_handler, F}; + ({incomplete_handler, F}) -> {incomplete_handler, F}; + ({Key, true}) -> Key + end, + lists:filter( + fun({_, false}) -> false; (_) -> true end, + lists:zip(record_info(fields, config), tl(tuple_to_list(Config))) + ) + )). + + +reduce_config(Input) -> reduce_config(Input, [], []). 
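parse_config/2 folds a user-supplied proplist into the #config{} record, and config_to_list/1 together with reduce_config (continued below) turns a record back into the shortest equivalent proplist, collapsing the strict_* fields into `strict` or `{strict, [...]}`. A hedged sketch of the round trip; the output ordering is inferred from the record field order rather than documented anywhere:

```erlang
%% sketch, assuming jsx_config is on the code path
Config = jsx_config:parse_config([{strict, [utf8]}, stream]),
%% Config is a #config{} with strict_utf8 = true and stream = true
jsx_config:config_to_list(Config).
%% => [stream, {strict, [utf8]}]
```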
+ +reduce_config([], Output, Strict) -> + case length(Strict) of + 0 -> lists:reverse(Output); + 5 -> lists:reverse(Output) ++ [strict]; + _ -> lists:reverse(Output) ++ [{strict, lists:reverse(Strict)}] + end; +reduce_config([strict_comments|Input], Output, Strict) -> + reduce_config(Input, Output, [comments] ++ Strict); +reduce_config([strict_utf8|Input], Output, Strict) -> + reduce_config(Input, Output, [utf8] ++ Strict); +reduce_config([strict_single_quotes|Input], Output, Strict) -> + reduce_config(Input, Output, [single_quotes] ++ Strict); +reduce_config([strict_escapes|Input], Output, Strict) -> + reduce_config(Input, Output, [escapes] ++ Strict); +reduce_config([strict_control_codes|Input], Output, Strict) -> + reduce_config(Input, Output, [control_codes] ++ Strict); +reduce_config([Else|Input], Output, Strict) -> + reduce_config(Input, [Else] ++ Output, Strict). + + +-spec valid_flags() -> [atom()]. + +valid_flags() -> + [ + escaped_forward_slashes, + escaped_strings, + unescaped_jsonp, + dirty_strings, + multi_term, + return_tail, + repeat_keys, + strict, + stream, + uescape, + error_handler, + incomplete_handler + ]. + + +-spec extract_config(Config::proplists:proplist()) -> proplists:proplist(). + +extract_config(Config) -> + extract_parser_config(Config, []). + +extract_parser_config([], Acc) -> Acc; +extract_parser_config([{K,V}|Rest], Acc) -> + case lists:member(K, valid_flags()) of + true -> extract_parser_config(Rest, [{K,V}] ++ Acc) + ; false -> extract_parser_config(Rest, Acc) + end; +extract_parser_config([K|Rest], Acc) -> + case lists:member(K, valid_flags()) of + true -> extract_parser_config(Rest, [K] ++ Acc) + ; false -> extract_parser_config(Rest, Acc) + end. + + +%% eunit tests +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). 
+ + +config_test_() -> + [ + {"all flags", + ?_assertEqual( + #config{escaped_forward_slashes = true, + escaped_strings = true, + unescaped_jsonp = true, + dirty_strings = true, + multi_term = true, + return_tail = true, + strict_comments = true, + strict_commas = true, + strict_utf8 = true, + strict_single_quotes = true, + strict_escapes = true, + strict_control_codes = true, + stream = true, + uescape = true + }, + parse_config([dirty_strings, + escaped_forward_slashes, + escaped_strings, + unescaped_jsonp, + multi_term, + return_tail, + repeat_keys, + strict, + stream, + uescape + ]) + ) + }, + {"strict flag", + ?_assertEqual( + #config{strict_comments = true, + strict_commas = true, + strict_utf8 = true, + strict_single_quotes = true, + strict_escapes = true, + strict_control_codes = true + }, + parse_config([strict]) + ) + }, + {"strict selective", + ?_assertEqual( + #config{strict_comments = true}, + parse_config([{strict, [comments]}]) + ) + }, + {"strict expanded", + ?_assertEqual( + #config{strict_comments = true, + strict_utf8 = true, + strict_single_quotes = true, + strict_escapes = true + }, + parse_config([{strict, [comments, utf8, single_quotes, escapes]}]) + ) + }, + {"error_handler flag", ?_assertEqual( + #config{error_handler=fun ?MODULE:fake_error_handler/3}, + parse_config([{error_handler, fun ?MODULE:fake_error_handler/3}]) + )}, + {"two error_handlers defined", ?_assertError( + badarg, + parse_config([ + {error_handler, fun(_, _, _) -> true end}, + {error_handler, fun(_, _, _) -> false end} + ]) + )}, + {"incomplete_handler flag", ?_assertEqual( + #config{incomplete_handler=fun ?MODULE:fake_error_handler/3}, + parse_config([{incomplete_handler, fun ?MODULE:fake_error_handler/3}]) + )}, + {"two incomplete_handlers defined", ?_assertError( + badarg, + parse_config([ + {incomplete_handler, fun(_, _, _) -> true end}, + {incomplete_handler, fun(_, _, _) -> false end} + ]) + )}, + {"bad option flag", ?_assertError(badarg, parse_config([this_flag_does_not_exist]))} + ]. + + +config_to_list_test_() -> + [ + {"empty config", ?_assertEqual( + [], + config_to_list(#config{}) + )}, + {"all flags", ?_assertEqual( + [dirty_strings, + escaped_forward_slashes, + escaped_strings, + multi_term, + stream, + uescape, + unescaped_jsonp, + strict + ], + config_to_list( + #config{escaped_forward_slashes = true, + escaped_strings = true, + unescaped_jsonp = true, + dirty_strings = true, + multi_term = true, + strict_comments = true, + strict_utf8 = true, + strict_single_quotes = true, + strict_escapes = true, + strict_control_codes = true, + stream = true, + uescape = true + } + ) + )}, + {"single strict", ?_assertEqual( + [{strict, [comments]}], + config_to_list(#config{strict_comments = true}) + )}, + {"multiple strict", ?_assertEqual( + [{strict, [utf8, single_quotes, escapes]}], + config_to_list(#config{strict_utf8 = true, strict_single_quotes = true, strict_escapes = true}) + )}, + {"all strict", ?_assertEqual( + [strict], + config_to_list(#config{strict_comments = true, + strict_utf8 = true, + strict_single_quotes = true, + strict_escapes = true, + strict_control_codes = true}) + )}, + {"error handler", ?_assertEqual( + [{error_handler, fun ?MODULE:fake_error_handler/3}], + config_to_list(#config{error_handler=fun ?MODULE:fake_error_handler/3}) + )}, + {"incomplete handler", ?_assertEqual( + [{incomplete_handler, fun ?MODULE:fake_error_handler/3}], + config_to_list(#config{incomplete_handler=fun ?MODULE:fake_error_handler/3}) + )} + ]. + + +fake_error_handler(_, _, _) -> ok. 
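extract_config/1 is the complement of parse_config/1: instead of building a record it filters an options list down to the flags this module recognizes, so options meant for the term builder (for example `return_maps` or `{labels, atom}`) are dropped before the list reaches the tokenizer. A small sketch; the result comes back in reverse order because the filter prepends as it walks the list:

```erlang
jsx_config:extract_config([return_maps, {labels, atom}, strict, stream]).
%% => [stream, strict]   (return_maps and labels are not tokenizer flags)
```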
+ + +-endif. diff --git a/server/_build/default/plugins/jsx/src/jsx_config.hrl b/server/_build/default/plugins/jsx/src/jsx_config.hrl new file mode 100644 index 0000000..c89963c --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_config.hrl @@ -0,0 +1,18 @@ +-record(config, { + dirty_strings = false :: boolean(), + escaped_forward_slashes = false :: boolean(), + escaped_strings = false :: boolean(), + multi_term = false :: boolean(), + strict_comments = false :: boolean(), + strict_commas = false :: boolean(), + strict_utf8 = false :: boolean(), + strict_single_quotes = false :: boolean(), + strict_escapes = false :: boolean(), + strict_control_codes = false :: boolean(), + stream = false :: boolean(), + return_tail = false :: boolean(), + uescape = false :: boolean(), + unescaped_jsonp = false :: boolean(), + error_handler = false :: false | jsx_config:handler(), + incomplete_handler = false :: false | jsx_config:handler() +}). diff --git a/server/_build/default/plugins/jsx/src/jsx_consult.erl b/server/_build/default/plugins/jsx/src/jsx_consult.erl new file mode 100644 index 0000000..b1a4424 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_consult.erl @@ -0,0 +1,99 @@ +%% The MIT License + +%% Copyright (c) 2010-2015 Alisdair Sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_consult). + +-export([consult/2]). +-export([init/1, reset/1, handle_event/2]). + + +-record(config, { + labels = binary, + return_maps = false +}). + +-type config() :: list(). +-export_type([config/0]). + +-ifndef(maps_support). +-type json_value() :: list(json_value()) + | list({binary() | atom(), json_value()}) + | true + | false + | null + | integer() + | float() + | binary(). +-endif. + +-ifdef(maps_support). +-type json_value() :: list(json_value()) + | map() + | true + | false + | null + | integer() + | float() + | binary(). +-endif. + + +-ifdef(maps_always). +opts(Opts) -> [return_maps, multi_term] ++ Opts. +-endif. +-ifndef(maps_always). +opts(Opts) -> [multi_term] ++ Opts. +-endif. + +-spec consult(File::file:name_all(), Config::config()) -> [json_value()]. + +consult(File, Config) when is_list(Config) -> + case file:read_file(File) of + {ok, Bin} -> + {Final, _, _} = (jsx:decoder( + ?MODULE, + opts(Config), + jsx_config:extract_config(opts(Config)) + ))(Bin), + lists:reverse(Final); + {error, _} -> erlang:error(badarg) + end. + + +-type state() :: {[], proplists:proplist(), {list(), #config{}}}. 
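jsx_consult decodes a whole file of whitespace-separated JSON documents in one pass: opts/1 forces `multi_term` (plus `return_maps` on builds compiled with maps_always), consult/2 feeds the file contents to a decoder with this module as the handler, and each `end_json` event pushes one finished term onto the accumulator (see handle_event just below). A hedged usage sketch, assuming the default proplist representation of this jsx build:

```erlang
%% sketch: two JSON documents in one file, consulted in document order
ok = file:write_file("/tmp/two.json", <<"{\"a\": 1}\n{\"b\": 2}\n">>),
jsx_consult:consult("/tmp/two.json", []).
%% => [[{<<"a">>,1}], [{<<"b">>,2}]]   (maps instead, if built with maps_always)
```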
+-spec init(Config::proplists:proplist()) -> state(). + +init(Config) -> {[], Config, jsx_to_term:start_term(Config)}. + + +-spec reset(State::state()) -> state(). + +reset({Acc, Config, _}) -> {Acc, Config, jsx_to_term:start_term(Config)}. + + +-spec handle_event(Event::any(), State::state()) -> state(). + +handle_event(end_json, {Acc, Config, State}) -> + {[jsx_to_term:get_value(State)] ++ Acc, Config, State}; +handle_event(Event, {Acc, Config, State}) -> + {Acc, Config, jsx_to_term:handle_event(Event, State)}. diff --git a/server/_build/default/plugins/jsx/src/jsx_decoder.erl b/server/_build/default/plugins/jsx/src/jsx_decoder.erl new file mode 100644 index 0000000..1a834d9 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_decoder.erl @@ -0,0 +1,1916 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_decoder). + +%% inline handle_event, format_number and maybe_replace +-compile({inline, [handle_event/3]}). +-compile({inline, [format_number/1]}). +-compile({inline, [maybe_replace/2]}). +-compile({inline, [doublequote/5, singlequote/5]}). + +-export([decoder/3, resume/6]). + + +-spec decoder(Handler::module(), State::any(), Config::list()) -> jsx:decoder(). + +decoder(Handler, State, Config) -> + fun(JSON) -> start(JSON, {Handler, Handler:init(State)}, [], jsx_config:parse_config(Config)) end. + + +%% resume allows continuation from interrupted decoding without having to explicitly export +%% all states +-spec resume( + Rest::binary(), + State::atom(), + Handler::{atom(), any()}, + Acc::any(), + Stack::list(atom()), + Config::jsx:config() + ) -> jsx:decoder() | {incomplete, jsx:decoder()}. 
+ +resume(Rest, State, Handler, Acc, Stack, Config) -> + case State of + start -> start(Rest, Handler, Stack, Config); + value -> value(Rest, Handler, Stack, Config); + object -> object(Rest, Handler, Stack, Config); + array -> array(Rest, Handler, Stack, Config); + colon -> colon(Rest, Handler, Stack, Config); + key -> key(Rest, Handler, Stack, Config); + string -> string(Rest, Handler, Acc, Stack, Config); + number -> number(Rest, Handler, Acc, Stack, Config); + true -> true(Rest, Handler, Stack, Config); + false -> false(Rest, Handler, Stack, Config); + null -> null(Rest, Handler, Stack, Config); + comment -> comment(Rest, Handler, Acc, Stack, Config); + maybe_done -> maybe_done(Rest, Handler, Stack, Config); + done -> done(Rest, Handler, Stack, Config) + end. + + +-include("jsx_config.hrl"). + + +%% whitespace +-define(space, 16#20). +-define(tab, 16#09). +-define(cr, 16#0D). +-define(newline, 16#0A). + +%% object delimiters +-define(start_object, 16#7B). +-define(end_object, 16#7D). + +%% array delimiters +-define(start_array, 16#5B). +-define(end_array, 16#5D). + +%% kv seperator +-define(comma, 16#2C). +-define(doublequote, 16#22). +-define(singlequote, 16#27). +-define(colon, 16#3A). + +%% string escape sequences +-define(rsolidus, 16#5C). +-define(solidus, 16#2F). + +%% math +-define(zero, 16#30). +-define(decimalpoint, 16#2E). +-define(negative, 16#2D). +-define(positive, 16#2B). + +%% comments +-define(star, 16#2A). + + +%% some useful guards +-define(is_hex(Symbol), + (Symbol >= $a andalso Symbol =< $f) orelse + (Symbol >= $A andalso Symbol =< $F) orelse + (Symbol >= $0 andalso Symbol =< $9) +). + +-define(is_nonzero(Symbol), + Symbol >= $1 andalso Symbol =< $9 +). + + +%% error is a macro so the stack trace shows the error site when possible +-ifndef(error). +-define(error(State, Bin, Handler, Acc, Stack, Config), + case Config#config.error_handler of + false -> erlang:error(badarg); + F -> F(Bin, {decoder, State, Handler, Acc, Stack}, jsx_config:config_to_list(Config)) + end +). +-define(error(State, Bin, Handler, Stack, Config), + ?error(State, Bin, Handler, null, Stack, Config) +). +-endif. + + +incomplete(State, Rest, Handler, Stack, Config = #config{stream=false}) -> + ?error(State, Rest, Handler, Stack, Config); +incomplete(State, Rest, Handler, Stack, Config) -> + incomplete(State, Rest, Handler, unused, Stack, Config). + + +incomplete(State, Rest, Handler, Acc, Stack, Config = #config{stream=false}) -> + ?error(State, Rest, Handler, Acc, Stack, Config); +incomplete(State, Rest, Handler, Acc, Stack, Config = #config{incomplete_handler=false}) -> + {incomplete, fun(Stream) when is_binary(Stream) -> + resume(<<Rest/binary, Stream/binary>>, State, Handler, Acc, Stack, Config); + (End) when End == end_stream; End == end_json -> + case resume(<<Rest/binary, ?space/utf8>>, State, Handler, Acc, Stack, Config#config{stream=false}) of + {incomplete, _} -> ?error(State, Rest, Handler, Acc, Stack, Config); + Else -> Else + end + end + }; +incomplete(State, Rest, Handler, Acc, Stack, Config = #config{incomplete_handler=F}) -> + F(Rest, {decoder, State, Handler, Acc, Stack}, jsx_config:config_to_list(Config)). + + +handle_event(Event, {Handler, State}, _Config) -> {Handler, Handler:handle_event(Event, State)}. 
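The incomplete/6 closure above is what callers see as `{incomplete, Fun}` when the `stream` flag is set: feeding it another binary resumes in exactly the saved state, while feeding `end_stream` (or `end_json`) appends a space, reruns with stream off, and errors if the input still does not finish cleanly. Through the public API that behaviour looks roughly like this sketch, assuming jsx:decode/2 passes the `stream` option straight through to this decoder:

```erlang
{incomplete, F} = jsx:decode(<<"[1, 2">>, [stream]),
{incomplete, G} = F(<<", 3]">>),   %% still incomplete: stream mode waits for an explicit end
G(end_stream).
%% => [1,2,3]
```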
+ + +start(<<16#ef, 16#bb, 16#bf, Rest/binary>>, Handler, Stack, Config) -> + value(Rest, Handler, Stack, Config); +start(<<16#ef, 16#bb>>, Handler, Stack, Config) -> + incomplete(start, <<16#ef, 16#bb>>, Handler, Stack, Config); +start(<<16#ef>>, Handler, Stack, Config) -> + incomplete(start, <<16#ef>>, Handler, Stack, Config); +start(<<>>, Handler, Stack, Config) -> + incomplete(start, <<>>, Handler, Stack, Config); +start(Bin, Handler, Stack, Config) -> + value(Bin, Handler, Stack, Config). + + +value(<<?doublequote, Rest/binary>>, Handler, Stack, Config) -> + string(Rest, Handler, Stack, Config); +value(<<?space, Rest/binary>>, Handler, Stack, Config) -> + value(Rest, Handler, Stack, Config); +value(<<?start_object, Rest/binary>>, Handler, Stack, Config) -> + object(Rest, handle_event(start_object, Handler, Config), [key|Stack], Config); +value(<<?start_array, Rest/binary>>, Handler, Stack, Config) -> + array(Rest, handle_event(start_array, Handler, Config), [array|Stack], Config); +value(<<$t, $r, $u, $e, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, handle_event({literal, true}, Handler, Config), Stack, Config); +value(<<$f, $a, $l, $s, $e, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, handle_event({literal, false}, Handler, Config), Stack, Config); +value(<<$n, $u, $l, $l, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, handle_event({literal, null}, Handler, Config), Stack, Config); +value(<<?zero, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [?zero], [zero|Stack], Config); +value(<<$1, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$1], [integer|Stack], Config); +value(<<$2, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$2], [integer|Stack], Config); +value(<<$3, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$3], [integer|Stack], Config); +value(<<$4, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$4], [integer|Stack], Config); +value(<<$5, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$5], [integer|Stack], Config); +value(<<$6, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$6], [integer|Stack], Config); +value(<<$7, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$7], [integer|Stack], Config); +value(<<$8, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$8], [integer|Stack], Config); +value(<<$9, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$9], [integer|Stack], Config); +value(<<?negative, Rest/binary>>, Handler, Stack, Config) -> + number(Rest, Handler, [$-], [negative|Stack], Config); +value(<<?newline, Rest/binary>>, Handler, Stack, Config) -> + value(Rest, Handler, Stack, Config); +value(<<$t, Rest/binary>>, Handler, Stack, Config) -> + true(Rest, Handler, Stack, Config); +value(<<$f, Rest/binary>>, Handler, Stack, Config) -> + false(Rest, Handler, Stack, Config); +value(<<$n, Rest/binary>>, Handler, Stack, Config) -> + null(Rest, Handler, Stack, Config); +value(<<?tab, Rest/binary>>, Handler, Stack, Config) -> + value(Rest, Handler, Stack, Config); +value(<<?cr, Rest/binary>>, Handler, Stack, Config) -> + value(Rest, Handler, Stack, Config); +value(<<?singlequote, Rest/binary>>, Handler, Stack, Config=#config{strict_single_quotes=false}) -> + string(Rest, Handler, [singlequote|Stack], Config); +value(<<?end_array, _/binary>> = Rest, Handler, Stack, Config=#config{strict_commas=false}) -> + maybe_done(Rest, Handler, Stack, Config); 
+value(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) -> + ?error(value, <<?solidus, Rest/binary>>, Handler, Stack, Config); +value(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, value, [comment|Stack], Config); +value(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, value, [multicomment|Stack], Config); +value(<<?solidus>>, Handler, Stack, Config) -> + incomplete(value, <<?solidus>>, Handler, Stack, Config); +value(<<>>, Handler, Stack, Config) -> + incomplete(value, <<>>, Handler, Stack, Config); +value(Bin, Handler, Stack, Config) -> + ?error(value, Bin, Handler, Stack, Config). + + +object(<<?doublequote, Rest/binary>>, Handler, Stack, Config) -> + string(Rest, Handler, Stack, Config); +object(<<?space, Rest/binary>>, Handler, Stack, Config) -> + object(Rest, Handler, Stack, Config); +object(<<?end_object, Rest/binary>>, Handler, [key|Stack], Config) -> + maybe_done(Rest, handle_event(end_object, Handler, Config), Stack, Config); +object(<<?newline, Rest/binary>>, Handler, Stack, Config) -> + object(Rest, Handler, Stack, Config); +object(<<?tab, Rest/binary>>, Handler, Stack, Config) -> + object(Rest, Handler, Stack, Config); +object(<<?cr, Rest/binary>>, Handler, Stack, Config) -> + object(Rest, Handler, Stack, Config); +object(<<?singlequote, Rest/binary>>, Handler, Stack, Config=#config{strict_single_quotes=false}) -> + string(Rest, Handler, [singlequote|Stack], Config); +object(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) -> + ?error(object, <<?solidus, Rest/binary>>, Handler, Stack, Config); +object(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, object, [comment|Stack], Config); +object(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, object, [multicomment|Stack], Config); +object(<<?solidus>>, Handler, Stack, Config) -> + incomplete(object, <<?solidus>>, Handler, Stack, Config); +object(<<>>, Handler, Stack, Config) -> + incomplete(object, <<>>, Handler, Stack, Config); +object(Bin, Handler, Stack, Config) -> + ?error(object, Bin, Handler, Stack, Config). + + +array(<<?end_array, Rest/binary>>, Handler, [array|Stack], Config) -> + maybe_done(Rest, handle_event(end_array, Handler, Config), Stack, Config); +array(<<?space, Rest/binary>>, Handler, Stack, Config) -> + array(Rest, Handler, Stack, Config); +array(<<?newline, Rest/binary>>, Handler, Stack, Config) -> + array(Rest, Handler, Stack, Config); +array(<<?tab, Rest/binary>>, Handler, Stack, Config) -> + array(Rest, Handler, Stack, Config); +array(<<?cr, Rest/binary>>, Handler, Stack, Config) -> + array(Rest, Handler, Stack, Config); +array(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) -> + value(<<?solidus, Rest/binary>>, Handler, Stack, Config); +array(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, array, [comment|Stack], Config); +array(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, array, [multicomment|Stack], Config); +array(<<?solidus>>, Handler, Stack, Config) -> + incomplete(array, <<?solidus>>, Handler, Stack, Config); +array(<<>>, Handler, Stack, Config) -> + incomplete(array, <<>>, Handler, Stack, Config); +array(Bin, Handler, Stack, Config) -> + value(Bin, Handler, Stack, Config). 
+ + +colon(<<?colon, Rest/binary>>, Handler, [key|Stack], Config) -> + value(Rest, Handler, [object|Stack], Config); +colon(<<?space, Rest/binary>>, Handler, Stack, Config) -> + colon(Rest, Handler, Stack, Config); +colon(<<?newline, Rest/binary>>, Handler, Stack, Config) -> + colon(Rest, Handler, Stack, Config); +colon(<<?tab, Rest/binary>>, Handler, Stack, Config) -> + colon(Rest, Handler, Stack, Config); +colon(<<?cr, Rest/binary>>, Handler, Stack, Config) -> + colon(Rest, Handler, Stack, Config); +colon(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) -> + ?error(colon, <<?solidus, Rest/binary>>, Handler, Stack, Config); +colon(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, colon, [comment|Stack], Config); +colon(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, colon, [multicomment|Stack], Config); +colon(<<?solidus>>, Handler, Stack, Config) -> + incomplete(colon, <<?solidus>>, Handler, Stack, Config); +colon(<<>>, Handler, Stack, Config) -> + incomplete(colon, <<>>, Handler, Stack, Config); +colon(Bin, Handler, Stack, Config) -> + ?error(colon, Bin, Handler, Stack, Config). + + +key(<<?doublequote, Rest/binary>>, Handler, Stack, Config) -> + string(Rest, Handler, Stack, Config); +key(<<?space, Rest/binary>>, Handler, Stack, Config) -> + key(Rest, Handler, Stack, Config); +key(<<?end_object, Rest/binary>>, Handler, [key|Stack], Config=#config{strict_commas=false}) -> + maybe_done(<<?end_object, Rest/binary>>, Handler, [object|Stack], Config); +key(<<?newline, Rest/binary>>, Handler, Stack, Config) -> + key(Rest, Handler, Stack, Config); +key(<<?tab, Rest/binary>>, Handler, Stack, Config) -> + key(Rest, Handler, Stack, Config); +key(<<?cr, Rest/binary>>, Handler, Stack, Config) -> + key(Rest, Handler, Stack, Config); +key(<<?singlequote, Rest/binary>>, Handler, Stack, Config=#config{strict_single_quotes=false}) -> + string(Rest, Handler, [singlequote|Stack], Config); +key(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) -> + ?error(key, <<?solidus, Rest/binary>>, Handler, Stack, Config); +key(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, key, [comment|Stack], Config); +key(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, key, [multicomment|Stack], Config); +key(<<?solidus>>, Handler, Stack, Config) -> + incomplete(key, <<?solidus>>, Handler, Stack, Config); +key(<<>>, Handler, Stack, Config) -> + incomplete(key, <<>>, Handler, Stack, Config); +key(Bin, Handler, Stack, Config) -> + ?error(key, Bin, Handler, Stack, Config). + + +%% note that if you encounter an error from string and you can't find the clause that +%% caused it here, it might be in unescape below +string(Bin, Handler, Stack, Config) -> + string(Bin, Handler, [], Stack, Config). 
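string/4 only seeds the accumulator for string/5 below. Worth noting from the clauses above: single-quoted strings are accepted by default (the `singlequote` stack marker records which quote opened the string) and are rejected only when strict single_quotes is set. A hedged illustration through the public API, assuming the default proplist representation of this jsx build:

```erlang
jsx:decode(<<"{'key': 'value'}">>).
%% => [{<<"key">>,<<"value">>}]      (single quotes tolerated by default)
jsx:decode(<<"{'key': 'value'}">>, [{strict, [single_quotes]}]).
%% => raises badarg
```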
+ + +string(<<?doublequote, Rest/binary>>, Handler, Acc, Stack, Config) -> + doublequote(Rest, Handler, Acc, Stack, Config); +string(<<?singlequote, Rest/binary>>, Handler, Acc, Stack, Config) -> + singlequote(Rest, Handler, Acc, Stack, Config); +string(<<?solidus, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace(?solidus, Config)], Stack, Config); +string(<<?rsolidus/utf8, Rest/binary>>, Handler, Acc, Stack, Config) -> + unescape(Rest, Handler, Acc, Stack, Config); +%% TODO this is pretty gross and i don't like it +string(<<X/utf8, Rest/binary>> = Bin, Handler, Acc, Stack, Config=#config{uescape=true}) -> + case X of + X when X < 16#80 -> count(Bin, Handler, Acc, Stack, Config); + X -> string(Rest, Handler, [Acc, json_escape_sequence(X)], Stack, Config) + end; +%% u+2028 +string(<<226, 128, 168, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace(16#2028, Config)], Stack, Config); +%% u+2029 +string(<<226, 128, 169, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace(16#2029, Config)], Stack, Config); +string(<<X/utf8, _/binary>> = Bin, Handler, Acc, Stack, Config=#config{strict_control_codes=true}) when X > 16#1f -> + count(Bin, Handler, Acc, Stack, Config); +string(<<_/utf8, _/binary>> = Bin, Handler, Acc, Stack, Config=#config{strict_control_codes=false}) -> + count(Bin, Handler, Acc, Stack, Config); +%% necessary for bytes that are badly formed utf8 that won't match in `count` +string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{dirty_strings=true}) -> + string(Rest, Handler, [Acc, X], Stack, Config); +%% u+fffe and u+ffff for R14BXX (subsequent runtimes will happily match with /utf8 +string(<<239, 191, 190, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, <<16#fffe/utf8>>], Stack, Config); +string(<<239, 191, 191, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, <<16#ffff/utf8>>], Stack, Config); +string(<<>>, Handler, Acc, Stack, Config) -> + incomplete(string, <<>>, Handler, Acc, Stack, Config); +string(<<X>>, Handler, Acc, Stack, Config) when X >= 2#11000000 -> + incomplete(string, <<X>>, Handler, Acc, Stack, Config); +string(<<X, Y>>, Handler, Acc, Stack, Config) when X >= 2#11100000, Y >= 2#10000000 -> + incomplete(string, <<X, Y>>, Handler, Acc, Stack, Config); +string(<<X, Y, Z>>, Handler, Acc, Stack, Config) + when X >= 2#11100000, Y >= 2#10000000, Z >= 2#10000000 -> + incomplete(string, <<X, Y, Z>>, Handler, Acc, Stack, Config); +%% surrogates +string(<<237, X, _, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) + when X >= 160 -> + string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config); +%% overlong encodings and missing continuations of a 2 byte sequence +string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) + when X >= 192, X =< 223 -> + strip_continuations(Rest, Handler, Acc, Stack, Config, 1); +%% overlong encodings and missing continuations of a 3 byte sequence +string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) + when X >= 224, X =< 239 -> + strip_continuations(Rest, Handler, Acc, Stack, Config, 2); +%% overlong encodings and missing continuations of a 4 byte sequence +string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) + when X >= 240, X =< 247 -> + strip_continuations(Rest, Handler, Acc, Stack, Config, 3); +%% incompletes and unexpected bytes, including orphan 
continuations +string(<<_, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) -> + string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config); +string(Bin, Handler, Acc, Stack, Config) -> ?error(string, Bin, Handler, Acc, Stack, Config). + + +count(Bin, Handler, Acc, Stack, Config) -> + Size = count(Bin, 0, Config), + <<Clean:Size/binary, Rest/binary>> = Bin, + string(Rest, Handler, [Acc, Clean], Stack, Config). + + +%% explicitly whitelist ascii set for faster parsing. really? really. someone should +%% submit a patch that unrolls simple guards +count(<<32, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<33, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<?doublequote, _/binary>>, N, _) -> N; +count(<<35, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<36, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<37, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<38, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<?singlequote, _/binary>>, N, _) -> N; +count(<<40, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<41, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<42, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<43, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<44, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<45, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<46, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<?solidus, _/binary>>, N, _) -> N; +count(<<48, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<49, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<50, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<51, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<52, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<53, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<54, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<55, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<56, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<57, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<58, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<59, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<60, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<61, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<62, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<63, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<64, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<65, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<66, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<67, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<68, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<69, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<70, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<71, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<72, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<73, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<74, Rest/binary>>, N, 
Config) -> + count(Rest, N + 1, Config); +count(<<75, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<76, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<77, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<78, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<79, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<80, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<81, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<82, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<83, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<84, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<85, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<86, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<87, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<88, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<89, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<90, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<91, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<?rsolidus, _/binary>>, N, _) -> N; +count(<<93, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<94, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<95, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<96, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<97, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<98, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<99, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<100, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<101, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<102, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<103, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<104, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<105, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<106, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<107, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<108, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<109, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<110, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<111, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<112, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<113, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<114, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<115, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<116, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<117, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<118, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<119, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<120, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<121, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<122, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<123, Rest/binary>>, N, Config) -> + count(Rest, N + 
1, Config); +count(<<124, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<125, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<126, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<127, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<_, Rest/binary>>, N, Config=#config{dirty_strings=true}) -> + count(Rest, N + 1, Config); +count(<<_/utf8, _/binary>>, N, #config{uescape=true}) -> N; +count(<<X/utf8, Rest/binary>>, N, Config=#config{strict_control_codes=false}) when X < 32 -> + count(Rest, N + 1, Config); +count(<<X/utf8, _/binary>>, N, #config{strict_control_codes=true}) when X < 32 -> N; +count(<<X/utf8, Rest/binary>>, N, Config) -> + case X of + X when X < 16#800 -> count(Rest, N + 2, Config); + %% jsonp escaping + 16#2028 -> N; + 16#2029 -> N; + X when X < 16#10000 -> count(Rest, N + 3, Config); + _ -> count(Rest, N + 4, Config) + end; +count(_, N, _) -> N. + + +doublequote(Rest, Handler, Acc, [key|_] = Stack, Config) -> + colon(Rest, handle_event({key, iolist_to_binary(Acc)}, Handler, Config), Stack, Config); +doublequote(Rest, Handler, Acc, [singlequote|_] = Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace(?doublequote, Config)], Stack, Config); +doublequote(<<>>, Handler, Acc, [singlequote|_] = Stack, Config) -> + incomplete(string, <<?doublequote>>, Handler, Acc, Stack, Config); +doublequote(Rest, Handler, Acc, Stack, Config) -> + maybe_done(Rest, handle_event({string, iolist_to_binary(Acc)}, Handler, Config), Stack, Config). + + +singlequote(Rest, Handler, Acc, [singlequote, key|Stack], Config) -> + colon(Rest, handle_event({key, iolist_to_binary(Acc)}, Handler, Config), [key|Stack], Config); +singlequote(Rest, Handler, Acc, [singlequote|Stack], Config) -> + maybe_done(Rest, handle_event({string, iolist_to_binary(Acc)}, Handler, Config), Stack, Config); +singlequote(Rest, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, ?singlequote], Stack, Config). + + +%% strips continuation bytes after bad utf bytes, guards against both too short +%% and overlong sequences. N is the maximum number of bytes to strip +strip_continuations(<<Rest/binary>>, Handler, Acc, Stack, Config, 0) -> + string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config); +strip_continuations(<<X, Rest/binary>>, Handler, Acc, Stack, Config, N) when X >= 128, X =< 191 -> + strip_continuations(Rest, Handler, Acc, Stack, Config, N - 1); +%% if end of input is reached before stripping the max number of continuations +%% possible magic numbers are reinserted into the stream that get us back to +%% the same state without complicated machinery +strip_continuations(<<>>, Handler, Acc, Stack, Config, N) -> + case N of + 1 -> incomplete(string, <<192>>, Handler, Acc, Stack, Config); + 2 -> incomplete(string, <<224>>, Handler, Acc, Stack, Config); + 3 -> incomplete(string, <<240>>, Handler, Acc, Stack, Config) + end; +%% not a continuation byte, insert a replacement character for sequence thus +%% far and dispatch back to string +strip_continuations(<<Rest/binary>>, Handler, Acc, Stack, Config, _) -> + string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config). 
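strip_continuations/6 and the non-strict string clauses above implement the lenient UTF-8 path: a malformed byte, along with any continuation bytes trailing it, is replaced with U+FFFD instead of aborting the decode, while strict utf8 turns the same input into an error. A hedged sketch of the observable difference:

```erlang
%% a lone continuation byte (16#80) inside a string
jsx:decode(<<"[\"a", 16#80, "b\"]">>).
%% => [<<"a", 16#fffd/utf8, "b">>]    (replaced with U+FFFD by default)
jsx:decode(<<"[\"a", 16#80, "b\"]">>, [strict]).
%% => raises badarg                   (strict utf8 rejects it)
```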
+ + +%% this all gets really gross and should probably eventually be folded into +%% but for now it fakes being part of string on incompletes and errors +unescape(<<?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config=#config{dirty_strings=true}) -> + string(<<?rsolidus, Rest/binary>>, Handler, [Acc, <<?rsolidus>>], Stack, Config); +unescape(<<C, Rest/binary>>, Handler, Acc, Stack, Config=#config{dirty_strings=true}) -> + string(Rest, Handler, [Acc, <<?rsolidus, C>>], Stack, Config); +unescape(<<$b, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($\b, Config)], Stack, Config); +unescape(<<$f, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($\f, Config)], Stack, Config); +unescape(<<$n, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($\n, Config)], Stack, Config); +unescape(<<$r, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($\r, Config)], Stack, Config); +unescape(<<$t, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($\t, Config)], Stack, Config); +unescape(<<?doublequote, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($\", Config)], Stack, Config); +unescape(<<?singlequote, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_single_quotes=false}) -> + string(Rest, Handler, [Acc, <<?singlequote>>], Stack, Config); +unescape(<<?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($\\, Config)], Stack, Config); +unescape(<<?solidus, Rest/binary>>, Handler, Acc, Stack, Config) -> + string(Rest, Handler, [Acc, maybe_replace($/, Config)], Stack, Config); +unescape(<<$u, F, A, B, C, ?rsolidus, $u, G, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config) + when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B), + (X == $c orelse X == $d orelse X == $e orelse X == $f orelse X == $C orelse X == $D orelse X == $E orelse X == $F), + (F == $d orelse F == $D), + (G == $d orelse G == $D), + ?is_hex(B), ?is_hex(C), ?is_hex(Y), ?is_hex(Z) + -> + High = erlang:list_to_integer([$d, A, B, C], 16), + Low = erlang:list_to_integer([$d, X, Y, Z], 16), + Codepoint = (High - 16#d800) * 16#400 + (Low - 16#dc00) + 16#10000, + string(Rest, Handler, [Acc, <<Codepoint/utf8>>], Stack, Config); +unescape(<<$u, F, A, B, C, ?rsolidus, $u, W, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config) + when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B), + (F == $d orelse F == $D), + ?is_hex(B), ?is_hex(C), ?is_hex(W), ?is_hex(X), ?is_hex(Y), ?is_hex(Z) + -> + case Config#config.strict_utf8 of + true -> ?error(<<$u, $d, A, B, C, ?rsolidus, $u, W, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config); + false -> string(Rest, Handler, [Acc, <<16#fffd/utf8>>, <<16#fffd/utf8>>], Stack, Config) + end; +unescape(<<$u, F, A, B, C, ?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config) + when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B), + (F == $d orelse F == $D), + ?is_hex(B), ?is_hex(C) + -> + incomplete(string, <<?rsolidus, $u, $d, A, B, C, ?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config); +unescape(<<$u, F, A, B, C>>, Handler, Acc, Stack, Config) + when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B), + (F == $d orelse F == $D), + ?is_hex(B), ?is_hex(C) + -> + 
incomplete(string, <<?rsolidus, $u, $d, A, B, C>>, Handler, Acc, Stack, Config); +unescape(<<$u, A, B, C, D, Rest/binary>>, Handler, Acc, Stack, Config) + when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> + case erlang:list_to_integer([A, B, C, D], 16) of + Codepoint when Codepoint < 16#d800; Codepoint > 16#dfff -> + string(Rest, Handler, [Acc, maybe_replace(Codepoint, Config)], Stack, Config); + _ when Config#config.strict_utf8 -> + ?error(string, <<?rsolidus, $u, A, B, C, D, Rest/binary>>, Handler, Acc, Stack, Config); + _ -> string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config) + end; +unescape(Bin, Handler, Acc, Stack, Config) -> + case is_partial_escape(Bin) of + true -> incomplete(string, <<?rsolidus/utf8, Bin/binary>>, Handler, Acc, Stack, Config); + false -> case Config#config.strict_escapes of + true -> ?error(string, <<?rsolidus, Bin/binary>>, Handler, Acc, Stack, Config); + false -> string(Bin, Handler, [Acc, <<?rsolidus>>], Stack, Config) + end + end. + + +is_partial_escape(<<$u, A, B, C>>) when ?is_hex(A), ?is_hex(B), ?is_hex(C) -> true; +is_partial_escape(<<$u, A, B>>) when ?is_hex(A), ?is_hex(B) -> true; +is_partial_escape(<<$u, A>>) when ?is_hex(A) -> true; +is_partial_escape(<<$u>>) -> true; +is_partial_escape(<<>>) -> true; +is_partial_escape(_) -> false. + + +maybe_replace(C, #config{dirty_strings=true}) -> <<C>>; +maybe_replace($\b, #config{escaped_strings=true}) -> <<$\\, $b>>; +maybe_replace($\t, #config{escaped_strings=true}) -> <<$\\, $t>>; +maybe_replace($\n, #config{escaped_strings=true}) -> <<$\\, $n>>; +maybe_replace($\f, #config{escaped_strings=true}) -> <<$\\, $f>>; +maybe_replace($\r, #config{escaped_strings=true}) -> <<$\\, $r>>; +maybe_replace($\", #config{escaped_strings=true}) -> <<$\\, $\">>; +maybe_replace($/, Config=#config{escaped_strings=true}) -> + case Config#config.escaped_forward_slashes of + true -> <<$\\, $/>> + ; false -> <<$/>> + end; +maybe_replace($\\, #config{escaped_strings=true}) -> <<$\\, $\\>>; +maybe_replace(X, Config=#config{escaped_strings=true}) when X == 16#2028; X == 16#2029 -> + case Config#config.unescaped_jsonp of + true -> <<X/utf8>> + ; false -> json_escape_sequence(X) + end; +maybe_replace(X, #config{escaped_strings=true}) when X < 32 -> + json_escape_sequence(X); +maybe_replace(X, _Config) -> <<X/utf8>>. + + +%% convert a codepoint to it's \uXXXX equiv. +json_escape_sequence(X) when X < 65536 -> + <<A:4, B:4, C:4, D:4>> = <<X:16>>, + <<$\\, $u, (to_hex(A)), (to_hex(B)), (to_hex(C)), (to_hex(D))>>; +json_escape_sequence(X) -> + Adjusted = X - 16#10000, + <<A:10, B:10>> = <<Adjusted:20>>, + [json_escape_sequence(A + 16#d800), json_escape_sequence(B + 16#dc00)]. + + +%% ascii "1" is [49], "2" is [50], etc... +to_hex(10) -> $a; +to_hex(11) -> $b; +to_hex(12) -> $c; +to_hex(13) -> $d; +to_hex(14) -> $e; +to_hex(15) -> $f; +to_hex(X) -> X + 48. 
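json_escape_sequence/1 implements the \uXXXX escaping used by the uescape and escaped_strings paths: BMP codepoints become a single escape, and anything above U+FFFF is split into a UTF-16 surrogate pair. The arithmetic, spelled out for one astral codepoint in plain Erlang that mirrors the bit-splitting above:

```erlang
%% U+1F600 as a surrogate pair, mirroring json_escape_sequence/1
X        = 16#1F600,
Adjusted = X - 16#10000,                      %% 16#0F600, fits in 20 bits
High     = 16#d800 + (Adjusted bsr 10),       %% 16#D83D
Low      = 16#dc00 + (Adjusted band 16#3FF),  %% 16#DE00
%% which jsx renders as the escape text "\ud83d\ude00"
{High, Low}.
%% => {55357, 56832}
```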
+ + +number(<<$e, Rest/binary>>, Handler, Acc, [integer|Stack], Config) -> + number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config); +number(<<$E, Rest/binary>>, Handler, Acc, [integer|Stack], Config) -> + number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config); +number(<<$e, Rest/binary>>, Handler, Acc, [zero|Stack], Config) -> + number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config); +number(<<$E, Rest/binary>>, Handler, Acc, [zero|Stack], Config) -> + number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config); +number(<<>>, Handler, Acc, [State|Stack], Config=#config{stream=false}) -> + NumType = case State of + zero -> integer; + integer -> integer; + decimal -> float; + exp -> float + end, + finish_number(<<>>, Handler, {NumType, iolist_to_binary(Acc)}, Stack, Config); +number(<<>>, Handler, Acc, Stack, Config) -> + incomplete(number, <<>>, Handler, Acc, Stack, Config); +number(Bin, Handler, Acc, [State|Stack], Config) -> + Counted = case State of + zero -> zero(Bin, 0); + integer -> integer(Bin, 0); + negative -> negative(Bin, 0); + initialdecimal -> initialdecimal(Bin, 0); + decimal -> decimal(Bin, 0); + e -> e(Bin, 0); + ex -> ex(Bin, 0); + exp -> exp(Bin, 0) + end, + case Counted of + {finish_integer, Size} -> + <<Clean:Size/binary, Rest/binary>> = Bin, + finish_number(Rest, Handler, {integer, iolist_to_binary([Acc, Clean])}, Stack, Config); + {finish_float, Size} -> + <<Clean:Size/binary, Rest/binary>> = Bin, + finish_number(Rest, Handler, {float, iolist_to_binary([Acc, Clean])}, Stack, Config); + {error, Size} -> + <<Clean:Size/binary, Rest/binary>> = Bin, + ?error(number, Rest, Handler, [Acc, Clean], Stack, Config); + {NewState, Size} -> + <<Clean:Size/binary, Rest/binary>> = Bin, + number(Rest, Handler, [Acc, Clean], [NewState|Stack], Config) + end. + + +zero(<<?decimalpoint, Rest/binary>>, N) -> initialdecimal(Rest, N + 1); +zero(<<$e, _/binary>>, N) -> {integer, N}; +zero(<<$E, _/binary>>, N) -> {integer, N}; +zero(<<>>, N) -> {zero, N}; +zero(_, N) -> {finish_integer, N}. + + +integer(<<$0, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$1, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$2, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$3, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$4, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$5, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$6, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$7, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$8, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<$9, Rest/binary>>, N) -> integer(Rest, N + 1); +integer(<<?decimalpoint, Rest/binary>>, N) -> initialdecimal(Rest, N + 1); +integer(<<$e, _/binary>>, N) -> {integer, N}; +integer(<<$E, _/binary>>, N) -> {integer, N}; +integer(<<>>, N) -> {integer, N}; +integer(_, N) -> {finish_integer, N}. + + +negative(<<$0, Rest/binary>>, N) -> zero(Rest, N + 1); +negative(<<$1, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$2, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$3, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$4, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$5, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$6, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$7, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$8, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<$9, Rest/binary>>, N) -> integer(Rest, N + 1); +negative(<<>>, N) -> {negative, N}; +negative(_, N) -> {error, N}. 
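number/5 above dispatches to these single-purpose scanners, each of which only counts how many bytes belong to the number and reports either a new state (input exhausted), `finish_integer`/`finish_float` (a delimiter was hit), or `error`. A commented trace of one call, under the assumption that value/4 has already consumed the leading minus into the accumulator:

```erlang
%% trace only (these scanners are not exported):
%% number(<<"12.5e3,">>, Handler, [$-], [negative|Stack], Config)
%%   negative(<<"12.5e3,">>, 0)  "1" -> integer,        N = 1
%%   integer                     "2" -> integer,        N = 2
%%   integer                     "." -> initialdecimal, N = 3
%%   initialdecimal              "5" -> decimal,        N = 4
%%   decimal                     "e" -> e,              N = 5
%%   e                           "3" -> exp,            N = 6
%%   exp sees ","                    -> {finish_float, 6}
%% number/5 splits off those 6 bytes and finish_number/5 emits {float, -1.25e4}.
```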
+ + +initialdecimal(<<$0, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$1, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$2, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$3, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$4, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$5, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$6, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$7, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$8, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<$9, Rest/binary>>, N) -> decimal(Rest, N + 1); +initialdecimal(<<>>, N) -> {initialdecimal, N}; +initialdecimal(_, N) -> {error, N}. + + +decimal(<<$0, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$1, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$2, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$3, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$4, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$5, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$6, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$7, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$8, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$9, Rest/binary>>, N) -> decimal(Rest, N + 1); +decimal(<<$e, Rest/binary>>, N) -> e(Rest, N + 1); +decimal(<<$E, Rest/binary>>, N) -> e(Rest, N + 1); +decimal(<<>>, N) -> {decimal, N}; +decimal(_, N) -> {finish_float, N}. + + +e(<<$0, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$1, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$2, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$3, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$4, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$5, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$6, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$7, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$8, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<$9, Rest/binary>>, N) -> exp(Rest, N + 1); +e(<<?positive, Rest/binary>>, N) -> ex(Rest, N + 1); +e(<<?negative, Rest/binary>>, N) -> ex(Rest, N + 1); +e(<<>>, N) -> {e, N}; +e(_, N) -> {error, N}. + + +ex(<<$0, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$1, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$2, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$3, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$4, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$5, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$6, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$7, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$8, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<$9, Rest/binary>>, N) -> exp(Rest, N + 1); +ex(<<>>, N) -> {ex, N}; +ex(_, N) -> {error, N}. + + +exp(<<$0, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$1, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$2, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$3, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$4, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$5, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$6, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$7, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$8, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<$9, Rest/binary>>, N) -> exp(Rest, N + 1); +exp(<<>>, N) -> {exp, N}; +exp(_, N) -> {finish_float, N}. + + +finish_number(Rest, Handler, Acc, Stack, Config) -> + maybe_done(Rest, handle_event(format_number(Acc), Handler, Config), Stack, Config). + + +-ifndef(no_binary_to_whatever). 
+format_number({integer, Acc}) -> {integer, binary_to_integer(Acc)}; +format_number({float, Acc}) -> {float, binary_to_float(Acc)}. +-else. +format_number({integer, Acc}) -> {integer, list_to_integer(unicode:characters_to_list(Acc))}; +format_number({float, Acc}) -> {float, list_to_float(unicode:characters_to_list(Acc))}. +-endif. + + +true(<<$r, $u, $e, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, handle_event({literal, true}, Handler, Config), Stack, Config); +true(<<$r, $u>>, Handler, Stack, Config) -> + incomplete(true, <<$r, $u>>, Handler, Stack, Config); +true(<<$r>>, Handler, Stack, Config) -> + incomplete(true, <<$r>>, Handler, Stack, Config); +true(<<>>, Handler, Stack, Config) -> + incomplete(true, <<>>, Handler, Stack, Config); +true(Bin, Handler, Stack, Config) -> + ?error(true, Bin, Handler, Stack, Config). + + +false(<<$a, $l, $s, $e, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, handle_event({literal, false}, Handler, Config), Stack, Config); +false(<<$a, $l, $s>>, Handler, Stack, Config) -> + incomplete(false, <<$a, $l, $s>>, Handler, Stack, Config); +false(<<$a, $l>>, Handler, Stack, Config) -> + incomplete(false, <<$a, $l>>, Handler, Stack, Config); +false(<<$a>>, Handler, Stack, Config) -> + incomplete(false, <<$a>>, Handler, Stack, Config); +false(<<>>, Handler, Stack, Config) -> + incomplete(false, <<>>, Handler, Stack, Config); +false(Bin, Handler, Stack, Config) -> + ?error(false, Bin, Handler, Stack, Config). + + +null(<<$u, $l, $l, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, handle_event({literal, null}, Handler, Config), Stack, Config); +null(<<$u, $l>>, Handler, Stack, Config) -> + incomplete(null, <<$u, $l>>, Handler, Stack, Config); +null(<<$u>>, Handler, Stack, Config) -> + incomplete(null, <<$u>>, Handler, Stack, Config); +null(<<>>, Handler, Stack, Config) -> + incomplete(null, <<>>, Handler, Stack, Config); +null(Bin, Handler, Stack, Config) -> + ?error(null, Bin, Handler, Stack, Config). + + +comment(<<?newline, Rest/binary>>, Handler, Resume, [comment|Stack], Config) -> + resume(Rest, Resume, Handler, unused, Stack, Config); +comment(<<?solidus, ?star, Rest/binary>>, Handler, Resume, Stack, Config) -> + comment(Rest, Handler, Resume, [multicomment|Stack], Config); +comment(<<?solidus>>, Handler, Resume, [multicomment|_] = Stack, Config) -> + incomplete(comment, <<?solidus>>, Handler, Resume, Stack, Config); +comment(<<?star, ?solidus, Rest/binary>>, Handler, Resume, [multicomment|Stack], Config) -> + case Stack of + [multicomment|_] -> comment(Rest, Handler, Resume, Stack, Config); + _ -> resume(Rest, Resume, Handler, unused, Stack, Config) + end; +comment(<<?star>>, Handler, Resume, [multicomment|_] = Stack, Config) -> + incomplete(comment, <<?star>>, Handler, Resume, Stack, Config); +comment(<<_/utf8, Rest/binary>>, Handler, Resume, Stack, Config) -> + comment(Rest, Handler, Resume, Stack, Config); +comment(<<_, Rest/binary>>, Handler, Resume, Stack, Config=#config{strict_utf8=false}) -> + comment(Rest, Handler, Resume, Stack, Config); +comment(<<>>, Handler, done, [Comment], Config=#config{stream=false}) + when Comment == comment; Comment == multicomment -> + resume(<<>>, done, Handler, unused, [], Config); +comment(<<>>, Handler, Resume, Stack, Config) -> + incomplete(comment, <<>>, Handler, Resume, Stack, Config); +comment(Bin, Handler, Resume, Stack, Config) -> + ?error(comment, Bin, Handler, Resume, Stack, Config). 
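
The `true/4`, `false/4`, and `null/4` clauses above show the general incompleteness strategy: a partial match is stashed and retried once more input arrives. A hedged sketch of how that looks from the outside, assuming the event-accumulating `jsx` handler used by the test helpers later in this file:

```erlang
%% streaming decode of a literal split across two chunks (sketch):
Decode0 = jsx:decoder(jsx, [], [stream]),
{incomplete, Decode1} = Decode0(<<"[tr">>),   %% "tr" is held as an incomplete true
{incomplete, Decode2} = Decode1(<<"ue]">>),   %% the scan resumes where it left off
[start_array, {literal, true}, end_array, end_json] = Decode2(end_stream).
```
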
+ + +maybe_done(<<Rest/binary>>, Handler, [], Config) -> + done(Rest, handle_event(end_json, Handler, Config), [], Config); +maybe_done(<<?space, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, Handler, Stack, Config); +maybe_done(<<?end_object, Rest/binary>>, Handler, [object|Stack], Config) -> + maybe_done(Rest, handle_event(end_object, Handler, Config), Stack, Config); +maybe_done(<<?end_array, Rest/binary>>, Handler, [array|Stack], Config) -> + maybe_done(Rest, handle_event(end_array, Handler, Config), Stack, Config); +maybe_done(<<?comma, Rest/binary>>, Handler, [object|Stack], Config) -> + key(Rest, Handler, [key|Stack], Config); +maybe_done(<<?comma, Rest/binary>>, Handler, [array|_] = Stack, Config) -> + value(Rest, Handler, Stack, Config); +maybe_done(<<?newline, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, Handler, Stack, Config); +maybe_done(<<?tab, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, Handler, Stack, Config); +maybe_done(<<?cr, Rest/binary>>, Handler, Stack, Config) -> + maybe_done(Rest, Handler, Stack, Config); +maybe_done(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) -> + ?error(maybe_done, <<?solidus, Rest/binary>>, Handler, Stack, Config); +maybe_done(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, maybe_done, [comment|Stack], Config); +maybe_done(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, maybe_done, [multicomment|Stack], Config); +maybe_done(<<?solidus>>, Handler, Stack, Config) -> + incomplete(maybe_done, <<?solidus>>, Handler, Stack, Config); +maybe_done(<<>>, Handler, Stack, Config) when length(Stack) > 0 -> + incomplete(maybe_done, <<>>, Handler, Stack, Config); +maybe_done(Bin, Handler, Stack, Config) -> + ?error(maybe_done, Bin, Handler, Stack, Config). + + +done(<<?space, Rest/binary>>, Handler, [], Config) -> + done(Rest, Handler, [], Config); +done(<<?newline, Rest/binary>>, Handler, [], Config) -> + done(Rest, Handler, [], Config); +done(<<?tab, Rest/binary>>, Handler, [], Config) -> + done(Rest, Handler, [], Config); +done(<<?cr, Rest/binary>>, Handler, [], Config) -> + done(Rest, Handler, [], Config); +done(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) -> + ?error(done, <<?solidus, Rest/binary>>, Handler, Stack, Config); +done(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, done, [comment|Stack], Config); +done(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) -> + comment(Rest, Handler, done, [multicomment|Stack], Config); +done(<<?solidus>>, Handler, Stack, Config) -> + incomplete(done, <<?solidus>>, Handler, Stack, Config); +done(Bin, {_Handler, State}, _Stack, #config{return_tail=true}) -> + {with_tail,State, Bin}; +done(<<>>, {Handler, State}, [], Config=#config{stream=true}) -> + incomplete(done, <<>>, {Handler, State}, [], Config); +done(<<>>, {_Handler, State}, [], _Config) -> State; +done(Bin, {Handler, State}, _Stack, Config=#config{multi_term=true}) -> + value(Bin, {Handler, Handler:reset(State)}, [], Config); +done(Bin, Handler, Stack, Config) -> ?error(done, Bin, Handler, Stack, Config). + + + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + + +json_to_bytes(JSON) -> json_to_bytes(JSON, []). + +json_to_bytes(<<>>, Acc) -> [<<>>] ++ lists:reverse(Acc); +json_to_bytes(<<X, Rest/binary>>, Acc) -> json_to_bytes(Rest, [<<X>>] ++ Acc). + + +decode(JSON) -> decode(JSON, []). 
+decode(JSON, Config) -> (decoder(jsx, [], Config))(JSON). + + +incremental_decode(JSON) -> incremental_decode(JSON, []). +incremental_decode(JSON, Config) -> + Final = lists:foldl( + fun(Byte, Decoder) -> {incomplete, F} = Decoder(Byte), F end, + decoder(jsx, [], [stream] ++ Config), + json_to_bytes(JSON) + ), + Final(end_stream). + + +%% all these numbers have different representation in erlang than in javascript and +%% do not roundtrip like most integers/floats +special_number_test_() -> + Cases = [ + % {title, test form, json, opt flags} + {"-0", [{integer, 0}, end_json], <<"-0">>}, + {"-0.0", [{float, 0.0}, end_json], <<"-0.0">>}, + {"0e0", [{float, 0.0}, end_json], <<"0e0">>}, + {"0e4", [{float, 0.0}, end_json], <<"0e4">>}, + {"1e0", [{float, 1.0}, end_json], <<"1e0">>}, + {"-1e0", [{float, -1.0}, end_json], <<"-1e0">>}, + {"-0e0", [{float, -0.0}, end_json], <<"-0e0">>}, + {"1e4", [{float, 1.0e4}, end_json], <<"1e4">>}, + {"number terminated by whitespace", + [start_array, {integer, 1}, end_array, end_json], + <<"[ 1 ]">> + }, + {"number terminated by comma", + [start_array, {integer, 1}, {integer, 1}, end_array, end_json], + <<"[ 1, 1 ]">> + }, + {"number terminated by comma in object", + [start_object, {key, <<"x">>}, {integer, 1}, {key, <<"y">>}, {integer, 1}, end_object, end_json], + <<"{\"x\": 1, \"y\": 1}">> + } + ], + [{Title, ?_assertEqual(Events, decode(JSON))} + || {Title, Events, JSON} <- Cases + ] ++ + [{Title ++ " (incremental)", ?_assertEqual(Events, incremental_decode(JSON))} + || {Title, Events, JSON} <- Cases + ]. + + +comments_test_() -> + Cases = [ + % {title, test form, json, opt flags} + {"preceeding // comment", + [start_array, end_array, end_json], + <<"// comment ", ?newline, "[]">> + }, + {"preceeding /**/ comment", + [start_array, end_array, end_json], + <<"/* comment */[]">> + }, + {"trailing // comment", + [start_array, end_array, end_json], + <<"[]// comment", ?newline>> + }, + {"trailing // comment (no newline)", + [start_array, end_array, end_json], + <<"[]// comment">> + }, + {"trailing /**/ comment", + [start_array, end_array, end_json], + <<"[] /* comment */">> + }, + {"// comment inside array", + [start_array, end_array, end_json], + <<"[ // comment", ?newline, "]">> + }, + {"/**/ comment inside array", + [start_array, end_array, end_json], + <<"[ /* comment */ ]">> + }, + {"// comment at beginning of array", + [start_array, {literal, true}, end_array, end_json], + <<"[ // comment", ?newline, "true", ?newline, "]">> + }, + {"/**/ comment at beginning of array", + [start_array, {literal, true}, end_array, end_json], + <<"[ /* comment */ true ]">> + }, + {"// comment at end of array", + [start_array, {literal, true}, end_array, end_json], + <<"[ true // comment", ?newline, "]">> + }, + {"/**/ comment at end of array", + [start_array, {literal, true}, end_array, end_json], + <<"[ true /* comment */ ]">> + }, + {"// comment midarray (post comma)", + [start_array, {literal, true}, {literal, false}, end_array, end_json], + <<"[ true, // comment", ?newline, "false ]">> + }, + {"/**/ comment midarray (post comma)", + [start_array, {literal, true}, {literal, false}, end_array, end_json], + <<"[ true, /* comment */ false ]">> + }, + {"// comment midarray (pre comma)", + [start_array, {literal, true}, {literal, false}, end_array, end_json], + <<"[ true// comment", ?newline, ", false ]">> + }, + {"/**/ comment midarray (pre comma)", + [start_array, {literal, true}, {literal, false}, end_array, end_json], + <<"[ true/* comment */, false ]">> + }, + {"// comment 
inside object", + [start_object, end_object, end_json], + <<"{ // comment", ?newline, "}">> + }, + {"/**/ comment inside object", + [start_object, end_object, end_json], + <<"{ /* comment */ }">> + }, + {"// comment at beginning of object", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ // comment", ?newline, " \"key\": true", ?newline, "}">> + }, + {"/**/ comment at beginning of object", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ /* comment */ \"key\": true }">> + }, + {"// comment at end of object", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ \"key\": true // comment", ?newline, "}">> + }, + {"/**/ comment at end of object", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ \"key\": true /* comment */ }">> + }, + {"// comment midobject (post comma)", + [ + start_object, + {key, <<"x">>}, + {literal, true}, + {key, <<"y">>}, + {literal, false}, + end_object, + end_json + ], + <<"{ \"x\": true, // comment", ?newline, "\"y\": false }">> + }, + {"/**/ comment midobject (post comma)", + [ + start_object, + {key, <<"x">>}, + {literal, true}, + {key, <<"y">>}, + {literal, false}, + end_object, + end_json + ], + <<"{ \"x\": true, /* comment */", ?newline, "\"y\": false }">> + }, + {"// comment midobject (pre comma)", + [ + start_object, + {key, <<"x">>}, + {literal, true}, + {key, <<"y">>}, + {literal, false}, + end_object, + end_json + ], + <<"{ \"x\": true// comment", ?newline, ", \"y\": false }">> + }, + {"/**/ comment midobject (pre comma)", + [ + start_object, + {key, <<"x">>}, + {literal, true}, + {key, <<"y">>}, + {literal, false}, + end_object, + end_json + ], + <<"{ \"x\": true/* comment */", ?newline, ", \"y\": false }">> + }, + {"// comment precolon", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ \"key\" // comment", ?newline, ": true }">> + }, + {"/**/ comment precolon", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ \"key\"/* comment */: true }">> + }, + {"// comment postcolon", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ \"key\": // comment", ?newline, " true }">> + }, + {"/**/ comment postcolon", + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + <<"{ \"key\":/* comment */ true }">> + }, + {"// comment terminating zero", + [start_array, {integer, 0}, end_array, end_json], + <<"[ 0// comment", ?newline, "]">> + }, + {"// comment terminating integer", + [start_array, {integer, 1}, end_array, end_json], + <<"[ 1// comment", ?newline, "]">> + }, + {"// comment terminating float", + [start_array, {float, 1.0}, end_array, end_json], + <<"[ 1.0// comment", ?newline, "]">> + }, + {"// comment terminating exp", + [start_array, {float, 1.0e1}, end_array, end_json], + <<"[ 1e1// comment", ?newline, "]">> + }, + {"/**/ comment terminating zero", + [start_array, {integer, 0}, end_array, end_json], + <<"[ 0/* comment */ ]">> + }, + {"/**/ comment terminating integer", + [start_array, {integer, 1}, end_array, end_json], + <<"[ 1/* comment */ ]">> + }, + {"/**/ comment terminating float", + [start_array, {float, 1.0}, end_array, end_json], + <<"[ 1.0/* comment */ ]">> + }, + {"/**/ comment terminating exp", + [start_array, {float, 1.0e1}, end_array, end_json], + <<"[ 1e1/* comment */ ]">> + }, + {"/**/ comment following /**/ comment", + [start_array, {literal, true}, end_array, end_json], + <<"[/* comment *//* comment 
*/true]">> + }, + {"/**/ comment following // comment", + [start_array, {literal, true}, end_array, end_json], + <<"[// comment", ?newline, "/* comment */true]">> + }, + {"// comment following /**/ comment", + [start_array, {literal, true}, end_array, end_json], + <<"[/* comment */// comment", ?newline, "true]">> + }, + {"// comment following // comment", + [start_array, {literal, true}, end_array, end_json], + <<"[// comment", ?newline, "// comment", ?newline, "true]">> + }, + {"/**/ comment inside /**/ comment", + [start_array, {literal, true}, end_array, end_json], + <<"[ /* /* comment */ */ true ]">> + }, + {"/**/ comment with /", + [start_array, {literal, true}, end_array, end_json], + <<"[ /* / */ true ]">> + }, + {"/**/ comment with *", + [start_array, {literal, true}, end_array, end_json], + <<"[ /* * */ true ]">> + }, + {"// comment with badutf", + [start_array, {literal, true}, end_array, end_json], + <<"[ // comment ", 16#00c0, " ", ?newline, "true]">> + }, + {"/**/ comment with badutf", + [start_array, {literal, true}, end_array, end_json], + <<"[ /* comment ", 16#00c0, " */ true]">> + }, + {"/**/ comment with badutf preceeded by /", + [start_array, {literal, true}, end_array, end_json], + <<"[ /* comment /", 16#00c0, " */ true]">> + } + ], + [{Title, ?_assertEqual(Events, decode(JSON))} + || {Title, Events, JSON} <- Cases + ] ++ + [{Title ++ " (incremental)", ?_assertEqual(Events, incremental_decode(JSON))} + || {Title, Events, JSON} <- Cases + ] ++ + % error when `{strict, [comments]}` is present + [{Title, ?_assertError(badarg, decode(JSON, [{strict, [comments]}]))} + || {Title, _Events, JSON} <- Cases + ] ++ + [{Title ++ " (incremental)", ?_assertError( + badarg, + incremental_decode(JSON, [{strict, [comments]}]) + )} || {Title, _Events, JSON} <- Cases + ]. + + +no_comments_test_() -> + Cases = [ + {"// comment with badutf", + badarg, + <<"[ // comment ", 16#00c0, " ", ?newline, "true]">>, + [{strict, [utf8]}] + }, + {"/**/ comment with badutf", + badarg, + <<"[ /* comment ", 16#00c0, " */ true]">>, + [{strict, [utf8]}] + }, + {"/**/ comment with badutf preceeded by /", + badarg, + <<"[ /* comment /", 16#00c0, " */ true]">>, + [{strict, [utf8]}] + } + ], + [{Title, ?_assertError(Error, decode(JSON, Config))} + || {Title, Error, JSON, Config} <- Cases + ] ++ + [{Title ++ " (incremental)", ?_assertError(Error, incremental_decode(JSON, Config))} + || {Title, Error, JSON, Config} <- Cases + ]. + + +% doing the full unicode range takes foreverrrrrrr so just do boundaries +% excludes characters that may need escaping +codepoints() -> + lists:seq(0, 32) ++ + [32, 33] ++ + lists:seq(35, 46) ++ + lists:seq(48, 91) ++ + lists:seq(93, 127) ++ + [16#2027, 16#202a, 16#d7ff, 16#e000] ++ + lists:seq(16#fdd0, 16#ffff) ++ + [16#10000, 16#20000, 16#30000, 16#40000, 16#50000] ++ + [16#60000, 16#70000, 16#80000, 16#90000, 16#a0000, 16#b0000] ++ + [16#c0000, 16#d0000, 16#e0000, 16#f0000, 16#100000]. + + +surrogates() -> lists:seq(16#d800, 16#dfff). + + +%% erlang refuses to decode certain codepoints, so fake them all +to_fake_utf8(N) when N < 16#0080 -> <<34/utf8, N:8, 34/utf8>>; +to_fake_utf8(N) when N < 16#0800 -> + <<0:5, Y:5, X:6>> = <<N:16>>, + <<34/utf8, 2#110:3, Y:5, 2#10:2, X:6, 34/utf8>>; +to_fake_utf8(N) when N < 16#10000 -> + <<Z:4, Y:6, X:6>> = <<N:16>>, + <<34/utf8, 2#1110:4, Z:4, 2#10:2, Y:6, 2#10:2, X:6, 34/utf8>>; +to_fake_utf8(N) -> + <<0:3, W:3, Z:6, Y:6, X:6>> = <<N:24>>, + <<34/utf8, 2#11110:5, W:3, 2#10:2, Z:6, 2#10:2, Y:6, 2#10:2, X:6, 34/utf8>>. 
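
For reference, a worked expansion of `to_fake_utf8/1` above for the lone surrogate u+d800 (the values follow directly from the bit-syntax clause for `N < 16#10000`):

```erlang
%% 16#d800 = 2#1101100000000000, so Z = 2#1101, Y = 2#100000, X = 2#000000,
%% giving the (invalid) three-byte sequence 237,160,128 wrapped in quotes:
<<34, 237, 160, 128, 34>> = to_fake_utf8(16#d800).
```
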
+ + +clean_string_test_() -> + Clean = codepoints(), + Dirty = surrogates(), + % clean codepoints + [{"clean u+" ++ integer_to_list(Codepoint, 16), ?_assertEqual( + [{string, <<Codepoint/utf8>>}, end_json], + decode(<<34/utf8, Codepoint/utf8, 34/utf8>>) + )} || Codepoint <- Clean + ] ++ + % bad codepoints replaced by u+FFFD + [{"clean u+" ++ integer_to_list(Codepoint, 16), ?_assertEqual( + [{string, <<16#fffd/utf8>>}, end_json], + decode(to_fake_utf8(Codepoint)) + )} || Codepoint <- Dirty + ] ++ + % bad codepoints that cause errors + [{"dirty u+" ++ integer_to_list(Codepoint, 16), ?_assertError( + badarg, + decode(to_fake_utf8(Codepoint), [{strict, [utf8]}]) + )} || Codepoint <- Dirty + ]. + + +dirty_string_test_() -> + Cases = [ + {"dirty \\n", + [start_array, {string, <<"\\n">>}, end_array, end_json], + <<"[\"\\n\"]">>, + [dirty_strings] + }, + {"dirty \\uwxyz", + [start_array, {string, <<"\\uwxyz">>}, end_array, end_json], + <<"[\"\\uwxyz\"]">>, + [dirty_strings] + }, + {"dirty \\x23", + [start_array, {string, <<"\\x23">>}, end_array, end_json], + <<"[\"\\x23\"]">>, + [dirty_strings] + }, + {"dirty 0", + [start_array, {string, <<0>>}, end_array, end_json], + <<"[\"", 0, "\"]">>, + [dirty_strings] + }, + {"dirty 0\\\"0", + [start_array, {string, <<0, ?rsolidus, ?doublequote, 0>>}, end_array, end_json], + <<"[\"", 0, ?rsolidus, ?doublequote, 0, "\"]">>, + [dirty_strings] + }, + {"dirty 0\\\\\"0", + [start_array, {string, <<0, ?rsolidus, ?rsolidus, ?doublequote, 0>>}, end_array, end_json], + <<"[\"", 0, ?rsolidus, ?rsolidus, ?doublequote, 0, "\"]">>, + [dirty_strings] + }, + {"dirty 16#d800", + [start_array, {string, <<237, 160, 128>>}, end_array, end_json], + <<"[\"", 237, 160, 128, "\"]">>, + [dirty_strings] + }, + {"dirty /", + [start_array, {string, <<$/>>}, end_array, end_json], + <<"[\"", $/, "\"]">>, + [dirty_strings, escaped_forward_slashes] + }, + {"dirty <<194, 129>>", + [start_array, {string, <<194, 129>>}, end_array, end_json], + <<"[\"", 194, 129, "\"]">>, + [dirty_strings] + } + ], + [{Title, ?_assertEqual(Events, decode(JSON, Config))} + || {Title, Events, JSON, Config} <- Cases + ] ++ + % ensure `dirty_strings` and `strict` interact properly + [{Title, ?_assertEqual(Events, decode(JSON, Config ++ [strict]))} + || {Title, Events, JSON, Config} <- Cases + ] ++ + [{Title ++ " (incremental)", ?_assertEqual(Events, incremental_decode(JSON, Config))} + || {Title, Events, JSON, Config} <- Cases + ]. 
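
One concrete instance of the `Config ++ [strict]` comprehension above, spelled out: `dirty_strings` takes precedence over strict UTF-8 checking for string bodies (`decode/2` is the local test helper):

```erlang
%% same events with and without strict once dirty_strings is set (sketch)
[start_array, {string, <<237, 160, 128>>}, end_array, end_json] =
    decode(<<"[\"", 237, 160, 128, "\"]">>, [dirty_strings, strict]).
```
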
+ + +bad_utf8_test_() -> + Cases = [ + {"orphan continuation byte u+0080", <<16#fffd/utf8>>, <<16#0080>>}, + {"orphan continuation byte u+00bf", <<16#fffd/utf8>>, <<16#00bf>>}, + {"2 continuation bytes", + binary:copy(<<16#fffd/utf8>>, 2), + <<(binary:copy(<<16#0080>>, 2))/binary>> + }, + {"3 continuation bytes", + binary:copy(<<16#fffd/utf8>>, 3), + <<(binary:copy(<<16#0080>>, 3))/binary>> + }, + {"4 continuation bytes", + binary:copy(<<16#fffd/utf8>>, 4), + <<(binary:copy(<<16#0080>>, 4))/binary>> + }, + {"5 continuation bytes", + binary:copy(<<16#fffd/utf8>>, 5), + <<(binary:copy(<<16#0080>>, 5))/binary>> + }, + {"6 continuation bytes", + binary:copy(<<16#fffd/utf8>>, 6), + <<(binary:copy(<<16#0080>>, 6))/binary>> + }, + {"all continuation bytes", + binary:copy(<<16#fffd/utf8>>, length(lists:seq(16#0080, 16#00bf))), + <<(list_to_binary(lists:seq(16#0080, 16#00bf)))/binary>> + }, + {"lonely start byte", <<16#fffd/utf8>>, <<16#00c0>>}, + {"lonely start bytes (2 byte)", + <<16#fffd/utf8, 32, 16#fffd/utf8>>, + <<16#00c0, 32, 16#00df>> + }, + {"lonely start bytes (3 byte)", + <<16#fffd/utf8, 32, 16#fffd/utf8>>, + <<16#00e0, 32, 16#00ef>> + }, + {"lonely start bytes (4 byte)", + <<16#fffd/utf8, 32, 16#fffd/utf8>>, + <<16#00f0, 32, 16#00f7>> + }, + {"missing continuation byte (3 byte)", <<16#fffd/utf8, 32>>, <<224, 160, 32>>}, + {"missing continuation byte (4 byte missing one)", + <<16#fffd/utf8, 32>>, + <<240, 144, 128, 32>> + }, + {"missing continuation byte (4 byte missing two)", + <<16#fffd/utf8, 32>>, + <<240, 144, 32>> + }, + {"overlong encoding of u+002f (2 byte)", + <<16#fffd/utf8, 32>>, + <<16#c0, 16#af, 32>> + }, + {"overlong encoding of u+002f (3 byte)", + <<16#fffd/utf8, 32>>, + <<16#e0, 16#80, 16#af, 32>> + }, + {"overlong encoding of u+002f (4 byte)", + <<16#fffd/utf8, 32>>, + <<16#f0, 16#80, 16#80, 16#af, 32>> + }, + {"highest overlong 2 byte sequence", + <<16#fffd/utf8, 32>>, + <<16#c1, 16#bf, 32>> + }, + {"highest overlong 3 byte sequence", + <<16#fffd/utf8, 32>>, + <<16#e0, 16#9f, 16#bf, 32>> + }, + {"highest overlong 4 byte sequence", + <<16#fffd/utf8, 32>>, + <<16#f0, 16#8f, 16#bf, 16#bf, 32>> + } + ], + [{Title, ?_assertError( + badarg, + decode(<<34, JSON/binary, 34>>, [{strict, [utf8]}]) + )} || {Title, _, JSON} <- Cases + ] ++ + [{Title ++ " (incremental)", ?_assertError( + badarg, + incremental_decode(<<34, JSON/binary, 34>>, [{strict, [utf8]}]) + )} || {Title, _, JSON} <- Cases + ] ++ + [{Title ++ " replaced", ?_assertEqual( + [{string, Replacement}, end_json], + decode(<<34, JSON/binary, 34>>) + )} || {Title, Replacement, JSON} <- Cases + ] ++ + [{Title ++ " replaced (incremental)", ?_assertEqual( + [{string, Replacement}, end_json], + incremental_decode(<<34, JSON/binary, 34>>) + )} || {Title, Replacement, JSON} <- Cases + ]. 
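
The overlong-encoding cases above may be easier to read as a bit layout; the rejected two-byte form of u+002f simply pads a 7-bit codepoint into the 110xxxxx 10xxxxxx template, shown here as a bit-syntax identity:

```erlang
%% u+002f (47) only needs one byte, so this two-byte spelling is overlong:
<<2#110:3, 2#00000:5, 2#10:2, 2#101111:6>> = <<16#c0, 16#af>>.
```
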
+ + +unescape_test_() -> + Cases = [ + {"unescape backspace", <<"\b">>, <<"\\b"/utf8>>}, + {"unescape tab", <<"\t">>, <<"\\t"/utf8>>}, + {"unescape newline", <<"\n">>, <<"\\n"/utf8>>}, + {"unescape formfeed", <<"\f">>, <<"\\f"/utf8>>}, + {"unescape carriage return", <<"\r">>, <<"\\r"/utf8>>}, + {"unescape quote", <<"\"">>, <<"\\\""/utf8>>}, + {"unescape solidus", <<"/">>, <<"\\/"/utf8>>}, + {"unescape reverse solidus", <<"\\">>, <<"\\\\"/utf8>>}, + {"unescape control", <<0>>, <<"\\u0000"/utf8>>}, + {"unescape surrogate pair", <<16#10000/utf8>>, <<"\\ud800\\udc00"/utf8>>}, + {"unescape surrogate pair", <<16#10000/utf8>>, <<"\\uD800\\uDC00"/utf8>>}, + {"replace bad high surrogate", <<16#fffd/utf8>>, <<"\\udc00"/utf8>>}, + {"replace bad high surrogate", <<16#fffd/utf8>>, <<"\\uDC00"/utf8>>}, + {"replace naked high surrogate", + <<16#fffd/utf8, "hello world">>, + <<"\\ud800hello world"/utf8>> + }, + {"replace naked high surrogate", + <<16#fffd/utf8, "hello world">>, + <<"\\uD800hello world"/utf8>> + }, + {"replace naked low surrogate", + <<16#fffd/utf8, "hello world">>, + <<"\\udc00hello world"/utf8>> + }, + {"replace naked low surrogate", + <<16#fffd/utf8, "hello world">>, + <<"\\uDC00hello world"/utf8>> + }, + {"replace bad surrogate pair", <<16#fffd/utf8, 16#fffd/utf8>>, <<"\\ud800\\u0000">>}, + {"replace bad surrogate pair", <<16#fffd/utf8, 16#fffd/utf8>>, <<"\\uD800\\u0000">>} + ], + [{Title, ?_assertEqual([{string, Escaped}, end_json], decode(<<34, JSON/binary, 34>>))} + || {Title, Escaped, JSON} <- Cases + ] ++ + [{Title ++ " (incremental)", ?_assertEqual( + [{string, Escaped}, end_json], + incremental_decode(<<34, JSON/binary, 34>>) + )} || {Title, Escaped, JSON} <- Cases + ]. + + +bad_escaped_surrogate_test_() -> + Cases = [ + {"do not unescape bad high surrogate", <<"\\udc00">>}, + {"do not unescape naked high surrogate", <<"\\ud800hello world">>}, + {"do not unescape naked low surrogate", <<"\\udc00hello world">>}, + {"do not unescape bad surrogate pair", <<"\\ud800\\u0000">>} + ], + [{Title, ?_assertError(badarg, decode(<<34, JSON/binary, 34>>, [{strict, [utf8]}]))} + || {Title, JSON} <- Cases + ]. + + +escape_test_() -> + Cases = [ + {"backspace", <<"\b">>, <<"\\b">>}, + {"tab", <<"\t">>, <<"\\t">>}, + {"newline", <<"\n">>, <<"\\n">>}, + {"formfeed", <<"\f">>, <<"\\f">>}, + {"carriage return", <<"\r">>, <<"\\r">>}, + {"quote", <<"\"">>, <<"\\\"">>}, + {"backslash", <<"\\">>, <<"\\\\">>}, + {"control", <<0>>, <<"\\u0000">>} + ], + [{"escape " ++ Title, ?_assertEqual( + [{string, Escaped}, end_json], + decode(<<34, Escaped/binary, 34>>, [escaped_strings]) + )} || {Title, _Unescaped, Escaped} <- Cases + ] ++ + [{"do not escape " ++ Title, ?_assertEqual( + [{string, Unescaped}, end_json], + decode(<<34, Escaped/binary, 34>>) + )} || {Title, Unescaped, Escaped} <- Cases + ]. + + +special_escape_test_() -> + Cases = [ + {"escape forward slash", <<"\\/">>, <<"/"/utf8>>, [escaped_forward_slashes]}, + {"do not escape forward slash", <<"/">>, <<"/"/utf8>>, []}, + {"escape jsonp", <<"\\u2028">>, <<16#2028/utf8>>, []}, + {"do not escape jsonp", <<16#2028/utf8>>, <<16#2028/utf8>>, [unescaped_jsonp]} + ], + [{Title, ?_assertEqual( + [{string, Expect}, end_json], + decode(<<34, Raw/binary, 34>>, [escaped_strings] ++ Config) + )} || {Title, Expect, Raw, Config} <- Cases + ]. 
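
The surrogate-pair cases above follow the usual UTF-16 arithmetic; a worked line for the `\ud800\udc00` pair, which decodes to u+10000 (general formula shown, the concrete expansion is mine):

```erlang
%% codepoint = (High - 16#d800) * 16#400 + (Low - 16#dc00) + 16#10000
16#10000 = (16#d800 - 16#d800) * 16#400 + (16#dc00 - 16#dc00) + 16#10000.
```
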
+ + +uescape_test_() -> + [ + {"\"\\u0080\"", ?_assertEqual( + [{string, <<"\\u0080">>}, end_json], + decode(<<34, 128/utf8, 34>>, [uescape]) + )}, + {"\"\\u8ca8\\u5481\\u3002\\u0091\\u0091\"", ?_assertEqual( + [{string, <<"\\u8ca8\\u5481\\u3002\\u0091\\u0091">>}, end_json], + decode( + <<34,232,178,168,229,146,129,227,128,130,194,145,194,145,34>>, + [uescape] + ) + )}, + {"\"\\ud834\\udd1e\"", ?_assertEqual( + [{string, <<"\\ud834\\udd1e">>}, end_json], + decode(<<34, 240, 157, 132, 158, 34>>, [uescape]) + )}, + {"\"\\ud83d\\ude0a\"", ?_assertEqual( + [{string, <<"\\ud83d\\ude0a">>}, end_json], + decode(<<34, 240, 159, 152, 138, 34>>, [uescape]) + )} + ]. + + +single_quoted_string_test_() -> + Cases = [ + {"single quoted string", [{string, <<"hello world">>}, end_json], <<39, "hello world", 39>>}, + {"single quoted string with embedded double quotes", + [{string, <<"quoth the raven, \"nevermore\"">>}, end_json], + <<39, "quoth the raven, \"nevermore\"", 39>> + }, + {"escaped single quote", + [{string, <<"quoth the raven, 'nevermore'">>}, end_json], + <<39, "quoth the raven, \\'nevermore\\'", 39>> + }, + {"single quoted key", + [start_object, + {key, <<"key">>}, {string, <<"value">>}, + {key, <<"another key">>}, {string, <<"another value">>}, + end_object, end_json], + <<"{'key':'value','another key':'another value'}">> + } + ], + [{Title, ?_assertEqual(Expect, decode(Raw, []))} || {Title, Expect, Raw} <- Cases] ++ + [{Title, ?_assertError( + badarg, + decode(Raw, [{strict, [single_quotes]}]) + )} || {Title, _Expect, Raw} <- Cases + ]. + + +embedded_single_quoted_string_test_() -> + [ + {"string with embedded single quotes", ?_assertEqual( + [{string, <<"quoth the raven, 'nevermore'">>}, end_json], + decode(<<34, "quoth the raven, 'nevermore'", 34>>, []) + )}, + {"string with embedded single quotes", ?_assertEqual( + [{string, <<"quoth the raven, 'nevermore'">>}, end_json], + decode(<<34, "quoth the raven, 'nevermore'", 34>>, [{strict, [single_quotes]}]) + )} + ]. + + +ignored_bad_escapes_test_() -> + [ + {"ignore unrecognized escape sequence", ?_assertEqual( + [{string, <<"\\x25">>}, end_json], + decode(<<"\"\\x25\"">>, []) + )} + ]. + + +bom_test_() -> + [ + {"bom", ?_assertEqual( + [start_array, end_array, end_json], + decode(<<16#ef, 16#bb, 16#bf, "[]"/utf8>>, []) + )} + ]. + + +trailing_comma_test_() -> + [ + {"trailing comma in object", ?_assertEqual( + [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json], + decode(<<"{\"key\": true,}">>, []) + )}, + {"strict trailing comma in object", ?_assertError( + badarg, + decode(<<"{\"key\": true,}">>, [{strict, [trailing_commas]}]) + )}, + {"two trailing commas in object", ?_assertError( + badarg, + decode(<<"{\"key\": true,,}">>, []) + )}, + {"comma in empty object", ?_assertError( + badarg, + decode(<<"{,}">>, []) + )}, + {"trailing comma in list", ?_assertEqual( + [start_array, {literal, true}, end_array, end_json], + decode(<<"[true,]">>, []) + )}, + {"strict trailing comma in list", ?_assertError( + badarg, + decode(<<"[true,]">>, [{strict, [trailing_commas]}]) + )}, + {"two trailing commas in list", ?_assertError( + badarg, + decode(<<"[true,,]">>, []) + )}, + {"comma in empty list", ?_assertError( + badarg, + decode(<<"[,]">>, []) + )} + ]. + + +incomplete_test_() -> + [ + {"stream false", ?_assertError( + badarg, + decode(<<"{">>) + )}, + {"stream true", ?_assertMatch( + {incomplete, _}, + decode(<<"{">>, [stream]) + )}, + {"complete input", ?_assertMatch( + {incomplete, _}, + decode(<<"{}">>, [stream]) + )} + ]. 
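
A hedged sketch of how the per-extension `strict` flags compose with the defaults exercised above (`decode/2` is the local test helper; this particular combination of extensions is mine):

```erlang
%% single quotes and the trailing comma are both tolerated by default...
[start_object, {key, <<"key">>}, {literal, true}, end_object, end_json] =
    decode(<<"{'key': true,}">>, []),
%% ...until the specific extension is listed under strict
{'EXIT', {badarg, _}} = (catch decode(<<"{'key': true,}">>, [{strict, [trailing_commas]}])).
```
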
+ + +error_test_() -> + Cases = [ + {"maybe_bom error", <<16#ef, 0>>}, + {"definitely_bom error", <<16#ef, 16#bb, 0>>}, + {"object error", <<"{"/utf8, 0>>}, + {"colon error", <<"{\"\""/utf8, 0>>}, + {"key error", <<"{\"\":1,"/utf8, 0>>}, + {"value error", <<0>>}, + {"negative error", <<"-"/utf8, 0>>}, + {"zero error", <<"0"/utf8, 0>>}, + {"integer error", <<"1"/utf8, 0>>}, + {"decimal error", <<"1.0"/utf8, 0>>}, + {"e error", <<"1e"/utf8, 0>>}, + {"ex error", <<"1e+"/utf8, 0>>}, + {"exp error", <<"1e1"/utf8, 0>>}, + {"exp error", <<"1.0e1"/utf8, 0>>}, + {"exp error", <<"1.e"/utf8>>}, + {"true error", <<"tru"/utf8, 0>>}, + {"false error", <<"fals"/utf8, 0>>}, + {"null error", <<"nul"/utf8, 0>>}, + {"maybe_done error", <<"[[]"/utf8, 0>>}, + {"done error", <<"[]"/utf8, 0>>} + ], + [{Title, ?_assertError(badarg, decode(State))} || {Title, State} <- Cases]. + + +custom_incomplete_handler_test_() -> + [ + {"custom incomplete handler", ?_assertError( + incomplete, + decode(<<>>, [{incomplete_handler, fun(_, _, _) -> erlang:error(incomplete) end}, stream]) + )} + ]. + + +return_tail_test_() -> + [ + {"return_tail with tail", ?_assertEqual( + {with_tail,[{}],<<"3">>}, + jsx:decode(<<"{} 3">>, [return_tail]) + )}, + {"return_tail without tail", ?_assertEqual( + {with_tail,[{}],<<"">>}, + jsx:decode(<<"{}">>, [return_tail]) + )}, + {"return_tail with trimmed whitespace", ?_assertEqual( + {with_tail,[{}],<<"">>}, + jsx:decode(<<"{} ">>, [return_tail]) + )}, + {"return_tail and streaming", ?_assertEqual( + {with_tail,[{}],<<"3">>}, + begin + {incomplete, F} = jsx:decode(<<"{">>, [return_tail, stream]), + F(<<"} 3">>) + end + )}, + {"return_tail and streaming", ?_assertEqual( + {with_tail,[{}],<<"">>}, + begin + %% In case of infinite stream of objects a user does not know + %% when to call F(end_stream). + %% So, return_tail overwrites conservative stream end. + %% This means that we don't need to call end_stream explicitly. + {incomplete, F} = jsx:decode(<<"{">>, [return_tail, stream]), + F(<<"}">>) + end + )} + ]. + +-endif. diff --git a/server/_build/default/plugins/jsx/src/jsx_encoder.erl b/server/_build/default/plugins/jsx/src/jsx_encoder.erl new file mode 100644 index 0000000..39140d8 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_encoder.erl @@ -0,0 +1,127 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 Alisdair Sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_encoder). 
+ +-export([encoder/3, encode/1, encode/2]). + +-spec encoder(Handler::module(), State::any(), Config::list()) -> jsx:encoder(). + +encoder(Handler, State, Config) -> + Parser = jsx:parser(Handler, State, Config), + fun(Term) -> Parser(encode(Term) ++ [end_json]) end. + + +-spec encode(Term::any()) -> any(). + +encode(Term) -> encode(Term, ?MODULE). + + +-spec encode(Term::any(), EntryPoint::module()) -> any(). + +-ifndef(maps_support). +encode(Term, EntryPoint) -> encode_(Term, EntryPoint). +-endif. + +-ifdef(maps_support). +encode(Map, _EntryPoint) when is_map(Map), map_size(Map) < 1 -> + [start_object, end_object]; +encode(Term, EntryPoint) when is_map(Term) -> + [start_object] ++ unpack(Term, EntryPoint); +encode(Term, EntryPoint) -> encode_(Term, EntryPoint). +-endif. + +encode_([], _EntryPoint) -> [start_array, end_array]; +encode_([{}], _EntryPoint) -> [start_object, end_object]; + +%% datetime special case +encode_([{{_,_,_},{_,_,_}} = DateTime|Rest], EntryPoint) -> + [start_array] ++ [DateTime] ++ unhitch(Rest, EntryPoint); +encode_([{_, _}|_] = Term, EntryPoint) -> + [start_object] ++ unzip(Term, EntryPoint); +encode_(Term, EntryPoint) when is_list(Term) -> + [start_array] ++ unhitch(Term, EntryPoint); + +encode_(Else, _EntryPoint) -> [Else]. + + +unzip([{K, V}|Rest], EntryPoint) when is_integer(K); is_binary(K); is_atom(K) -> + [K] ++ EntryPoint:encode(V, EntryPoint) ++ unzip(Rest, EntryPoint); +unzip([], _) -> [end_object]; +unzip(_, _) -> erlang:error(badarg). + + +unhitch([V|Rest], EntryPoint) -> + EntryPoint:encode(V, EntryPoint) ++ unhitch(Rest, EntryPoint); +unhitch([], _) -> [end_array]. + + +-ifdef(maps_support). +unpack(Map, EntryPoint) -> unpack(Map, maps:keys(Map), EntryPoint). + +unpack(Map, [K|Rest], EntryPoint) when is_integer(K); is_binary(K); is_atom(K) -> + [K] ++ EntryPoint:encode(maps:get(K, Map), EntryPoint) ++ unpack(Map, Rest, EntryPoint); +unpack(_, [], _) -> [end_object]. +-endif. + + + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + + +parser(Term, Opts) -> (jsx:parser(jsx, [], Opts))(Term). + + +error_test_() -> + [ + {"value error", ?_assertError(badarg, parser(self(), []))}, + {"string error", ?_assertError(badarg, parser(<<239, 191, 191>>, [strict]))} + ]. + +custom_error_handler_test_() -> + Error = fun(Term, {_, State, _, _}, _) -> {State, Term} end, + [ + {"value error", ?_assertEqual( + {value, [self()]}, + parser(self(), [{error_handler, Error}]) + )}, + {"string error", ?_assertEqual( + {value, [{string, <<237, 160, 128>>}]}, + parser(<<237, 160, 128>>, [{error_handler, Error}, strict]) + )} + ]. + +improper_lists_test_() -> + [ + {"improper proplist", ?_assertError( + badarg, + encode([{<<"key">>, <<"value">>}, false]) + )}, + {"improper list", ?_assertError( + badarg, + encode([{literal, true}, false, null]) + )} + ]. + +-endif. 
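
To make the encoder's contract concrete: `encode/1` above flattens a term into the token list that `jsx:parser/3` later tags and validates. A small sketch of what a proplist turns into (plain values pass through unwrapped; datetime tuples are likewise passed through for the parser to format):

```erlang
%% the key is emitted bare; the parser wraps it as {key, <<"key">>} later
[start_object, <<"key">>, start_array, true, 1, end_array, end_object] =
    jsx_encoder:encode([{<<"key">>, [true, 1]}]).
```
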
diff --git a/server/_build/default/plugins/jsx/src/jsx_parser.erl b/server/_build/default/plugins/jsx/src/jsx_parser.erl new file mode 100644 index 0000000..ca341c0 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_parser.erl @@ -0,0 +1,1214 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 Alisdair Sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_parser). + +-export([parser/3, resume/5]). +-export([init/1, handle_event/2]). + + +-spec parser(Handler::module(), State::any(), Config::list()) -> jsx:parser(). + +parser(Handler, State, Config) -> + fun(Tokens) -> value(Tokens, {Handler, Handler:init(State)}, [], jsx_config:parse_config(Config)) end. + + +%% resume allows continuation from interrupted decoding without having to explicitly export +%% all states +-spec resume( + Rest::jsx:token(), + State::atom(), + Handler::{atom(), any()}, + Stack::list(atom()), + Config::jsx:config() + ) -> jsx:parser() | {incomplete, jsx:parser()}. + +resume(Rest, State, Handler, Stack, Config) -> + case State of + value -> value(Rest, Handler, Stack, Config); + object -> object(Rest, Handler, Stack, Config); + array -> array(Rest, Handler, Stack, Config); + maybe_done -> maybe_done(Rest, Handler, Stack, Config); + done -> done(Rest, Handler, Stack, Config) + end. + + +-include("jsx_config.hrl"). + + +%% error, incomplete and event macros +-ifndef(error). +-define(error(State, Terms, Handler, Stack, Config), + case Config#config.error_handler of + false -> erlang:error(badarg); + F -> F(Terms, {parser, State, Handler, Stack}, jsx_config:config_to_list(Config)) + end + +). +-endif. + + +incomplete(State, Handler, Stack, Config=#config{stream=false}) -> + ?error(State, [], Handler, Stack, Config); +incomplete(State, Handler, Stack, Config=#config{incomplete_handler=false}) -> + {incomplete, fun(End) when End == end_stream; End == end_json -> + case resume([end_json], State, Handler, Stack, Config) of + {incomplete, _} -> ?error(State, [], Handler, Stack, Config); + Else -> Else + end; + (Tokens) -> + resume(Tokens, State, Handler, Stack, Config) + end + }; +incomplete(State, Handler, Stack, Config=#config{incomplete_handler=F}) -> + F([], {parser, State, Handler, Stack}, jsx_config:config_to_list(Config)). + + +handle_event(Event, {Handler, State}, _Config) -> {Handler, Handler:handle_event(Event, State)}. 
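
Unlike the decoder, `parser/3` above consumes pre-lexed tokens rather than binaries. A minimal sketch using the event-accumulating `jsx` handler, the same pairing the jsx_encoder tests use:

```erlang
P = jsx:parser(jsx, [], []),
%% bare terms are accepted and normalised into tagged events
[start_array, {integer, 1}, {literal, true}, end_array, end_json] =
    P([start_array, 1, true, end_array, end_json]).
```
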
+ + +value([String|Tokens], Handler, Stack, Config) when is_binary(String) -> + try clean_string(String, Config) of Clean -> + maybe_done(Tokens, handle_event({string, Clean}, Handler, Config), Stack, Config) + catch error:badarg -> + ?error(value, [{string, String}|Tokens], Handler, Stack, Config) + end; +value([true|Tokens], Handler, Stack, Config) -> + maybe_done(Tokens, handle_event({literal, true}, Handler, Config), Stack, Config); +value([false|Tokens], Handler, Stack, Config) -> + maybe_done(Tokens, handle_event({literal, false}, Handler, Config), Stack, Config); +value([null|Tokens], Handler, Stack, Config) -> + maybe_done(Tokens, handle_event({literal, null}, Handler, Config), Stack, Config); +value([start_object|Tokens], Handler, Stack, Config) -> + object(Tokens, handle_event(start_object, Handler, Config), [object|Stack], Config); +value([start_array|Tokens], Handler, Stack, Config) -> + array(Tokens, handle_event(start_array, Handler, Config), [array|Stack], Config); +value([Number|Tokens], Handler, Stack, Config) when is_integer(Number) -> + maybe_done(Tokens, handle_event({integer, Number}, Handler, Config), Stack, Config); +value([Number|Tokens], Handler, Stack, Config) when is_float(Number) -> + maybe_done(Tokens, handle_event({float, Number}, Handler, Config), Stack, Config); +value([{raw, Raw}|Tokens], Handler, Stack, Config) when is_binary(Raw) -> + value((jsx:decoder(?MODULE, [], []))(Raw) ++ Tokens, Handler, Stack, Config); +value([{_,_,_}=Timestamp|Tokens], Handler, Stack, Config) -> + {{Year, Month, Day}, {Hour, Min, Sec}} = calendar:now_to_datetime( + Timestamp), + value([{string, unicode:characters_to_binary(io_lib:format( + "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ", + [Year, Month, Day, Hour, Min, Sec] + ))}|Tokens], + Handler, + Stack, + Config + ); +value([{{Year, Month, Day}, {Hour, Min, Sec}}|Tokens], Handler, Stack, Config) +when is_integer(Year), is_integer(Month), is_integer(Day), is_integer(Hour), is_integer(Min), is_integer(Sec) -> + value([{string, unicode:characters_to_binary(io_lib:format( + "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ", + [Year, Month, Day, Hour, Min, Sec] + ))}|Tokens], + Handler, + Stack, + Config + ); +value([{{Year, Month, Day}, {Hour, Min, Sec}}|Tokens], Handler, Stack, Config) +when is_integer(Year), is_integer(Month), is_integer(Day), is_integer(Hour), is_integer(Min), is_float(Sec) -> + value([{string, unicode:characters_to_binary(io_lib:format( + "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~9.6.0fZ", + [Year, Month, Day, Hour, Min, Sec] + ))}|Tokens], + Handler, + Stack, + Config + ); +value([{literal, Value}|Tokens], Handler, Stack, Config) +when Value == true; Value == false; Value == null -> + value([Value] ++ Tokens, Handler, Stack, Config); +value([{integer, Value}|Tokens], Handler, Stack, Config) +when is_integer(Value) -> + value([Value] ++ Tokens, Handler, Stack, Config); +value([{float, Value}|Tokens], Handler, Stack, Config) +when is_float(Value) -> + value([Value] ++ Tokens, Handler, Stack, Config); +value([{string, Value}|Tokens], Handler, Stack, Config) +when is_binary(Value); is_atom(Value) -> + value([Value] ++ Tokens, Handler, Stack, Config); +value([{number, Value}|Tokens], Handler, Stack, Config) +when is_float(Value); is_integer(Value) -> + value([Value] ++ Tokens, Handler, Stack, Config); +value([String|Tokens], Handler, Stack, Config) when is_atom(String) -> + value([{string, atom_to_binary(String, utf8)}] ++ Tokens, Handler, Stack, Config); +value([], Handler, Stack, Config) 
-> + incomplete(value, Handler, Stack, Config); +value(BadTokens, Handler, Stack, Config) when is_list(BadTokens) -> + ?error(value, BadTokens, Handler, Stack, Config); +value(Token, Handler, Stack, Config) -> + value([Token], Handler, Stack, Config). + + +object([end_object|Tokens], Handler, [object|Stack], Config) -> + maybe_done(Tokens, handle_event(end_object, Handler, Config), Stack, Config); +object([{key, Key}|Tokens], Handler, Stack, Config) +when is_atom(Key); is_binary(Key); is_integer(Key) -> + object([Key|Tokens], Handler, Stack, Config); +object([Key|Tokens], Handler, [object|Stack], Config) +when is_atom(Key); is_binary(Key); is_integer(Key) -> + try clean_string(fix_key(Key), Config) + of K -> + value( + Tokens, + handle_event({key, K}, Handler, Config), + [object|Stack], + Config + ) + catch error:badarg -> + ?error(object, [{string, Key}|Tokens], Handler, Stack, Config) + end; +object([], Handler, Stack, Config) -> + incomplete(object, Handler, Stack, Config); +object(Token, Handler, Stack, Config) -> + object([Token], Handler, Stack, Config). + + +array([end_array|Tokens], Handler, [array|Stack], Config) -> + maybe_done(Tokens, handle_event(end_array, Handler, Config), Stack, Config); +array([], Handler, Stack, Config) -> + incomplete(array, Handler, Stack, Config); +array(Tokens, Handler, Stack, Config) when is_list(Tokens) -> + value(Tokens, Handler, Stack, Config); +array(Token, Handler, Stack, Config) -> + array([Token], Handler, Stack, Config). + + +maybe_done([end_json], Handler, [], Config) -> + done([end_json], Handler, [], Config); +maybe_done(Tokens, Handler, [object|_] = Stack, Config) when is_list(Tokens) -> + object(Tokens, Handler, Stack, Config); +maybe_done(Tokens, Handler, [array|_] = Stack, Config) when is_list(Tokens) -> + array(Tokens, Handler, Stack, Config); +maybe_done([], Handler, Stack, Config) -> + incomplete(maybe_done, Handler, Stack, Config); +maybe_done(BadTokens, Handler, Stack, Config) when is_list(BadTokens) -> + ?error(maybe_done, BadTokens, Handler, Stack, Config); +maybe_done(Token, Handler, Stack, Config) -> + maybe_done([Token], Handler, Stack, Config). + + +done([], Handler, [], Config=#config{stream=true}) -> + incomplete(done, Handler, [], Config); +done(Tokens, Handler, [], Config) when Tokens == [end_json]; Tokens == [] -> + {_, State} = handle_event(end_json, Handler, Config), + State; +done(BadTokens, Handler, Stack, Config) when is_list(BadTokens) -> + ?error(done, BadTokens, Handler, Stack, Config); +done(Token, Handler, Stack, Config) -> + done([Token], Handler, Stack, Config). + + +fix_key(Key) when is_atom(Key) -> atom_to_binary(Key, utf8); +fix_key(Key) when is_integer(Key) -> list_to_binary(integer_to_list(Key)); +fix_key(Key) when is_binary(Key) -> Key. + + +clean_string(Bin, #config{dirty_strings=true}) -> Bin; +clean_string(Bin, Config) -> clean(Bin, [], Config). 
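
Keys may arrive as atoms, integers, or binaries; `fix_key/1` above coerces them to binaries before the `{key, _}` event is emitted. A short sketch of that in action (same handler pairing as above):

```erlang
P = jsx:parser(jsx, [], []),
%% the integer key 42 comes out as the binary <<"42">>
[start_object, {key, <<"42">>}, {literal, true}, end_object, end_json] =
    P([start_object, 42, true, end_object, end_json]).
```
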
+ + +%% unroll the control characters +clean(<<0, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(0, Config)], Config); +clean(<<1, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(1, Config)], Config); +clean(<<2, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(2, Config)], Config); +clean(<<3, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(3, Config)], Config); +clean(<<4, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(4, Config)], Config); +clean(<<5, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(5, Config)], Config); +clean(<<6, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(6, Config)], Config); +clean(<<7, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(7, Config)], Config); +clean(<<8, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(8, Config)], Config); +clean(<<9, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(9, Config)], Config); +clean(<<10, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(10, Config)], Config); +clean(<<11, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(11, Config)], Config); +clean(<<12, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(12, Config)], Config); +clean(<<13, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(13, Config)], Config); +clean(<<14, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(14, Config)], Config); +clean(<<15, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(15, Config)], Config); +clean(<<16, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(16, Config)], Config); +clean(<<17, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(17, Config)], Config); +clean(<<18, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(18, Config)], Config); +clean(<<19, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(19, Config)], Config); +clean(<<20, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(20, Config)], Config); +clean(<<21, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(21, Config)], Config); +clean(<<22, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(22, Config)], Config); +clean(<<23, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(23, Config)], Config); +clean(<<24, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(24, Config)], Config); +clean(<<25, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(25, Config)], Config); +clean(<<26, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(26, Config)], Config); +clean(<<27, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(27, Config)], Config); +clean(<<28, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(28, Config)], Config); +clean(<<29, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(29, Config)], Config); +clean(<<30, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(30, Config)], Config); +clean(<<31, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(31, Config)], Config); +clean(<<34, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(34, Config)], Config); +clean(<<47, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(47, Config)], Config); +clean(<<92, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(92, Config)], Config); 
+clean(<<X/utf8, Rest/binary>> = Bin, Acc, Config=#config{uescape=true}) -> + case X of + X when X < 16#80 -> start_count(Bin, Acc, Config); + _ -> clean(Rest, [Acc, json_escape_sequence(X)], Config) + end; +%% u+2028 +clean(<<226, 128, 168, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(16#2028, Config)], Config); +%% u+2029 +clean(<<226, 128, 169, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(16#2029, Config)], Config); +clean(<<_/utf8, _/binary>> = Bin, Acc, Config) -> start_count(Bin, Acc, Config); +%% surrogates +clean(<<237, X, _, Rest/binary>>, Acc, Config) when X >= 160 -> + clean(Rest, [Acc, maybe_replace(surrogate, Config)], Config); +%% overlong encodings and missing continuations of a 2 byte sequence +clean(<<X, Rest/binary>>, Acc, Config) when X >= 192, X =< 223 -> + clean(strip_continuations(Rest, 1), [Acc, maybe_replace(badutf, Config)], Config); +%% overlong encodings and missing continuations of a 3 byte sequence +clean(<<X, Rest/binary>>, Acc, Config) when X >= 224, X =< 239 -> + clean(strip_continuations(Rest, 2), [Acc, maybe_replace(badutf, Config)], Config); +%% overlong encodings and missing continuations of a 4 byte sequence +clean(<<X, Rest/binary>>, Acc, Config) when X >= 240, X =< 247 -> + clean(strip_continuations(Rest, 3), [Acc, maybe_replace(badutf, Config)], Config); +clean(<<_, Rest/binary>>, Acc, Config) -> + clean(Rest, [Acc, maybe_replace(badutf, Config)], Config); +clean(<<>>, Acc, _) -> iolist_to_binary(Acc). + + +start_count(Bin, Acc, Config) -> + Size = count(Bin, 0, Config), + <<Clean:Size/binary, Rest/binary>> = Bin, + clean(Rest, [Acc, Clean], Config). + + +%% again, unrolling ascii makes a huge difference. sadly +count(<<0, _/binary>>, N, _) -> N; +count(<<1, _/binary>>, N, _) -> N; +count(<<2, _/binary>>, N, _) -> N; +count(<<3, _/binary>>, N, _) -> N; +count(<<4, _/binary>>, N, _) -> N; +count(<<5, _/binary>>, N, _) -> N; +count(<<6, _/binary>>, N, _) -> N; +count(<<7, _/binary>>, N, _) -> N; +count(<<8, _/binary>>, N, _) -> N; +count(<<9, _/binary>>, N, _) -> N; +count(<<10, _/binary>>, N, _) -> N; +count(<<11, _/binary>>, N, _) -> N; +count(<<12, _/binary>>, N, _) -> N; +count(<<13, _/binary>>, N, _) -> N; +count(<<14, _/binary>>, N, _) -> N; +count(<<15, _/binary>>, N, _) -> N; +count(<<16, _/binary>>, N, _) -> N; +count(<<17, _/binary>>, N, _) -> N; +count(<<18, _/binary>>, N, _) -> N; +count(<<19, _/binary>>, N, _) -> N; +count(<<20, _/binary>>, N, _) -> N; +count(<<21, _/binary>>, N, _) -> N; +count(<<22, _/binary>>, N, _) -> N; +count(<<23, _/binary>>, N, _) -> N; +count(<<24, _/binary>>, N, _) -> N; +count(<<25, _/binary>>, N, _) -> N; +count(<<26, _/binary>>, N, _) -> N; +count(<<27, _/binary>>, N, _) -> N; +count(<<28, _/binary>>, N, _) -> N; +count(<<29, _/binary>>, N, _) -> N; +count(<<30, _/binary>>, N, _) -> N; +count(<<31, _/binary>>, N, _) -> N; +count(<<32, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<33, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<34, _/binary>>, N, _) -> N; +count(<<35, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<36, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<37, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<38, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<39, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<40, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<41, Rest/binary>>, N, Config) -> + 
count(Rest, N + 1, Config); +count(<<42, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<43, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<44, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<45, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<46, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<47, _/binary>>, N, _) -> N; +count(<<48, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<49, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<50, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<51, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<52, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<53, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<54, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<55, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<56, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<57, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<58, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<59, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<60, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<61, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<62, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<63, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<64, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<65, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<66, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<67, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<68, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<69, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<70, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<71, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<72, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<73, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<74, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<75, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<76, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<77, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<78, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<79, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<80, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<81, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<82, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<83, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<84, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<85, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<86, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<87, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<88, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<89, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<90, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<91, Rest/binary>>, N, 
Config) -> + count(Rest, N + 1, Config); +count(<<92, _/binary>>, N, _) -> N; +count(<<93, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<94, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<95, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<96, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<97, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<98, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<99, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<100, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<101, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<102, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<103, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<104, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<105, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<106, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<107, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<108, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<109, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<110, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<111, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<112, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<113, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<114, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<115, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<116, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<117, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<118, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<119, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<120, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<121, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<122, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<123, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<124, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<125, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<126, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<127, Rest/binary>>, N, Config) -> + count(Rest, N + 1, Config); +count(<<_/utf8, _/binary>>, N, #config{uescape=true}) -> N; +count(<<X/utf8, Rest/binary>>, N, Config) -> + case X of + X when X < 16#800 -> count(Rest, N + 2, Config); + 16#2028 -> N; + 16#2029 -> N; + X when X < 16#10000 -> count(Rest, N + 3, Config); + _ -> count(Rest, N + 4, Config) + end; +count(<<_, _/binary>>, N, _) -> N; +count(<<>>, N, _) -> N. + + +strip_continuations(Bin, 0) -> Bin; +strip_continuations(<<X, Rest/binary>>, N) when X >= 128, X =< 191 -> + strip_continuations(Rest, N - 1); +%% not a continuation byte +strip_continuations(Bin, _) -> Bin. 
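
A worked case for `strip_continuations/2` above: after `clean/3` consumes a bad 3-byte start byte, the remaining continuation bytes are dropped so a single u+fffd replaces the whole truncated sequence (matching the "missing continuation byte (3 byte)" case in the decoder tests):

```erlang
%% start byte 224 already consumed by clean/3; 160 is stripped, 32 survives
<<32>> = strip_continuations(<<160, 32>>, 2).
```
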
+ + +maybe_replace($\b, #config{escaped_strings=true}) -> <<$\\, $b>>; +maybe_replace($\t, #config{escaped_strings=true}) -> <<$\\, $t>>; +maybe_replace($\n, #config{escaped_strings=true}) -> <<$\\, $n>>; +maybe_replace($\f, #config{escaped_strings=true}) -> <<$\\, $f>>; +maybe_replace($\r, #config{escaped_strings=true}) -> <<$\\, $r>>; +maybe_replace($\", #config{escaped_strings=true}) -> <<$\\, $\">>; +maybe_replace($/, Config=#config{escaped_strings=true}) -> + case Config#config.escaped_forward_slashes of + true -> <<$\\, $/>>; + false -> <<$/>> + end; +maybe_replace($\\, #config{escaped_strings=true}) -> <<$\\, $\\>>; +maybe_replace(X, #config{escaped_strings=true}) when X < 32 -> + json_escape_sequence(X); +maybe_replace(X, Config=#config{escaped_strings=true}) when X == 16#2028; X == 16#2029 -> + case Config#config.unescaped_jsonp of + true -> <<X/utf8>>; + false -> json_escape_sequence(X) + end; +maybe_replace(Atom, #config{strict_utf8=true}) when is_atom(Atom) -> + erlang:error(badarg); +maybe_replace(surrogate, _Config) -> + <<16#fffd/utf8>>; +maybe_replace(badutf, _Config) -> + <<16#fffd/utf8>>; +maybe_replace(X, _Config) -> + <<X/utf8>>. + + +%% convert a codepoint to it's \uXXXX equiv. +json_escape_sequence(X) when X < 65536 -> + <<A:4, B:4, C:4, D:4>> = <<X:16>>, + <<$\\, $u, (to_hex(A)), (to_hex(B)), (to_hex(C)), (to_hex(D))>>; +json_escape_sequence(X) -> + Adjusted = X - 16#10000, + <<A:10, B:10>> = <<Adjusted:20>>, + [json_escape_sequence(A + 16#d800), json_escape_sequence(B + 16#dc00)]. + + +to_hex(10) -> $a; +to_hex(11) -> $b; +to_hex(12) -> $c; +to_hex(13) -> $d; +to_hex(14) -> $e; +to_hex(15) -> $f; +to_hex(X) -> X + 48. %% ascii "1" is [49], "2" is [50], etc... + + +%% for raw input +-spec init(proplists:proplist()) -> list(). + +init([]) -> []. + + +-spec handle_event(Event::any(), Acc::list()) -> list(). + +handle_event(end_json, State) -> lists:reverse(State); +handle_event(Event, State) -> [Event] ++ State. + + + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + + +parse(Events, Config) -> value(Events, {jsx, []}, [], jsx_config:parse_config(Config)). + + +error_test_() -> + [ + {"value error", ?_assertError(badarg, parse([self()], []))}, + {"maybe_done error", ?_assertError(badarg, parse([start_array, end_array, start_array, end_json], []))}, + {"done error", ?_assertError(badarg, parse([{string, <<"">>}, {literal, true}, end_json], []))}, + {"string error", ?_assertError(badarg, parse([{string, <<237, 160, 128>>}, end_json], [strict]))} + ]. + + +custom_error_handler_test_() -> + Error = fun(Rest, {_, State, _, _}, _) -> {State, Rest} end, + [ + {"value error", ?_assertEqual( + {value, [self()]}, + parse([self()], [{error_handler, Error}]) + )}, + {"maybe_done error", ?_assertEqual( + {maybe_done, [start_array, end_json]}, + parse([start_array, end_array, start_array, end_json], [{error_handler, Error}]) + )}, + {"done error", ?_assertEqual( + {maybe_done, [{literal, true}, end_json]}, + parse([{string, <<"">>}, {literal, true}, end_json], [{error_handler, Error}]) + )}, + {"string error", ?_assertEqual( + {value, [{string, <<237, 160, 128>>}, end_json]}, + parse([{string, <<237, 160, 128>>}, end_json], [{error_handler, Error}, strict]) + )} + ]. + + +incomplete_test_() -> + Cases = [ + {"incomplete value", []}, + {"incomplete object", [start_object]}, + {"incomplete array", [start_array]}, + {"incomplete maybe_done", [start_array, end_array]} + ], + [{Title, ?_assertError(badarg, parse(Events, []))} + || {Title, Events} <- Cases + ]. 
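The surrogate-pair branch of `json_escape_sequence/1` above can be checked by hand. For U+1D11E (the same codepoint exercised by the `uescape` tests further down), the arithmetic works out as follows; a worked sketch that runs in an Erlang shell:

```erlang
%% Worked example of the surrogate-pair arithmetic in json_escape_sequence/1
%% for the codepoint u+1d11e (musical G clef).
Adjusted = 16#1D11E - 16#10000,     % 16#D11E
<<A:10, B:10>> = <<Adjusted:20>>,   % A = 16#034, B = 16#11E
High = A + 16#D800,                 % 16#D834  -> "\ud834"
Low  = B + 16#DC00.                 % 16#DD1E  -> "\udd1e"
```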
+ + +custom_incomplete_handler_test_() -> + [ + {"custom incomplete handler", ?_assertError( + badarg, + parse([], [{incomplete_handler, fun(_, _, _) -> erlang:error(badarg) end}]) + )} + ]. + + +raw_test_() -> + Parse = fun(Events, Config) -> (parser(?MODULE, [], Config))(Events ++ [end_json]) end, + [ + {"raw empty list", ?_assertEqual( + [start_array, end_array], + Parse([{raw, <<"[]">>}], []) + )}, + {"raw empty object", ?_assertEqual( + [start_object, end_object], + Parse([{raw, <<"{}">>}], []) + )}, + {"raw chunk inside stream", ?_assertEqual( + [start_object, {key, <<"key">>}, start_array, {literal, true}, end_array, end_object], + Parse([start_object, {key, <<"key">>}, {raw, <<"[true]">>}, end_object], []) + )} + ]. + + +%% erlang refuses to encode certain codepoints, so fake them +to_fake_utf8(N) when N < 16#0080 -> <<N:8>>; +to_fake_utf8(N) when N < 16#0800 -> + <<0:5, Y:5, X:6>> = <<N:16>>, + <<2#110:3, Y:5, 2#10:2, X:6>>; +to_fake_utf8(N) when N < 16#10000 -> + <<Z:4, Y:6, X:6>> = <<N:16>>, + <<2#1110:4, Z:4, 2#10:2, Y:6, 2#10:2, X:6>>; +to_fake_utf8(N) -> + <<0:3, W:3, Z:6, Y:6, X:6>> = <<N:24>>, + <<2#11110:5, W:3, 2#10:2, Z:6, 2#10:2, Y:6, 2#10:2, X:6>>. + + +codepoints() -> + unicode:characters_to_binary( + [32, 33] + ++ lists:seq(35, 46) + ++ lists:seq(48, 91) + ++ lists:seq(93, 16#2027) + ++ lists:seq(16#202a, 16#d7ff) + ++ lists:seq(16#e000, 16#ffff) + ). + + +extended_codepoints() -> + unicode:characters_to_binary( + lists:seq(16#10000, 16#1ffff) ++ [ + 16#20000, 16#30000, 16#40000, 16#50000, 16#60000, + 16#70000, 16#80000, 16#90000, 16#a0000, 16#b0000, + 16#c0000, 16#d0000, 16#e0000, 16#f0000, 16#100000 + ] + ). + + +surrogates() -> [ to_fake_utf8(N) || N <- lists:seq(16#d800, 16#dfff) ]. + + +clean_string_helper(String) -> + try clean_string(String, #config{strict_utf8=true}) of Clean -> Clean + catch error:badarg -> {error, badarg} + end. + + +clean_string_test_() -> + [ + {"clean codepoints", ?_assertEqual( + codepoints(), + clean_string(codepoints(), #config{}) + )}, + {"clean extended codepoints", ?_assertEqual( + extended_codepoints(), + clean_string(extended_codepoints(), #config{}) + )}, + {"escape path codepoints", ?_assertEqual( + codepoints(), + clean_string(codepoints(), #config{escaped_strings=true}) + )}, + {"escape path extended codepoints", ?_assertEqual( + extended_codepoints(), + clean_string(extended_codepoints(), #config{escaped_strings=true}) + )}, + {"error surrogates", ?_assertEqual( + lists:duplicate(length(surrogates()), {error, badarg}), + lists:map(fun(Codepoint) -> clean_string_helper(Codepoint) end, surrogates()) + )}, + {"clean surrogates", ?_assertEqual( + lists:duplicate(length(surrogates()), <<16#fffd/utf8>>), + lists:map(fun(Codepoint) -> clean_string(Codepoint, #config{}) end, surrogates()) + )} + ]. 
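`to_fake_utf8/1` above exists because `unicode:characters_to_binary/1` refuses to encode surrogate codepoints, so the tests assemble the raw byte sequences themselves. As a concrete check, U+D800 expands to the same three bytes used in the earlier "string error" test; a shell-runnable sketch mirroring the `N < 16#10000` clause:

```erlang
%% Build the 3-byte UTF-8-shaped encoding of the surrogate u+d800 by hand.
N = 16#D800,
<<Z:4, Y:6, X:6>> = <<N:16>>,                  % Z = 16#D, Y = 16#20, X = 0
<<2#1110:4, Z:4, 2#10:2, Y:6, 2#10:2, X:6>>.   % <<237, 160, 128>>
```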
+ + +escape_test_() -> + [ + {"maybe_escape backspace", ?_assertEqual( + <<"\\b">>, + clean_string(<<16#0008/utf8>>, #config{escaped_strings=true}) + )}, + {"don't escape backspace", ?_assertEqual( + <<"\b">>, + clean_string(<<16#0008/utf8>>, #config{}) + )}, + {"maybe_escape tab", ?_assertEqual( + <<"\\t">>, + clean_string(<<16#0009/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape newline", ?_assertEqual( + <<"\\n">>, + clean_string(<<16#000a/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape formfeed", ?_assertEqual( + <<"\\f">>, + clean_string(<<16#000c/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape carriage return", ?_assertEqual( + <<"\\r">>, + clean_string(<<16#000d/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape quote", ?_assertEqual( + <<"\\\"">>, + clean_string(<<16#0022/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape forward slash", ?_assertEqual( + <<"\\/">>, + clean_string(<<16#002f/utf8>>, #config{escaped_strings=true, escaped_forward_slashes=true}) + )}, + {"do not maybe_escape forward slash", ?_assertEqual( + <<"/">>, + clean_string(<<16#002f/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape backslash", ?_assertEqual( + <<"\\\\">>, + clean_string(<<16#005c/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape jsonp (u2028)", ?_assertEqual( + <<"\\u2028">>, + clean_string(<<16#2028/utf8>>, #config{escaped_strings=true}) + )}, + {"do not maybe_escape jsonp (u2028)", ?_assertEqual( + <<16#2028/utf8>>, + clean_string(<<16#2028/utf8>>, #config{escaped_strings=true, unescaped_jsonp=true}) + )}, + {"maybe_escape jsonp (u2029)", ?_assertEqual( + <<"\\u2029">>, + clean_string(<<16#2029/utf8>>, #config{escaped_strings=true}) + )}, + {"do not maybe_escape jsonp (u2029)", ?_assertEqual( + <<16#2029/utf8>>, + clean_string(<<16#2029/utf8>>, #config{escaped_strings=true, unescaped_jsonp=true}) + )}, + {"maybe_escape u0000", ?_assertEqual( + <<"\\u0000">>, + clean_string(<<16#0000/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0001", ?_assertEqual( + <<"\\u0001">>, + clean_string(<<16#0001/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0002", ?_assertEqual( + <<"\\u0002">>, + clean_string(<<16#0002/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0003", ?_assertEqual( + <<"\\u0003">>, + clean_string(<<16#0003/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0004", ?_assertEqual( + <<"\\u0004">>, + clean_string(<<16#0004/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0005", ?_assertEqual( + <<"\\u0005">>, + clean_string(<<16#0005/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0006", ?_assertEqual( + <<"\\u0006">>, + clean_string(<<16#0006/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0007", ?_assertEqual( + <<"\\u0007">>, + clean_string(<<16#0007/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u000b", ?_assertEqual( + <<"\\u000b">>, + clean_string(<<16#000b/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u000e", ?_assertEqual( + <<"\\u000e">>, + clean_string(<<16#000e/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u000f", ?_assertEqual( + <<"\\u000f">>, + clean_string(<<16#000f/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0010", ?_assertEqual( + <<"\\u0010">>, + clean_string(<<16#0010/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0011", ?_assertEqual( + <<"\\u0011">>, + clean_string(<<16#0011/utf8>>, 
#config{escaped_strings=true}) + )}, + {"maybe_escape u0012", ?_assertEqual( + <<"\\u0012">>, + clean_string(<<16#0012/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0013", ?_assertEqual( + <<"\\u0013">>, + clean_string(<<16#0013/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0014", ?_assertEqual( + <<"\\u0014">>, + clean_string(<<16#0014/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0015", ?_assertEqual( + <<"\\u0015">>, + clean_string(<<16#0015/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0016", ?_assertEqual( + <<"\\u0016">>, + clean_string(<<16#0016/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0017", ?_assertEqual( + <<"\\u0017">>, + clean_string(<<16#0017/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0018", ?_assertEqual( + <<"\\u0018">>, + clean_string(<<16#0018/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u0019", ?_assertEqual( + <<"\\u0019">>, + clean_string(<<16#0019/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u001a", ?_assertEqual( + <<"\\u001a">>, + clean_string(<<16#001a/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u001b", ?_assertEqual( + <<"\\u001b">>, + clean_string(<<16#001b/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u001c", ?_assertEqual( + <<"\\u001c">>, + clean_string(<<16#001c/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u001d", ?_assertEqual( + <<"\\u001d">>, + clean_string(<<16#001d/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u001e", ?_assertEqual( + <<"\\u001e">>, + clean_string(<<16#001e/utf8>>, #config{escaped_strings=true}) + )}, + {"maybe_escape u001f", ?_assertEqual( + <<"\\u001f">>, + clean_string(<<16#001f/utf8>>, #config{escaped_strings=true}) + )} + ]. 
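The table of escapes exercised above is what the `escaped_strings` flag buys at the encoder level. Since `jsx_to_json:to_json/2` (later in this diff) always appends that flag, a control character in an input string should come back as its short escape; a hedged usage sketch, with the result inferred from the tests above rather than quoted from documentation:

```erlang
%% Hedged sketch: encode a proplist whose value contains a newline.
%% to_json/2 appends escaped_strings itself, so the newline is expected
%% to come back as the two-character escape \n inside the JSON string.
jsx_to_json:to_json([{<<"text">>, <<"line1\nline2">>}], []).
%% expected: <<"{\"text\":\"line1\\nline2\"}">>
```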
+ + +bad_utf8_test_() -> + [ + {"orphan continuation byte u+0080", ?_assertError( + badarg, + clean_string(<<16#0080>>, #config{strict_utf8=true}) + )}, + {"orphan continuation byte u+0080 replaced", ?_assertEqual( + <<16#fffd/utf8>>, + clean_string(<<16#0080>>, #config{}) + )}, + {"orphan continuation byte u+00bf", ?_assertError( + badarg, + clean_string(<<16#00bf>>, #config{strict_utf8=true}) + )}, + {"orphan continuation byte u+00bf replaced", ?_assertEqual( + <<16#fffd/utf8>>, + clean_string(<<16#00bf>>, #config{}) + )}, + {"2 continuation bytes", ?_assertError( + badarg, + clean_string(<<(binary:copy(<<16#0080>>, 2))/binary>>, #config{strict_utf8=true}) + )}, + {"2 continuation bytes replaced", ?_assertEqual( + binary:copy(<<16#fffd/utf8>>, 2), + clean_string(<<(binary:copy(<<16#0080>>, 2))/binary>>, #config{}) + )}, + {"3 continuation bytes", ?_assertError( + badarg, + clean_string(<<(binary:copy(<<16#0080>>, 3))/binary>>, #config{strict_utf8=true}) + )}, + {"3 continuation bytes replaced", ?_assertEqual( + binary:copy(<<16#fffd/utf8>>, 3), + clean_string(<<(binary:copy(<<16#0080>>, 3))/binary>>, #config{}) + )}, + {"4 continuation bytes", ?_assertError( + badarg, + clean_string(<<(binary:copy(<<16#0080>>, 4))/binary>>, #config{strict_utf8=true}) + )}, + {"4 continuation bytes replaced", ?_assertEqual( + binary:copy(<<16#fffd/utf8>>, 4), + clean_string(<<(binary:copy(<<16#0080>>, 4))/binary>>, #config{}) + )}, + {"5 continuation bytes", ?_assertError( + badarg, + clean_string(<<(binary:copy(<<16#0080>>, 5))/binary>>, #config{strict_utf8=true}) + )}, + {"5 continuation bytes replaced", ?_assertEqual( + binary:copy(<<16#fffd/utf8>>, 5), + clean_string(<<(binary:copy(<<16#0080>>, 5))/binary>>, #config{}) + )}, + {"6 continuation bytes", ?_assertError( + badarg, + clean_string(<<(binary:copy(<<16#0080>>, 6))/binary>>, #config{strict_utf8=true}) + )}, + {"6 continuation bytes replaced", ?_assertEqual( + binary:copy(<<16#fffd/utf8>>, 6), + clean_string(<<(binary:copy(<<16#0080>>, 6))/binary>>, #config{}) + )}, + {"all continuation bytes", ?_assertError( + badarg, + clean_string(<<(list_to_binary(lists:seq(16#0080, 16#00bf)))/binary>>, #config{strict_utf8=true}) + )}, + {"all continuation bytes replaced", ?_assertEqual( + binary:copy(<<16#fffd/utf8>>, length(lists:seq(16#0080, 16#00bf))), + clean_string( + <<(list_to_binary(lists:seq(16#0080, 16#00bf)))/binary>>, + #config{} + ) + )}, + {"lonely start byte", ?_assertError( + badarg, + clean_string(<<16#00c0>>, #config{strict_utf8=true}) + )}, + {"lonely start byte replaced", ?_assertEqual( + <<16#fffd/utf8>>, + clean_string(<<16#00c0>>, #config{}) + )}, + {"lonely start bytes (2 byte)", ?_assertError( + badarg, + clean_string(<<16#00c0, 32, 16#00df>>, #config{strict_utf8=true}) + )}, + {"lonely start bytes (2 byte) replaced", ?_assertEqual( + <<16#fffd/utf8, 32, 16#fffd/utf8>>, + clean_string(<<16#00c0, 32, 16#00df>>, #config{}) + )}, + {"lonely start bytes (3 byte)", ?_assertError( + badarg, + clean_string(<<16#00e0, 32, 16#00ef>>, #config{strict_utf8=true}) + )}, + {"lonely start bytes (3 byte) replaced", ?_assertEqual( + <<16#fffd/utf8, 32, 16#fffd/utf8>>, + clean_string(<<16#00e0, 32, 16#00ef>>, #config{}) + )}, + {"lonely start bytes (4 byte)", ?_assertError( + badarg, + clean_string(<<16#00f0, 32, 16#00f7>>, #config{strict_utf8=true}) + )}, + {"lonely start bytes (4 byte) replaced", ?_assertEqual( + <<16#fffd/utf8, 32, 16#fffd/utf8>>, + clean_string(<<16#00f0, 32, 16#00f7>>, #config{}) + )}, + {"missing continuation byte (3 byte)", 
?_assertError( + badarg, + clean_string(<<224, 160, 32>>, #config{strict_utf8=true}) + )}, + {"missing continuation byte (3 byte) replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<224, 160, 32>>, #config{}) + )}, + {"missing continuation byte (4 byte missing one)", ?_assertError( + badarg, + clean_string(<<240, 144, 128, 32>>, #config{strict_utf8=true}) + )}, + {"missing continuation byte (4 byte missing one) replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<240, 144, 128, 32>>, #config{}) + )}, + {"missing continuation byte (4 byte missing two)", ?_assertError( + badarg, + clean_string(<<240, 144, 32>>, #config{strict_utf8=true}) + )}, + {"missing continuation byte (4 byte missing two) replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<240, 144, 32>>, #config{}) + )}, + {"overlong encoding of u+002f (2 byte)", ?_assertError( + badarg, + clean_string(<<16#c0, 16#af, 32>>, #config{strict_utf8=true}) + )}, + {"overlong encoding of u+002f (2 byte) replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<16#c0, 16#af, 32>>, #config{}) + )}, + {"overlong encoding of u+002f (3 byte)", ?_assertError( + badarg, + clean_string(<<16#e0, 16#80, 16#af, 32>>, #config{strict_utf8=true}) + )}, + {"overlong encoding of u+002f (3 byte) replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<16#e0, 16#80, 16#af, 32>>, #config{}) + )}, + {"overlong encoding of u+002f (4 byte)", ?_assertError( + badarg, + clean_string(<<16#f0, 16#80, 16#80, 16#af, 32>>, #config{strict_utf8=true}) + )}, + {"overlong encoding of u+002f (4 byte) replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<16#f0, 16#80, 16#80, 16#af, 32>>, #config{}) + )}, + {"highest overlong 2 byte sequence", ?_assertError( + badarg, + clean_string(<<16#c1, 16#bf, 32>>, #config{strict_utf8=true}) + )}, + {"highest overlong 2 byte sequence replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<16#c1, 16#bf, 32>>, #config{}) + )}, + {"highest overlong 3 byte sequence", ?_assertError( + badarg, + clean_string(<<16#e0, 16#9f, 16#bf, 32>>, #config{strict_utf8=true}) + )}, + {"highest overlong 3 byte sequence replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<16#e0, 16#9f, 16#bf, 32>>, #config{}) + )}, + {"highest overlong 4 byte sequence", ?_assertError( + badarg, + clean_string(<<16#f0, 16#8f, 16#bf, 16#bf, 32>>, #config{strict_utf8=true}) + )}, + {"highest overlong 4 byte sequence replaced", ?_assertEqual( + <<16#fffd/utf8, 32>>, + clean_string(<<16#f0, 16#8f, 16#bf, 16#bf, 32>>, #config{}) + )} + ]. + + +json_escape_sequence_test_() -> + [ + {"json escape sequence test - 16#0000", ?_assertEqual(<<"\\u0000"/utf8>>, json_escape_sequence(16#0000))}, + {"json escape sequence test - 16#abc", ?_assertEqual(<<"\\u0abc"/utf8>>, json_escape_sequence(16#abc))}, + {"json escape sequence test - 16#def", ?_assertEqual(<<"\\u0def"/utf8>>, json_escape_sequence(16#def))} + ]. 
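The overlong-encoding cases above reject multi-byte sequences whose payload would have fit in fewer bytes. Decoding the bits of the two-byte case by hand shows why; the bit syntax below is illustrative, not jsx code:

```erlang
%% Why <<16#c0, 16#af>> is an overlong encoding of u+002f: the payload
%% bits decode to 16#2f ($/), which already fits in a single byte, so a
%% conforming UTF-8 decoder must reject the two-byte form.
<<2#110:3, Hi:5, 2#10:2, Lo:6>> = <<16#c0, 16#af>>,
Codepoint = (Hi bsl 6) bor Lo.   % 16#2f = $/
```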
+ + +uescape_test_() -> + [ + {"\"\\u0080\"", ?_assertEqual( + <<"\\u0080">>, + clean_string(<<128/utf8>>, #config{uescape=true}) + )}, + {"\"\\u8ca8\\u5481\\u3002\\u0091\\u0091\"", ?_assertEqual( + <<"\\u8ca8\\u5481\\u3002\\u0091\\u0091">>, + clean_string( + <<232,178,168,229,146,129,227,128,130,194,145,194,145>>, + #config{uescape=true} + ) + )}, + {"\"\\ud834\\udd1e\"", ?_assertEqual( + <<"\\ud834\\udd1e">>, + clean_string(<<240, 157, 132, 158>>, #config{uescape=true}) + )}, + {"\"\\ud83d\\ude0a\"", ?_assertEqual( + <<"\\ud83d\\ude0a">>, + clean_string(<<240, 159, 152, 138>>, #config{uescape=true}) + )} + ]. + + +fix_key_test_() -> + [ + {"binary key", ?_assertEqual(fix_key(<<"foo">>), <<"foo">>)}, + {"atom key", ?_assertEqual(fix_key(foo), <<"foo">>)}, + {"integer key", ?_assertEqual(fix_key(123), <<"123">>)} + ]. + + +datetime_test_() -> + [ + {"datetime", ?_assertEqual( + [start_array, {string, <<"2014-08-13T23:12:34Z">>}, end_array, end_json], + parse([start_array, {{2014,08,13},{23,12,34}}, end_array, end_json], []) + )}, + {"datetime", ?_assertEqual( + [start_array, {string, <<"2014-08-13T23:12:34.363369Z">>}, end_array, end_json], + parse([start_array, {{2014,08,13},{23,12,34.363369}}, end_array, end_json], []) + )} + ]. + + +timestamp_test_() -> + [ + {"timestamp", ?_assertEqual( + [start_array, {string, <<"2016-01-15T18:19:28Z">>}, end_array, end_json], + parse([start_array, {1452,881968,111772}, end_array, end_json], []) + )} + ]. + + +rogue_tuple_test_() -> + [ + {"kv in value position of object", ?_assertError( + badarg, + parse([start_object, <<"key">>, {<<"key">>, <<"value">>}, end_object, end_json], []) + )}, + {"kv in value position of list", ?_assertError( + badarg, + parse([start_array, {<<"key">>, <<"value">>}, end_array, end_json], []) + )} + ]. + + +-endif. diff --git a/server/_build/default/plugins/jsx/src/jsx_to_json.erl b/server/_build/default/plugins/jsx/src/jsx_to_json.erl new file mode 100644 index 0000000..fb14df3 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_to_json.erl @@ -0,0 +1,409 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_to_json). + +-export([to_json/2, format/2]). +-export([init/1, handle_event/2]). +-export([start_json/0, start_json/1]). +-export([start_object/1, start_array/1, finish/1, insert/2, get_key/1, get_value/1]). 
+ + +-record(config, { + space = 0, + indent = 0, + depth = 0, + newline = <<$\n>> +}). + +-type config() :: list(). +-export_type([config/0]). + + +-spec to_json(Source::any(), Config::config()) -> binary(). + +to_json(Source, Config) when is_list(Config) -> + (jsx:encoder(?MODULE, Config, jsx_config:extract_config(Config ++ [escaped_strings])))(Source). + + +-spec format(Source::binary(), Config::config()) -> binary(). + +format(Source, Config) when is_binary(Source) andalso is_list(Config) -> + (jsx:decoder(?MODULE, Config, jsx_config:extract_config(Config ++ [escaped_strings])))(Source); +format(_, _) -> erlang:error(badarg). + + +parse_config(Config) -> parse_config(Config, #config{}). + +parse_config([{space, Val}|Rest], Config) when is_integer(Val), Val > 0 -> + parse_config(Rest, Config#config{space = Val}); +parse_config([space|Rest], Config) -> + parse_config(Rest, Config#config{space = 1}); +parse_config([{indent, Val}|Rest], Config) when is_integer(Val), Val > 0 -> + parse_config(Rest, Config#config{indent = Val}); +parse_config([indent|Rest], Config) -> + parse_config(Rest, Config#config{indent = 1}); +parse_config([{newline, Val}|Rest], Config) when is_binary(Val) -> + parse_config(Rest, Config#config{newline = Val}); +parse_config([{K, _}|Rest] = Options, Config) -> + case lists:member(K, jsx_config:valid_flags()) of + true -> parse_config(Rest, Config) + ; false -> erlang:error(badarg, [Options, Config]) + end; +parse_config([K|Rest] = Options, Config) -> + case lists:member(K, jsx_config:valid_flags()) of + true -> parse_config(Rest, Config) + ; false -> erlang:error(badarg, [Options, Config]) + end; +parse_config([], Config) -> + Config. + + +-define(start_object, <<"{">>). +-define(start_array, <<"[">>). +-define(end_object, <<"}">>). +-define(end_array, <<"]">>). +-define(colon, <<":">>). +-define(comma, <<",">>). +-define(quote, <<"\"">>). +-define(space, <<" ">>). +-define(newline, <<"\n">>). + + +-type state() :: {unicode:charlist(), #config{}}. +-spec init(Config::proplists:proplist()) -> state(). + +init(Config) -> {[], parse_config(Config)}. + + +-spec handle_event(Event::any(), State::state()) -> state(). + +handle_event(end_json, State) -> get_value(State); + +handle_event(start_object, State) -> start_object(State); +handle_event(end_object, State) -> finish(State); + +handle_event(start_array, State) -> start_array(State); +handle_event(end_array, State) -> finish(State); + +handle_event({Type, Event}, {_, Config} = State) -> insert(encode(Type, Event, Config), State). + + +encode(string, String, _Config) -> + [?quote, String, ?quote]; +encode(key, Key, _Config) -> + [?quote, Key, ?quote]; +encode(literal, Literal, _Config) -> + erlang:atom_to_list(Literal); +encode(integer, Integer, _Config) -> + erlang:integer_to_list(Integer); +encode(float, Float, _Config) -> + io_lib:format("~p", [Float]). + + +space(Config) -> + case Config#config.space of + 0 -> <<>> + ; X when X > 0 -> binary:copy(?space, X) + end. + + +indent(Config) -> + case Config#config.indent of + 0 -> <<>> + ; X when X > 0 -> <<(Config#config.newline)/binary, (binary:copy(?space, X * Config#config.depth))/binary>> + end. + + +indent_or_space(Config) -> + case Config#config.indent > 0 of + true -> indent(Config) + ; false -> space(Config) + end. 
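The `space`, `indent` and `newline` options parsed above drive the whitespace helpers at the end of this chunk, and the formatting tests later in the file pin down concrete outputs. Note that `indent` counts spaces per nesting level, while `space` only pads after the colon; examples matching `format_test_` and `custom_newline_test_` below (outputs written with the two-space indent those tests expect):

```erlang
jsx:format(<<"{\"k\":\"v\"}">>, [space, {indent, 2}, {newline, <<$\r>>}]).
%% expected: <<"{\r  \"k\": \"v\"\r}">>

jsx:prettify(<<"{\"k\":\"v\"}">>).   % assumed shorthand for format/2 with [space, {indent, 2}]
%% expected: <<"{\n  \"k\": \"v\"\n}">>

jsx:minify(<<"{\n  \"k\": \"v\"\n}">>).
%% expected: <<"{\"k\":\"v\"}">>
```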
+ + +%% internal state is a stack and a config object +%% `{Stack, Config}` +%% the stack is a list of in progress objects/arrays +%% `[Current, Parent, Grandparent,...OriginalAncestor]` +%% an object has the representation on the stack of +%% `{object, Object}` +%% of if there's a key with a yet to be matched value +%% `{object, Key, Object}` +%% an array looks like +%% `{array, Array}` +%% `Object` and `Array` are utf8 encoded binaries + +start_json() -> {[], #config{}}. + +start_json(Config) when is_list(Config) -> {[], parse_config(Config)}. + +%% allocate a new object on top of the stack +start_object({Stack, Config = #config{depth = Depth}}) -> + {[{object, ?start_object}] ++ Stack, Config#config{depth = Depth + 1}}. + +%% allocate a new array on top of the stack +start_array({Stack, Config = #config{depth = Depth}}) -> + {[{array, ?start_array}] ++ Stack, Config#config{depth = Depth + 1}}. + +%% finish an object or array and insert it into the parent object if it exists +finish({Stack, Config = #config{depth = Depth}}) -> + NewConfig = Config#config{depth = Depth - 1}, + finish_({Stack, NewConfig}). + +finish_({[{object, <<"{">>}], Config}) -> {<<"{}">>, Config}; +finish_({[{array, <<"[">>}], Config}) -> {<<"[]">>, Config}; +finish_({[{object, <<"{">>}|Rest], Config}) -> insert(<<"{}">>, {Rest, Config}); +finish_({[{array, <<"[">>}|Rest], Config}) -> insert(<<"[]">>, {Rest, Config}); +finish_({[{object, Object}], Config}) -> + {[Object, indent(Config), ?end_object], Config}; +finish_({[{object, Object}|Rest], Config}) -> + insert([Object, indent(Config), ?end_object], {Rest, Config}); +finish_({[{array, Array}], Config}) -> + {[Array, indent(Config), ?end_array], Config}; +finish_({[{array, Array}|Rest], Config}) -> + insert([Array, indent(Config), ?end_array], {Rest, Config}); +finish_(_) -> erlang:error(badarg). + +%% insert a value when there's no parent object or array +insert(Value, {[], Config}) -> + {Value, Config}; +%% insert a key or value into an object or array, autodetects the 'right' thing +insert(Key, {[{object, Object}|Rest], Config}) -> + {[{object, Key, Object}] ++ Rest, Config}; +insert(Value, {[{object, Key, ?start_object}|Rest], Config}) -> + { + [{object, [ + ?start_object, + indent(Config), + Key, + ?colon, + space(Config), + Value + ]}] ++ Rest, + Config + }; +insert(Value, {[{object, Key, Object}|Rest], Config}) -> + { + [{object, [ + Object, + ?comma, + indent_or_space(Config), + Key, + ?colon, + space(Config), + Value + ]}] ++ Rest, + Config + }; +insert(Value, {[{array, ?start_array}|Rest], Config}) -> + {[{array, [?start_array, indent(Config), Value]}] ++ Rest, Config}; +insert(Value, {[{array, Array}|Rest], Config}) -> + { + [{array, [Array, + ?comma, + indent_or_space(Config), + Value + ]}] ++ Rest, + Config + }; +insert(_, _) -> erlang:error(badarg). + + +get_key({[{object, Key, _}|_], _}) -> Key; +get_key(_) -> erlang:error(badarg). + + +get_value({Value, _Config}) -> + try unicode:characters_to_binary(Value) + catch error:_ -> erlang:error(badarg) + end; +get_value(_) -> erlang:error(badarg). + + + +%% eunit tests + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). 
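The stack discipline described in the comment above can be watched in action by folding a small event stream through the exported callbacks, which is exactly what `handle_event_test_` does at the end of this file. A hand-written example (the event list is assumed, not taken from `jsx:test_cases()`):

```erlang
%% Fold a hand-written event stream through the formatter callbacks;
%% with the default config the result is the minified object.
Events = [start_object, {key, <<"k">>}, {string, <<"v">>}, end_object, end_json],
lists:foldl(fun jsx_to_json:handle_event/2, jsx_to_json:init([]), Events).
%% expected: <<"{\"k\":\"v\"}">>
```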
+ + +config_test_() -> + [ + {"empty config", ?_assertEqual(#config{}, parse_config([]))}, + {"unspecified indent/space", ?_assertEqual( + #config{space=1, indent=1}, + parse_config([space, indent]) + )}, + {"specific indent", ?_assertEqual( + #config{indent=4}, + parse_config([{indent, 4}]) + )}, + {"specific space", ?_assertEqual( + #config{space=2}, + parse_config([{space, 2}]) + )}, + {"specific space and indent", ?_assertEqual( + #config{space=2, indent=2}, + parse_config([{space, 2}, {indent, 2}]) + )}, + {"invalid opt flag", ?_assertError(badarg, parse_config([error]))}, + {"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))} + ]. + + +space_test_() -> + [ + {"no space", ?_assertEqual(<<>>, space(#config{space=0}))}, + {"one space", ?_assertEqual(<<" ">>, space(#config{space=1}))}, + {"four spaces", ?_assertEqual(<<" ">>, space(#config{space=4}))} + ]. + + +indent_test_() -> + [ + {"no indent", ?_assertEqual(<<>>, indent(#config{indent=0, depth=1}))}, + {"indent 1 depth 1", ?_assertEqual( + <<?newline/binary, <<" ">>/binary>>, + indent(#config{indent=1, depth=1}) + )}, + {"indent 1 depth 2", ?_assertEqual( + <<?newline/binary, <<" ">>/binary>>, + indent(#config{indent=1, depth=2}) + )}, + {"indent 4 depth 1", ?_assertEqual( + <<?newline/binary, <<" ">>/binary>>, + indent(#config{indent=4, depth=1}) + )}, + {"indent 4 depth 2", ?_assertEqual( + <<?newline/binary, <<" ">>/binary, <<" ">>/binary>>, + indent(#config{indent=4, depth=2}) + )} + ]. + + +indent_or_space_test_() -> + [ + {"no indent so space", ?_assertEqual( + <<" ">>, + indent_or_space(#config{space=1, indent=0, depth=1}) + )}, + {"indent so no space", ?_assertEqual( + <<?newline/binary, <<" ">>/binary>>, + indent_or_space(#config{space=1, indent=1, depth=1}) + )} + ]. 
+ + +encode_test_() -> + [ + {"0.0", ?_assert(encode(float, 0.0, #config{}) =:= ["0.0"])}, + {"1.0", ?_assert(encode(float, 1.0, #config{}) =:= ["1.0"])}, + {"-1.0", ?_assert(encode(float, -1.0, #config{}) =:= ["-1.0"])}, + {"3.1234567890987654321", + ?_assert( + encode(float, 3.1234567890987654321, #config{}) =:= ["3.1234567890987655"]) + }, + {"1.0e23", ?_assert(encode(float, 1.0e23, #config{}) =:= ["1.0e23"])}, + {"0.3", ?_assert(encode(float, 3.0/10.0, #config{}) =:= ["0.3"])}, + {"0.0001", ?_assert(encode(float, 0.0001, #config{}) =:= ["0.0001"])}, + {"0.00001", ?_assert(encode(float, 0.00001, #config{}) =:= ["1.0e-5"])}, + {"0.00000001", ?_assert(encode(float, 0.00000001, #config{}) =:= ["1.0e-8"])}, + {"1.0e-323", ?_assert(encode(float, 1.0e-323, #config{}) =:= ["1.0e-323"])}, + {"1.0e308", ?_assert(encode(float, 1.0e308, #config{}) =:= ["1.0e308"])}, + {"min normalized float", + ?_assert( + encode(float, math:pow(2, -1022), #config{}) =:= ["2.2250738585072014e-308"] + ) + }, + {"max normalized float", + ?_assert( + encode(float, (2 - math:pow(2, -52)) * math:pow(2, 1023), #config{}) + =:= ["1.7976931348623157e308"] + ) + }, + {"min denormalized float", + ?_assert(encode(float, math:pow(2, -1074), #config{}) =:= ["5.0e-324"]) + }, + {"max denormalized float", + ?_assert( + encode(float, (1 - math:pow(2, -52)) * math:pow(2, -1022), #config{}) + =:= ["2.225073858507201e-308"] + ) + }, + {"hello world", ?_assert(encode(string, <<"hello world">>, #config{}) + =:= [<<"\"">>, <<"hello world">>, <<"\"">>] + )}, + {"key", ?_assert(encode(key, <<"key">>, #config{}) =:= [<<"\"">>, <<"key">>, <<"\"">>])}, + {"1", ?_assert(encode(integer, 1, #config{}) =:= "1")}, + {"-1", ?_assert(encode(integer, -1, #config{}) =:= "-1")}, + {"true", ?_assert(encode(literal, true, #config{}) =:= "true")}, + {"false", ?_assert(encode(literal, false, #config{}) =:= "false")}, + {"null", ?_assert(encode(literal, null, #config{}) =:= "null")} + ]. + + +format_test_() -> + % {minified version, pretty version} + Cases = [ + {"empty object", <<"{}">>, <<"{}">>}, + {"empty array", <<"[]">>, <<"[]">>}, + {"single key object", <<"{\"k\":\"v\"}">>, <<"{\n \"k\": \"v\"\n}">>}, + {"single member array", <<"[true]">>, <<"[\n true\n]">>}, + {"multiple key object", + <<"{\"k\":\"v\",\"x\":\"y\"}">>, + <<"{\n \"k\": \"v\",\n \"x\": \"y\"\n}">> + }, + {"multiple member array", + <<"[1.0,2.0,3.0]">>, + <<"[\n 1.0,\n 2.0,\n 3.0\n]">> + }, + {"nested structure", + <<"[[{},[],true],{\"k\":\"v\",\"x\":\"y\"}]">>, + <<"[\n [\n {},\n [],\n true\n ],\n {\n \"k\": \"v\",\n \"x\": \"y\"\n }\n]">> + } + ], + [{Title, ?_assertEqual(Min, jsx:minify(Pretty))} || {Title, Min, Pretty} <- Cases] ++ + [{Title, ?_assertEqual(Pretty, jsx:prettify(Min))} || {Title, Min, Pretty} <- Cases]. + +custom_newline_test_() -> + [ + {"single key object", ?_assert( + jsx:format(<<"{\"k\":\"v\"}">>, [space, {indent, 2}, {newline, <<$\r>>}]) + =:= <<"{\r \"k\": \"v\"\r}">>) + } + ]. + +handle_event_test_() -> + Data = jsx:test_cases() ++ jsx:special_test_cases(), + [ + { + Title, ?_assertEqual( + JSON, + lists:foldl(fun handle_event/2, init([]), Events ++ [end_json]) + ) + } || {Title, JSON, _, Events} <- Data + ]. + + +-endif. 
diff --git a/server/_build/default/plugins/jsx/src/jsx_to_term.erl b/server/_build/default/plugins/jsx/src/jsx_to_term.erl new file mode 100644 index 0000000..e4e5f5e --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_to_term.erl @@ -0,0 +1,459 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 Alisdair Sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_to_term). + +-export([to_term/2]). +-export([init/1, handle_event/2]). +-export([ + start_term/1, + start_object/1, + start_array/1, + finish/1, + insert/2, + get_key/1, + get_value/1 +]). + + +-record(config, { + labels = binary, + return_maps = false +}). + +-type config() :: list(). +-export_type([config/0]). + +-ifndef(maps_support). +-type json_value() :: list(json_value()) + | list({binary() | atom(), json_value()}) | [{},...] + | true + | false + | null + | integer() + | float() + | binary(). +-endif. + +-ifdef(maps_support). +-type json_value() :: list(json_value()) + | list({binary() | atom(), json_value()}) | [{},...] + | map() + | true + | false + | null + | integer() + | float() + | binary(). +-endif. + + +-spec to_term(Source::binary(), Config::config()) -> json_value(). + +-ifdef(maps_always). +to_term(Source, Config) when is_list(Config) -> + (jsx:decoder(?MODULE, [return_maps] ++ Config, jsx_config:extract_config(Config)))(Source). +-endif. +-ifndef(maps_always). +to_term(Source, Config) when is_list(Config) -> + (jsx:decoder(?MODULE, Config, jsx_config:extract_config(Config)))(Source). +-endif. + +parse_config(Config) -> parse_config(Config, #config{}). 
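`jsx_to_term` is the handler behind `jsx:decode/1,2`, and the two options parsed below, `labels` and `return_maps`, determine the shape of the returned term. A hedged usage sketch, assuming the default build the tests in this file expect (no `maps_always`, so proplists by default); the map results mirror `return_maps_test_` at the end of the file and the `labels` result follows from `format_key/2`:

```erlang
%% Default: objects come back as proplists with binary keys.
jsx:decode(<<"{\"library\": \"jsx\", \"awesome\": true}">>, []).
%% expected: [{<<"library">>, <<"jsx">>}, {<<"awesome">>, true}]

%% return_maps: objects come back as maps instead.
jsx:decode(<<"{\"library\": \"jsx\", \"awesome\": true}">>, [return_maps]).
%% expected: #{<<"awesome">> => true, <<"library">> => <<"jsx">>}

%% labels: keys are converted to atoms before being stored.
jsx:decode(<<"{\"library\": \"jsx\"}">>, [{labels, atom}]).
%% expected: [{library, <<"jsx">>}]
```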
+ +parse_config([{labels, Val}|Rest], Config) + when Val == binary; Val == atom; Val == existing_atom; Val == attempt_atom -> + parse_config(Rest, Config#config{labels = Val}); +parse_config([labels|Rest], Config) -> + parse_config(Rest, Config#config{labels = binary}); +parse_config([{return_maps, Val}|Rest], Config) + when Val == true; Val == false -> + parse_config(Rest, Config#config{return_maps = Val}); +parse_config([return_maps|Rest], Config) -> + parse_config(Rest, Config#config{return_maps = true}); +parse_config([{K, _}|Rest] = Options, Config) -> + case lists:member(K, jsx_config:valid_flags()) of + true -> parse_config(Rest, Config) + ; false -> erlang:error(badarg, [Options, Config]) + end; +parse_config([K|Rest] = Options, Config) -> + case lists:member(K, jsx_config:valid_flags()) of + true -> parse_config(Rest, Config) + ; false -> erlang:error(badarg, [Options, Config]) + end; +parse_config([], Config) -> + Config. + + +-type state() :: {list(), #config{}}. +-spec init(Config::proplists:proplist()) -> state(). + +init(Config) -> start_term(Config). + +-spec handle_event(Event::any(), State::state()) -> state(). + +handle_event(end_json, State) -> get_value(State); + +handle_event(start_object, State) -> start_object(State); +handle_event(end_object, State) -> finish(State); + +handle_event(start_array, State) -> start_array(State); +handle_event(end_array, State) -> finish(State); + +handle_event({key, Key}, {_, Config} = State) -> insert(format_key(Key, Config), State); + +handle_event({_, Event}, State) -> insert(Event, State). + + +format_key(Key, Config) -> + case Config#config.labels of + binary -> Key + ; atom -> binary_to_atom(Key, utf8) + ; existing_atom -> binary_to_existing_atom(Key, utf8) + ; attempt_atom -> + try binary_to_existing_atom(Key, utf8) of + Result -> Result + catch + error:badarg -> Key + end + end. + + +%% internal state is a stack and a config object +%% `{Stack, Config}` +%% the stack is a list of in progress objects/arrays +%% `[Current, Parent, Grandparent,...OriginalAncestor]` +%% an object has the representation on the stack of +%% `{object, [ +%% {NthKey, NthValue}, +%% {NMinus1Key, NthMinus1Value}, +%% ..., +%% {FirstKey, FirstValue} +%% ]}` +%% or if returning maps +%% `{object, #{ +%% FirstKey => FirstValue, +%% SecondKey => SecondValue, +%% ..., +%% NthKey => NthValue +%% }}` +%% or if there's a key with a yet to be matched value +%% `{object, Key, ...}` +%% an array looks like +%% `{array, [NthValue, NthMinus1Value,...FirstValue]}` + +start_term(Config) when is_list(Config) -> {[], parse_config(Config)}. + + +-ifndef(maps_support). +%% allocate a new object on top of the stack +start_object({Stack, Config}) -> {[{object, []}] ++ Stack, Config}. + + +%% allocate a new array on top of the stack +start_array({Stack, Config}) -> {[{array, []}] ++ Stack, Config}. + + +%% finish an object or array and insert it into the parent object if it exists or +%% return it if it is the root object +finish({[{object, []}], Config}) -> {[{}], Config}; +finish({[{object, []}|Rest], Config}) -> insert([{}], {Rest, Config}); +finish({[{object, Pairs}], Config}) -> {lists:reverse(Pairs), Config}; +finish({[{object, Pairs}|Rest], Config}) -> insert(lists:reverse(Pairs), {Rest, Config}); +finish({[{array, Values}], Config}) -> {lists:reverse(Values), Config}; +finish({[{array, Values}|Rest], Config}) -> insert(lists:reverse(Values), {Rest, Config}); +finish(_) -> erlang:error(badarg). 
+ + +%% insert a value when there's no parent object or array +insert(Value, {[], Config}) -> {Value, Config}; +%% insert a key or value into an object or array, autodetects the 'right' thing +insert(Key, {[{object, Pairs}|Rest], Config}) -> + {[{object, Key, Pairs}] ++ Rest, Config}; +insert(Value, {[{object, Key, Pairs}|Rest], Config}) -> + {[{object, [{Key, Value}] ++ Pairs}] ++ Rest, Config}; +insert(Value, {[{array, Values}|Rest], Config}) -> + {[{array, [Value] ++ Values}] ++ Rest, Config}; +insert(_, _) -> erlang:error(badarg). +-endif. + + +-ifdef(maps_support). +%% allocate a new object on top of the stack +start_object({Stack, Config=#config{return_maps=true}}) -> + {[{object, #{}}] ++ Stack, Config}; +start_object({Stack, Config}) -> + {[{object, []}] ++ Stack, Config}. + + +%% allocate a new array on top of the stack +start_array({Stack, Config}) -> {[{array, []}] ++ Stack, Config}. + + +%% finish an object or array and insert it into the parent object if it exists or +%% return it if it is the root object +finish({[{object, Map}], Config=#config{return_maps=true}}) -> {Map, Config}; +finish({[{object, Map}|Rest], Config=#config{return_maps=true}}) -> insert(Map, {Rest, Config}); +finish({[{object, []}], Config}) -> {[{}], Config}; +finish({[{object, []}|Rest], Config}) -> insert([{}], {Rest, Config}); +finish({[{object, Pairs}], Config}) -> {lists:reverse(Pairs), Config}; +finish({[{object, Pairs}|Rest], Config}) -> insert(lists:reverse(Pairs), {Rest, Config}); +finish({[{array, Values}], Config}) -> {lists:reverse(Values), Config}; +finish({[{array, Values}|Rest], Config}) -> insert(lists:reverse(Values), {Rest, Config}); +finish(_) -> erlang:error(badarg). + + +%% insert a value when there's no parent object or array +insert(Value, {[], Config}) -> {Value, Config}; +%% insert a key or value into an object or array, autodetects the 'right' thing +insert(Key, {[{object, Map}|Rest], Config=#config{return_maps=true}}) -> + {[{object, Key, Map}] ++ Rest, Config}; +insert(Key, {[{object, Pairs}|Rest], Config}) -> + {[{object, Key, Pairs}] ++ Rest, Config}; +insert(Value, {[{object, Key, Map}|Rest], Config=#config{return_maps=true}}) -> + {[{object, maps:put(Key, Value, Map)}] ++ Rest, Config}; +insert(Value, {[{object, Key, Pairs}|Rest], Config}) -> + {[{object, [{Key, Value}] ++ Pairs}] ++ Rest, Config}; +insert(Value, {[{array, Values}|Rest], Config}) -> + {[{array, [Value] ++ Values}] ++ Rest, Config}; +insert(_, _) -> erlang:error(badarg). +-endif. + + +get_key({[{object, Key, _}|_], _}) -> Key; +get_key(_) -> erlang:error(badarg). + + +get_value({Value, _Config}) -> Value; +get_value(_) -> erlang:error(badarg). + + + +%% eunit tests + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + + +config_test_() -> + [ + {"empty config", ?_assertEqual(#config{}, parse_config([]))}, + {"implicit binary labels", ?_assertEqual(#config{}, parse_config([labels]))}, + {"binary labels", ?_assertEqual(#config{}, parse_config([{labels, binary}]))}, + {"atom labels", ?_assertEqual(#config{labels=atom}, parse_config([{labels, atom}]))}, + {"existing atom labels", ?_assertEqual( + #config{labels=existing_atom}, + parse_config([{labels, existing_atom}]) + )}, + {"return_maps true", ?_assertEqual( + #config{return_maps=true}, + parse_config([return_maps]) + )}, + {"invalid opt flag", ?_assertError(badarg, parse_config([error]))}, + {"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))} + ]. 
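As with the formatter module, the term builder can be driven directly by folding events through its exported callbacks, the same pattern `handle_event_test_` uses at the end of this file; a hand-written example:

```erlang
%% Build a term from a hand-written event stream; with the default config
%% (return_maps = false) the object comes back as a proplist.
Events = [start_object, {key, <<"k">>}, {string, <<"v">>}, end_object, end_json],
lists:foldl(fun jsx_to_term:handle_event/2, jsx_to_term:init([]), Events).
%% expected: [{<<"k">>, <<"v">>}]
```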
+ + +format_key_test_() -> + [ + {"binary key", ?_assertEqual(<<"key">>, format_key(<<"key">>, #config{labels=binary}))}, + {"atom key", ?_assertEqual(key, format_key(<<"key">>, #config{labels=atom}))}, + {"existing atom key", ?_assertEqual( + key, + format_key(<<"key">>, #config{labels=existing_atom}) + )}, + {"nonexisting atom key", ?_assertError( + badarg, + format_key(<<"nonexistentatom">>, #config{labels=existing_atom}) + )}, + {"sloppy existing atom key", ?_assertEqual( + key, + format_key(<<"key">>, #config{labels=attempt_atom}) + )}, + {"nonexisting atom key", ?_assertEqual( + <<"nonexistentatom">>, + format_key(<<"nonexistentatom">>, #config{labels=attempt_atom}) + )} + ]. + + +rep_manipulation_test_() -> + [ + {"allocate a new context with option", ?_assertEqual( + {[], #config{labels=atom}}, + start_term([{labels, atom}]) + )}, + {"allocate a new object on an empty stack", ?_assertEqual( + {[{object, []}], #config{}}, + start_object({[], #config{}}) + )}, + {"allocate a new object on a stack", ?_assertEqual( + {[{object, []}, {object, []}], #config{}}, + start_object({[{object, []}], #config{}}) + )}, + {"allocate a new array on an empty stack", ?_assertEqual( + {[{array, []}], #config{}}, + start_array({[], #config{}}) + )}, + {"allocate a new array on a stack", ?_assertEqual( + {[{array, []}, {object, []}], #config{}}, + start_array({[{object, []}], #config{}}) + )}, + {"insert a key into an object", ?_assertEqual( + {[{object, key, []}, junk], #config{}}, + insert(key, {[{object, []}, junk], #config{}}) + )}, + {"get current key", ?_assertEqual( + key, + get_key({[{object, key, []}], #config{}}) + )}, + {"try to get non-key from object", ?_assertError( + badarg, + get_key({[{object, []}], #config{}}) + )}, + {"try to get key from array", ?_assertError( + badarg, + get_key({[{array, []}], #config{}}) + )}, + {"insert a value into an object", ?_assertEqual( + {[{object, [{key, value}]}, junk], #config{}}, + insert(value, {[{object, key, []}, junk], #config{}}) + )}, + {"insert a value into an array", ?_assertEqual( + {[{array, [value]}, junk], #config{}}, + insert(value, {[{array, []}, junk], #config{}}) + )}, + {"finish an object with no ancestor", ?_assertEqual( + {[{a, b}, {x, y}], #config{}}, + finish({[{object, [{x, y}, {a, b}]}], #config{}}) + )}, + {"finish an empty object", ?_assertEqual( + {[{}], #config{}}, + finish({[{object, []}], #config{}}) + )}, + {"finish an object with an ancestor", ?_assertEqual( + {[{object, [{key, [{a, b}, {x, y}]}, {foo, bar}]}], #config{}}, + finish({[{object, [{x, y}, {a, b}]}, {object, key, [{foo, bar}]}], #config{}}) + )}, + {"finish an array with no ancestor", ?_assertEqual( + {[a, b, c], #config{}}, + finish({[{array, [c, b, a]}], #config{}}) + )}, + {"finish an array with an ancestor", ?_assertEqual( + {[{array, [[a, b, c], d, e, f]}], #config{}}, + finish({[{array, [c, b, a]}, {array, [d, e, f]}], #config{}}) + )} + ]. + + +-ifdef(maps_support). 
+rep_manipulation_with_maps_test_() -> + [ + {"allocate a new object on an empty stack", ?_assertEqual( + {[{object, #{}}], #config{return_maps=true}}, + start_object({[], #config{return_maps=true}}) + )}, + {"allocate a new object on a stack", ?_assertEqual( + {[{object, #{}}, {object, #{}}], #config{return_maps=true}}, + start_object({[{object, #{}}], #config{return_maps=true}}) + )}, + {"insert a key into an object", ?_assertEqual( + {[{object, key, #{}}, junk], #config{return_maps=true}}, + insert(key, {[{object, #{}}, junk], #config{return_maps=true}}) + )}, + {"get current key", ?_assertEqual( + key, + get_key({[{object, key, #{}}], #config{return_maps=true}}) + )}, + {"try to get non-key from object", ?_assertError( + badarg, + get_key({[{object, #{}}], #config{return_maps=true}}) + )}, + {"insert a value into an object", ?_assertEqual( + {[{object, #{key => value}}, junk], #config{return_maps=true}}, + insert(value, {[{object, key, #{}}, junk], #config{return_maps=true}}) + )}, + {"finish an object with no ancestor", ?_assertEqual( + {#{a => b, x => y}, #config{return_maps=true}}, + finish({[{object, #{x => y, a => b}}], #config{return_maps=true}}) + )}, + {"finish an empty object", ?_assertEqual( + {#{}, #config{return_maps=true}}, + finish({[{object, #{}}], #config{return_maps=true}}) + )}, + {"finish an object with an ancestor", ?_assertEqual( + { + [{object, #{key => #{a => b, x => y}, foo => bar}}], + #config{return_maps=true} + }, + finish({ + [{object, #{x => y, a => b}}, {object, key, #{foo => bar}}], + #config{return_maps=true} + }) + )} + ]. + + +return_maps_test_() -> + [ + {"an empty map", ?_assertEqual( + #{}, + jsx:decode(<<"{}">>, [return_maps]) + )}, + {"an empty map", ?_assertEqual( + [{}], + jsx:decode(<<"{}">>, []) + )}, + {"an empty map", ?_assertEqual( + [{}], + jsx:decode(<<"{}">>, [{return_maps, false}]) + )}, + {"a small map", ?_assertEqual( + #{<<"awesome">> => true, <<"library">> => <<"jsx">>}, + jsx:decode(<<"{\"library\": \"jsx\", \"awesome\": true}">>, [return_maps]) + )}, + {"a recursive map", ?_assertEqual( + #{<<"key">> => #{<<"key">> => true}}, + jsx:decode(<<"{\"key\": {\"key\": true}}">>, [return_maps]) + )}, + {"a map inside a list", ?_assertEqual( + [#{}], + jsx:decode(<<"[{}]">>, [return_maps]) + )} + ]. +-endif. + + +handle_event_test_() -> + Data = jsx:test_cases(), + [ + { + Title, ?_assertEqual( + Term, + lists:foldl(fun handle_event/2, init([]), Events ++ [end_json]) + ) + } || {Title, _, Term, Events} <- Data + ]. + + +-endif. diff --git a/server/_build/default/plugins/jsx/src/jsx_verify.erl b/server/_build/default/plugins/jsx/src/jsx_verify.erl new file mode 100644 index 0000000..5f4a3d8 --- /dev/null +++ b/server/_build/default/plugins/jsx/src/jsx_verify.erl @@ -0,0 +1,119 @@ +%% The MIT License + +%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca> + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. 
+ +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + + +-module(jsx_verify). + +-export([is_json/2, is_term/2]). +-export([init/1, handle_event/2]). + + +-spec is_json(Source::binary(), Config::proplists:proplist()) -> true | false | {incomplete, jsx:decoder()}. + +is_json(Source, Config) when is_list(Config) -> + try (jsx:decoder(?MODULE, Config, jsx_config:extract_config(Config)))(Source) + catch error:badarg -> false + end. + + +-spec is_term(Source::any(), Config::proplists:proplist()) -> true | false | {incomplete, jsx:encoder()}. + +is_term(Source, Config) when is_list(Config) -> + try (jsx:encoder(?MODULE, Config, jsx_config:extract_config(Config)))(Source) + catch error:badarg -> false + end. + + +parse_config(Config) -> parse_config(Config, []). + +%% ignore deprecated flags +parse_config([no_repeated_keys|Rest], Config) -> + parse_config(Rest, Config); +parse_config([{repeated_keys, Val}|Rest], Config) when Val == true; Val == false -> + parse_config(Rest, Config); +parse_config([repeated_keys|Rest], Config) -> + parse_config(Rest, Config); +parse_config([{K, _}|Rest] = Options, Config) -> + case lists:member(K, jsx_config:valid_flags()) of + true -> parse_config(Rest, Config); + false -> erlang:error(badarg, [Options, Config]) + end; +parse_config([K|Rest] = Options, Config) -> + case lists:member(K, jsx_config:valid_flags()) of + true -> parse_config(Rest, Config); + false -> erlang:error(badarg, [Options, Config]) + end; +parse_config([], Config) -> + Config. + + +%% we don't actually need any state for this +-type state() :: []. +-spec init(Config::proplists:proplist()) -> state(). + +init(Config) -> parse_config(Config). + + +-spec handle_event(Event::any(), State::state()) -> state(). + +handle_event(end_json, _) -> true; + +handle_event(_, State) -> State. + + + +%% eunit tests +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + + +config_test_() -> + [ + {"empty config", ?_assertEqual([], parse_config([]))}, + {"no repeat keys", ?_assertEqual([], parse_config([no_repeated_keys]))}, + {"bare repeated keys", ?_assertEqual([], parse_config([repeated_keys]))}, + {"repeated keys true", ?_assertEqual( + [], + parse_config([{repeated_keys, true}]) + )}, + {"repeated keys false", ?_assertEqual( + [], + parse_config([{repeated_keys, false}]) + )}, + {"invalid opt flag", ?_assertError(badarg, parse_config([error]))}, + {"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))} + ]. + + +handle_event_test_() -> + Data = jsx:test_cases() ++ jsx:special_test_cases(), + [ + { + Title, ?_assertEqual( + true, + lists:foldl(fun handle_event/2, [], Events ++ [end_json]) + ) + } || {Title, _, _, Events} <- Data + ]. + + +-endif. |
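`jsx_verify` runs the decoder or encoder with a do-nothing handler and maps any `badarg` to `false`, so validation never throws on bad input. A hedged usage sketch calling the module directly; the public `jsx:is_json/2` and `jsx:is_term/2` wrappers are assumed to behave the same way:

```erlang
%% Valid input folds all the way to the end_json event and returns true.
jsx_verify:is_json(<<"[1, 2, 3]">>, []).        % expected: true

%% Invalid input makes the decoder raise badarg, which is caught here.
jsx_verify:is_json(<<"[1, 2, oops]">>, []).     % expected: false

%% is_term/2 does the same on the encoder side for Erlang terms.
jsx_verify:is_term([{<<"k">>, <<"v">>}], []).   % expected: true
```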