diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4fcb3d4 --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +ebin/ +deps/ +*.plt +*.beam +app.config +.eunit +log* diff --git a/COPYRIGHT b/COPYRIGHT new file mode 100644 index 0000000..354de6f --- /dev/null +++ b/COPYRIGHT @@ -0,0 +1,23 @@ +Copyright (C) 2012 Issuu ApS. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. 
+ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..d6bb5ab --- /dev/null +++ b/Makefile @@ -0,0 +1,55 @@ +REBAR = ./rebar +DIALYZER = dialyzer +TOUCH = touch + +.PHONY: all deps compile escriptize clean docs eunit ct test \ + run plt analyze get-deps compile-deps + +all: deps compile + +deps: get-deps compile-deps + +compile: + @$(REBAR) compile skip_deps=true + +escriptize: + @$(REBAR) escriptize + +clean: + @$(REBAR) clean + @rm -f test/*.beam erl_crash.dump ./deps/.compile-deps + +eunit: deps compile + @$(REBAR) skip_deps=true eunit + +ct: deps compile + @$(REBAR) skip_deps=true ct + +test: eunit ct + +plt: + @$(DIALYZER) --build_plt --output_plt .backend-api.plt \ + -pa deps/lager/ebin \ + -pa deps/mochiweb/ebin \ + -c deps/mochiweb/ebin \ + --apps kernel stdlib sasl inets crypto \ + public_key ssl mnesia runtime_tools erts \ + compiler tools syntax_tools xmerl hipe webtool + +analyze: compile + @$(DIALYZER) --no_check_plt \ + -c ebin \ + --plt .ddb.plt \ + -pa deps/lager/ebin \ + -pa deps/mochiweb/ebin \ + -Werror_handling \ + -Wunmatched_returns #-Wunderspecs + +docs: + @$(REBAR) doc skip_deps=true + +get-deps: + @$(REBAR) get-deps + +compile-deps: + @$(REBAR) compile diff --git a/README.md b/README.md new file mode 100644 index 0000000..b34c165 --- /dev/null +++ b/README.md @@ -0,0 +1,81 @@ +# Accessing Amazon DynamoDB + +Authenticating + + ddb_iam:credentials("access key", "secret"). + {'ok', Key, Secret, Token} = ddb_iam:token(129600). + ddb:credentials(Key, Secret, Token). + + +Creating a table with a hash key + + ddb:create(<<"foo">>, ddb:key_type(<<"hashkey">>, 'string'), 10, 10). + +Creating a table with hash and range keys + + ddb:create(<<"bar">>, ddb:key_type(<<"hashkey">>, 'string', <<"rangekey">>, 'number'), 10, 10). + +Adding a record to a table with a hash key + + ddb:put(<<"foo">>, [{<<"hashkey">>, <<"hash key value">>, 'string'}, + {<<"field1">>, <<"string value">>, 'string'}, + {<<"field2">>, <<"100">>, 'number'}]). 
+ +Adding a record to a table with hash and range keys + + ddb:put(<<"bar">>, [{<<"hashkey">>, <<"hash key value">>, 'string'}, + {<<"rangekey">>, <<"1000">>, 'number'}, + {<<"field1">>, <<"string value">>, 'string'}, + {<<"field2">>, <<"100">>, 'number'}]). + + ddb:put(<<"bar">>, [{<<"hashkey">>, <<"hash key value">>, 'string'}, + {<<"rangekey">>, <<"2000">>, 'number'}, + {<<"field3">>, <<"string value">>, 'string'}]). + +Fetching a record from a table using a hash key + + ddb:get(<<"foo">>, ddb:key_value(<<"hash key value">>, 'string')). + +Fetching a record from a table using hash and range keys + + ddb:get(<<"bar">>, ddb:key_value(<<"hash key value">>, 'string', <<"1000">>, 'number')). + +Querying a table + + ddb:find(<<"bar">>, {<<"hash key value">>, 'string'}, {'between', 'number', [<<"1000">>, <<"2000">>]}). + +Changing value (and type) of one field while deleting another + + ddb:update(<<"foo">>, ddb:key_value(<<"hash key value">>, 'string'), + [{<<"field1">>, <<"1">>, 'number', 'put'}, + {<<"field2">>, 'delete'}]). + +Adding to a string set field and returning pre-update values of updated fields + + ddb:update(<<"foo">>, ddb:key_value(<<"hash key value">>, 'string'), + [{<<"field2">>, [<<"1">>, <<"2">>], 'string_set', 'add'}], + 'updated_old'). + +Deleting an item from a string set field and returning the values of all fields before the update + + ddb:update(<<"foo">>, ddb:key_value(<<"hash key value">>, 'string'), + [{<<"field2">>, [<<"1">>], 'string_set', 'delete'}], + 'all_old'). + +Update field1 only when field2 does not exist + + ddb:cond_update(<<"foo">>, ddb:key_value(<<"hash key value">>, 'string'), + [{<<"field1">>, <<"1">>, 'number', 'put'}], + {'does_not_exist', <<"field2">>}). + +Update field1 only when field2 exists and has a numerical value of 1 + + ddb:cond_update(<<"foo">>, ddb:key_value(<<"hash key value">>, 'string'), + [{<<"field1">>, <<"1">>, 'number', 'put'}], + {'exists', <<"field2">>, <<"1">>, 'number'}). 
+ +See `src/ddb.erl` for the rest of the API. + +Note that dates in Dynamo are represented as seconds since Unix epoch! + +All data is stored as strings but you have to specify whether each field is a string or a number. diff --git a/rebar b/rebar new file mode 100755 index 0000000..55ce6b0 Binary files /dev/null and b/rebar differ diff --git a/rebar.config b/rebar.config new file mode 100644 index 0000000..a5a9173 --- /dev/null +++ b/rebar.config @@ -0,0 +1,30 @@ +%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*- +%% ex: ts=4 sw=4 ft=erlang et + +{lib_dirs, ["deps"]}. + +{deps, [ + {lager, ".*", {git, "git://github.com/basho/lager.git", {branch, "master"}}}, + {jsx, ".*", {git, "git://github.com/talentdeficit/jsx.git", {branch, "master"}}}, + {mochiweb, ".*", {git, "git://github.com/mochi/mochiweb.git", {branch, "master"}}}, + {lhttpc, ".*", {git, "git://github.com/oscarh/lhttpc.git", {branch, "master"}}} + ]}. + +{require_otp_vsn, "R14"}. + +{erl_opts, [ + fail_on_warning, + debug_info, + %%warn_missing_spec, + {parse_transform, lager_transform} + ]}. + +{cover_enabled, true}. + +{clean_files, ["*.eunit", "ebin/*.beam"]}. + +{eunit_opts, [verbose, {report, {eunit_surefire, [{dir, "."}]}}]}. + +{validate_app_modules, false}. + +{sub_dirs, ["src"]}. diff --git a/src/ddb.app.src b/src/ddb.app.src new file mode 100644 index 0000000..c211157 --- /dev/null +++ b/src/ddb.app.src @@ -0,0 +1,19 @@ +%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*- +%% ex: ts=4 sw=4 ft=erlang et + +{application, ddb, + [ + {description, "AWS DynamoDB client"}, + {vsn, git}, + {registered, [ddb_sup]}, + {applications, [ + kernel, + stdlib, + lager, + crypto, + inets, + ssl + ]}, + {mod, {ddb_app, []}}, + {env, []} + ]}. diff --git a/src/ddb.erl b/src/ddb.erl new file mode 100644 index 0000000..416fc72 --- /dev/null +++ b/src/ddb.erl @@ -0,0 +1,384 @@ +%%% Copyright (C) 2012 Issuu ApS. All rights reserved. 
+%%% +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions +%%% are met: +%%% 1. Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. +%%% 2. Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. +%%% +%%% THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND +%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE +%%% FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +%%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +%%% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +%%% HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +%%% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +%%% SUCH DAMAGE. + +-module(ddb). + +-export([credentials/3, tables/0, + key_type/2, key_type/4, + key_value/2, key_value/4, + create/4, describe/1, delete/1, + get/2, put/2, update/3, update/4, + cond_update/4, cond_update/5, + now/0, find/3, find/4]). + +-define(DDB_DOMAIN, "dynamodb.us-east-1.amazonaws.com"). +-define(DDB_ENDPOINT, "http://" ++ ?DDB_DOMAIN ++ "/"). +-define(DDB_AMZ_PREFIX, "x-amz-"). + +-define(SIGNATURE_METHOD, "HmacSHA1"). +-define(MAX_RETRIES, 4). + +%%% Request headers + +-define(HOST_HEADER, "Host"). +-define(DATE_HEADER, "X-Amz-Date"). +-define(AUTHORIZATION_HEADER, "X-Amzn-Authorization"). +-define(TOKEN_HEADER, "x-amz-security-token"). 
+-define(TARGET_HEADER, "X-Amz-Target"). +-define(CONTENT_TYPE_HEADER, "Content-Type"). + +-define(CONTENT_TYPE, "application/x-amz-json-1.0"). + +%%% Endpoint targets + +-define(TG_CREATE_TABLE, "DynamoDBv20110924.CreateTable"). +-define(TG_LIST_TABLES, "DynamoDBv20110924.ListTables"). +-define(TG_DESCRIBE_TABLE, "DynamoDBv20110924.DescribeTable"). +-define(TG_DELETE_TABLE, "DynamoDBv20110924.DeleteTable"). +-define(TG_PUT_ITEM, "DynamoDBv20110924.PutItem"). +-define(TG_GET_ITEM, "DynamoDBv20110924.GetItem"). +-define(TG_UPDATE_ITEM, "DynamoDBv20110924.UpdateItem"). +-define(TG_QUERY, "DynamoDBv20110924.Query"). + +-define(HTTP_OPTIONS, []). + +-type tablename() :: binary(). +-type type() :: 'number' | 'string' | 'number_set' | 'string_set'. +-type condition() :: 'between' | 'equal'. % TBD implement others +-type key_value() :: {binary(), type()}. +-type find_cond() :: {condition(), type(), [_]}. +-type json() :: [_]. +-type key_json() :: json(). +-type json_reply() :: {'ok', json()} | {'error', json()}. +-type put_attr() :: {binary(), binary(), type()}. +-type update_action() :: 'put' | 'add' | 'delete'. +-type update_attr() :: {binary(), binary(), type(), 'put' | 'add'} | {binary(), 'delete'}. +-type returns() :: 'none' | 'all_old' | 'updated_old' | 'all_new' | 'updated_new'. +-type update_cond() :: {'does_not_exist', binary()} | {'exists', binary(), binary(), type()}. + +%%% Set temporary credentials, use ddb_iam:token/1 to fetch from AWS. + +-spec credentials(string(), string(), string()) -> 'ok'. + +credentials(AccessKeyId, SecretAccessKey, SessionToken) -> + 'ok' = application:set_env('ddb', 'accesskeyid', AccessKeyId), + 'ok' = application:set_env('ddb', 'secretaccesskey', SecretAccessKey), + 'ok' = application:set_env('ddb', 'sessiontoken', SessionToken). + +%%% Retrieve stored credentials. + +-spec credentials() -> {'ok', string(), string(), string()}. 
+ +credentials() -> + {'ok', AccessKeyId} = application:get_env('ddb', 'accesskeyid'), + {'ok', SecretAccessKey} = application:get_env('ddb', 'secretaccesskey'), + {'ok', SessionToken} = application:get_env('ddb', 'sessiontoken'), + {'ok', AccessKeyId, SecretAccessKey, SessionToken}. + +%%% Create a key type, either hash or hash and range. + +-spec key_type(binary(), type()) -> json(). + +key_type(HashKey, HashKeyType) + when is_binary(HashKey), + is_atom(HashKeyType) -> + [{<<"HashKeyElement">>, + [{<<"AttributeName">>, HashKey}, + {<<"AttributeType">>, type(HashKeyType)}]}]. + +-spec key_type(binary(), type(), binary(), type()) -> json(). + +key_type(HashKey, HashKeyType, RangeKey, RangeKeyType) + when is_binary(HashKey), + is_atom(HashKeyType), + is_binary(RangeKey), + is_atom(RangeKeyType) -> + [{<<"HashKeyElement">>, + [{<<"AttributeName">>, HashKey}, + {<<"AttributeType">>, type(HashKeyType)}]}, + {<<"RangeKeyElement">>, + [{<<"AttributeName">>, RangeKey}, + {<<"AttributeType">>, type(RangeKeyType)}]}]. + +%%% Create table. Use key_type/2 or key_type/4 as key. + +-spec create(tablename(), key_json(), pos_integer(), pos_integer()) -> json_reply(). + +create(Name, Keys, ReadsPerSec, WritesPerSec) + when is_binary(Name), + is_list(Keys), + is_integer(ReadsPerSec), + is_integer(WritesPerSec) -> + JSON = [{<<"TableName">>, Name}, + {<<"KeySchema">>, Keys}, + {<<"ProvisionedThroughput">>, [{<<"ReadsPerSecond">>, ReadsPerSec}, + {<<"WritesPerSecond">>, WritesPerSec}]}], + request(?TG_CREATE_TABLE, JSON). + +%%% Fetch list of created tabled. + +-spec tables() -> {'ok', [tablename()]}. + +tables() -> + {'ok', JSON} = request(?TG_LIST_TABLES, [{}]), + [{<<"TableNames">>, {<<"array">>, Tables}}] = JSON, + {'ok', Tables}. + +%%% Describe table. + +-spec describe(tablename()) -> json_reply(). + +describe(Name) + when is_binary(Name) -> + JSON = [{<<"TableName">>, Name}], + request(?TG_DESCRIBE_TABLE, JSON). + +%%% Delete table. 
+ +-spec delete(tablename()) -> json_reply(). + +delete(Name) + when is_binary(Name) -> + JSON = [{<<"TableName">>, Name}], + request(?TG_DELETE_TABLE, JSON). + +%%% Put item attributes into table. + +-spec put(tablename(), [put_attr()]) -> json_reply(). + +put(Name, Attributes) + when is_binary(Name) -> + JSON = [{<<"TableName">>, Name}, + {<<"Item">>, format_put_attrs(Attributes)}], + request(?TG_PUT_ITEM, JSON). + +%%% Create a key value, either hash or hash and range. + +-spec key_value(binary(), type()) -> json(). + +key_value(HashKeyValue, HashKeyType) + when is_binary(HashKeyValue), + is_atom(HashKeyType) -> + [{<<"Key">>, [{<<"HashKeyElement">>, + [{type(HashKeyType), HashKeyValue}]}]}]. + +-spec key_value(binary(), type(), binary(), type()) -> json(). + +key_value(HashKeyValue, HashKeyType, RangeKeyValue, RangeKeyType) + when is_binary(HashKeyValue), + is_atom(HashKeyType), + is_binary(RangeKeyValue), + is_atom(RangeKeyType) -> + [{<<"Key">>, [{<<"HashKeyElement">>, + [{type(HashKeyType), HashKeyValue}]}, + {<<"RangeKeyElement">>, + [{type(RangeKeyType), RangeKeyValue}]}]}]. + +%%% Update attributes of an existing item. + +-spec update(tablename(), key_json(), [update_attr()]) -> json_reply(). + +update(Name, Keys, Attributes) -> + update(Name, Keys, Attributes, 'none'). + +-spec update(tablename(), key_json(), [update_attr()], returns()) -> json_reply(). + +update(Name, Keys, Attributes, Returns) + when is_binary(Name), + is_list(Keys), + is_list(Attributes), + is_atom(Returns) -> + JSON = [{<<"TableName">>, Name}, + {<<"ReturnValues">>, returns(Returns)}] + ++ Keys + ++ [{<<"AttributeUpdates">>, format_update_attrs(Attributes)}], + request(?TG_UPDATE_ITEM, JSON). + +%%% Conditionally update attributes of an existing item. + +-spec cond_update(tablename(), key_json(), [update_attr()], update_cond()) -> json_reply(). + +cond_update(Name, Keys, Attributes, Condition) -> + cond_update(Name, Keys, Attributes, Condition, 'none'). 
+ +-spec cond_update(tablename(), key_json(), [update_attr()], update_cond(), returns()) -> json_reply(). + +cond_update(Name, Keys, Attributes, Condition, Returns) + when is_binary(Name), + is_list(Keys), + is_list(Attributes), + is_atom(Returns) -> + JSON = [{<<"TableName">>, Name}, + {<<"ReturnValues">>, returns(Returns)}] + ++ Keys + ++ [{<<"AttributeUpdates">>, format_update_attrs(Attributes)}] + ++ format_update_cond(Condition), + request(?TG_UPDATE_ITEM, JSON). + +%%% Fetch all item attributes from table. + +-spec get(tablename(), key_json()) -> json_reply(). + +get(Name, Keys) + when is_binary(Name), + is_list(Keys) -> + JSON = [{<<"TableName">>, Name}] ++ Keys, + request(?TG_GET_ITEM, JSON). + +%%% Fetch all item attributes from table using a condition. + +-spec find(tablename(), key_value(), find_cond()) -> json_reply(). + +find(Name, HashKey, RangeKeyCond) -> + find(Name, HashKey, RangeKeyCond, 'none'). + +%%% Fetch all item attributes from table using a condition, with pagination. + +-spec find(tablename(), key_value(), find_cond(), json() | 'none') -> json_reply(). + +find(Name, {HashKeyValue, HashKeyType}, {Condition, RangeKeyType, RangeKeyValues}, StartKey) + when is_binary(Name), + is_binary(HashKeyValue), + is_atom(HashKeyType), + is_atom(Condition), + is_atom(RangeKeyType), + is_list(RangeKeyValues) -> + {Op, Values} = case Condition of + 'between' -> + [A, B] = RangeKeyValues, + {<<"BETWEEN">>, [[{type(RangeKeyType), A}], + [{type(RangeKeyType), B}]]}; + 'equal' -> + {<<"EQ">>, [[{type(RangeKeyType), hd(RangeKeyValues)}]]} + end, + JSON = [{<<"TableName">>, Name}, + {<<"HashKeyValue">>, + [{type(HashKeyType), HashKeyValue}]}, + {<<"RangeKeyCondition">>, + [{<<"AttributeValueList">>, Values}, + {<<"ComparisonOperator">>, Op}]}], + JSON1 = case StartKey of + 'none' -> JSON; + _ -> [{<<"ExclusiveStartKey">>, StartKey}|JSON] + end, + request(?TG_QUERY, JSON1). + +%%% +%%% Helper functions +%%% + +-spec format_put_attrs([put_attr()]) -> json(). 
+ +format_put_attrs(Attributes) -> + lists:map(fun({Name, Value, Type}) -> + {Name, [{type(Type), Value}]} + end, Attributes). + +-spec format_update_attrs([update_attr()]) -> json(). + +format_update_attrs(Attributes) -> + lists:map(fun({Name, Value, Type, Action}) -> + {Name, [{<<"Value">>, [{type(Type), Value}]}, + {<<"Action">>, update_action(Action)}]}; + ({Name, 'delete'}) -> + {Name, [{<<"Action">>, update_action('delete')}]} + end, Attributes). + +-spec format_update_cond(update_cond()) -> json(). + +format_update_cond({'does_not_exist', Name}) -> + [{<<"Expected">>, [{Name, [{<<"Exists">>, <<"false">>}]}]}]; + +format_update_cond({'exists', Name, Value, Type}) -> + [{<<"Expected">>, [{Name, [{<<"Value">>, [{type(Type), Value}]}]}]}]. + +-spec type(type()) -> binary(). + +type('string') -> <<"S">>; +type('number') -> <<"N">>; +type('string_set') -> <<"SS">>; +type('number_set') -> <<"NS">>. % DynamoDB number-set descriptor is "NS", not "NN" + +-spec returns(returns()) -> binary(). + +returns('none') -> <<"NONE">>; +returns('all_old') -> <<"ALL_OLD">>; +returns('updated_old') -> <<"UPDATED_OLD">>; +returns('all_new') -> <<"ALL_NEW">>; +returns('updated_new') -> <<"UPDATED_NEW">>. + +-spec update_action(update_action()) -> binary(). + +update_action('put') -> <<"PUT">>; +update_action('add') -> <<"ADD">>; +update_action('delete') -> <<"DELETE">>. + +-spec request(string(), json()) -> json_reply(). + +request(Target, JSON) -> + Body = jsx:to_json(JSON), + Headers = headers(Target, Body), + Opts = [{'body_format', 'binary'}], + F = fun() -> httpc:request('post', {?DDB_ENDPOINT, Headers, ?CONTENT_TYPE, Body}, [], Opts) end, + ddb_aws:retry(F, ?MAX_RETRIES, fun jsx:to_term/1). + +-spec headers(string(), binary()) -> proplists:proplist(). 
+ +headers(Target, Body) -> + {'ok', AccessKeyId, SecretAccessKey, SessionToken} = credentials(), + Date = ddb_util:rfc1123_date(), + Headers = [{?DATE_HEADER, Date}, + {?TARGET_HEADER, Target}, + {?TOKEN_HEADER, SessionToken}, + {?CONTENT_TYPE_HEADER, ?CONTENT_TYPE}], + Authorization = authorization(AccessKeyId, SecretAccessKey, Headers, Body), + [{?AUTHORIZATION_HEADER, Authorization}|Headers]. + +-spec authorization(string(), string(), proplists:proplist(), binary()) -> string(). + +authorization(AccessKeyId, SecretAccessKey, Headers, Body) -> + Signature = signature(SecretAccessKey, Headers, Body), + lists:flatten(io_lib:format("AWS3 AWSAccessKeyId=~s,Algorithm=~s,Signature=~s", + [AccessKeyId, ?SIGNATURE_METHOD, Signature])). + +-spec signature(string(), proplists:proplist(), binary()) -> string(). + +signature(SecretAccessKey, Headers, Body) -> + StringToSign = lists:flatten(["POST", $\n, "/", $\n, $\n, canonical(Headers), $\n, Body]), + BytesToSign = crypto:sha(StringToSign), + base64:encode_to_string(binary_to_list(crypto:sha_mac(SecretAccessKey, BytesToSign))). + +-spec canonical(proplists:proplist()) -> [_]. + +canonical(Headers) -> + Headers1 = lists:map(fun({K, V}) -> {ddb_util:to_lower(K), V} end, Headers), + Amz = lists:filter(fun({K, _V}) -> lists:prefix(?DDB_AMZ_PREFIX, K) end, Headers1), + Headers2 = [{ddb_util:to_lower(?HOST_HEADER), ?DDB_DOMAIN}|lists:sort(Amz)], + lists:map(fun({K, V}) -> [K, ":", V, "\n"] end, Headers2). + +-spec now() -> pos_integer(). + +now() -> + %% Unix time is defined relative to UTC; local_time() would be off by the UTC offset. + Time = calendar:universal_time(), + Seconds = calendar:datetime_to_gregorian_seconds(Time), + Seconds - 62167219200. % Unix time diff --git a/src/ddb_app.erl b/src/ddb_app.erl new file mode 100644 index 0000000..ee88a95 --- /dev/null +++ b/src/ddb_app.erl @@ -0,0 +1,36 @@ +%%% Copyright (C) 2012 Issuu ApS. All rights reserved. 
+%%% +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions +%%% are met: +%%% 1. Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. +%%% 2. Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. +%%% +%%% THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND +%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE +%%% FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +%%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +%%% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +%%% HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +%%% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +%%% SUCH DAMAGE. + +-module(ddb_app). + +-behavior(application). + +-export([start/2, + stop/1]). + +start(_Type, _State) -> + ddb_sup:start_link(application:get_all_env()). + +stop(_State) -> + 'ok'. + diff --git a/src/ddb_aws.erl b/src/ddb_aws.erl new file mode 100644 index 0000000..c9d1811 --- /dev/null +++ b/src/ddb_aws.erl @@ -0,0 +1,78 @@ +%%% Copyright (C) 2012 Issuu ApS. All rights reserved. +%%% +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions +%%% are met: +%%% 1. Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. 
+%%% 2. Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. +%%% +%%% THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND +%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE +%%% FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +%%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +%%% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +%%% HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +%%% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +%%% SUCH DAMAGE. + +-module(ddb_aws). + +-export([retry/3, timestamp/0]). + +-define(DATE_FMT, "~4.10.0b-~2.10.0b-~2.10.0bT~2.10.0b:~2.10.0b:~2.10.0b.000Z"). + +-spec retry(function(), non_neg_integer(), function()) -> {'ok', _} | {'error', _}. + +retry(F, Max, H) + when is_function(F), + is_integer(Max), + Max >= 0, + is_function(H) -> + retry(F, Max, 0, H). 
+ +retry(_, Max, N, _) + when Max == N -> + ok = lager:error("Maximum retries (~p) reached, aborting...", [Max]), + {'error', 'maximum_retries_reached'}; + +retry(F, Max, N, H) + when is_function(F), + is_integer(Max), + is_integer(N), + is_function(H) -> + backoff(N), + case F() of + {'ok', {{_, 200, _}, _, Body}} -> + {'ok', H(Body)}; + %% 4xx are client errors and not retryable; 5xx (server errors) fall through and retry. + {'ok', {{_, Code, _}, _, Body}} when Code >= 400 andalso Code < 500 -> + ok = lager:error("Got client error (~b) ~p, aborting...", [Code, Body]), + {'error', H(Body)}; + {'ok', {{_, Code, _}, _, Body}} -> + ok = lager:warning("Unexpected response (~b) ~p, retrying...", [Code, Body]), + retry(F, Max, N + 1, H); + {'error', Error} -> + ok = lager:debug("Got ~p retrying...", [Error]), + retry(F, Max, N + 1, H) + end. + +-spec backoff(non_neg_integer()) -> 'ok'. + +backoff(0) -> 'ok'; +backoff(Attempts) + when is_integer(Attempts) -> + %% attempt exponential backoff + Delay = round(crypto:rand_uniform(1, 101) * math:pow(4, Attempts)), + ok = lager:debug("Waiting ~bms before retrying", [Delay]), + timer:sleep(Delay). + +-spec timestamp() -> string(). + +timestamp() -> + {{YYYY, MM, DD}, {HH, MI, SS}} = erlang:universaltime(), + lists:flatten(io_lib:format(?DATE_FMT, [YYYY, MM, DD, HH, MI, SS])). diff --git a/src/ddb_iam.erl b/src/ddb_iam.erl new file mode 100644 index 0000000..9989a4b --- /dev/null +++ b/src/ddb_iam.erl @@ -0,0 +1,97 @@ +%%% Copyright (C) 2012 Issuu ApS. All rights reserved. +%%% +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions +%%% are met: +%%% 1. Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. +%%% 2. Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. 
+%%% +%%% THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND +%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE +%%% FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +%%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +%%% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +%%% HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +%%% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +%%% SUCH DAMAGE. + +-module(ddb_iam). + +-export([credentials/2, token/1]). + +-define(IAM_ENDPOINT, "https://sts.amazonaws.com/"). +-define(IAM_AWS_VERSION, "2011-06-15"). +-define(IAM_HEADER_AUTHORIZATION, "Authorization"). +-define(IAM_HEADER_AWS_VERSION, "AWS-Version"). +-define(IAM_HEADER_CONTENT_MD5, "Content-MD5"). +-define(IAM_HEADER_CONTENT_TYPE, "Content-Type"). +-define(IAM_HEADER_DATE, "Date"). +-define(IAM_MAX_RETRIES, 3). +-define(IAM_STATUS_SUCCESS_OK, "200"). +-define(IAM_STATUS_SUCCESS_NO_CONTENT, "204"). + +-define(MIME_TYPE, "application/x-www-form-urlencoded"). + +-spec credentials(string(), string()) -> 'ok'. + +credentials(AccessKeyId, SecretAccessKey) -> + 'ok' = application:set_env('iam', 'accesskeyid', AccessKeyId), + 'ok' = application:set_env('iam', 'secretaccesskey', SecretAccessKey). + +-spec credentials() -> {string(), string()}. + +credentials() -> + {'ok', AccessKeyId} = application:get_env('iam', 'accesskeyid'), + {'ok', SecretAccessKey} = application:get_env('iam', 'secretaccesskey'), + {'ok', AccessKeyId, SecretAccessKey}. + +-spec token(pos_integer()) -> {'ok', string(), string(), string()} | + {'error', string(), string()}. 
+ +token(Duration) + when is_integer(Duration) -> + case request("GetSessionToken", ?IAM_ENDPOINT, Duration) of + {'ok', XML} -> + RElem = ddb_xml:get_child(XML, 'GetSessionTokenResult'), + CElem = ddb_xml:get_child(RElem, 'Credentials'), + Token = ddb_xml:get_child_text(CElem, 'SessionToken'), + Key = ddb_xml:get_child_text(CElem, 'AccessKeyId'), + Secret = ddb_xml:get_child_text(CElem, 'SecretAccessKey'), + {'ok', Key, Secret, Token}; + Error = {'error', 'maximum_retries_reached'} -> + Error; + {'error', XML} -> + Error = ddb_xml:get_child(XML, 'Error'), + Code = ddb_xml:get_child_text(Error, 'Code'), + Message = ddb_xml:get_child_text(Error, 'Message'), + {'error', Code, Message} + end. + +-spec request(string(), string(), non_neg_integer()) -> {'ok', string()} | + {'error', string()}. + +request(Action, Endpoint, Duration) -> + {'ok', AccessKeyId, SecretAccessKey} = credentials(), + Args = [{"AWSAccessKeyId", AccessKeyId}, + {"Action", Action}, + {"DurationSeconds", Duration}, + {"SignatureMethod", "HmacSHA1"}, + {"SignatureVersion", "2"}, + {"Timestamp", ddb_aws:timestamp()}, + {"Version", ?IAM_AWS_VERSION}], + CanonicalString = mochiweb_util:urlencode(lists:sort(Args)), + {Host, _Port, Path, _SSL} = lhttpc_lib:parse_url(Endpoint), + S = ["POST", $\n, Host, $\n, Path, $\n, CanonicalString], + Signature = base64:encode_to_string(crypto:sha_mac(SecretAccessKey, S)), + Args1 = [{"Signature", Signature}|Args], + Body = iolist_to_binary(mochiweb_util:urlencode(lists:sort(Args1))), + F = fun() -> httpc:request('post', {Endpoint, [], ?MIME_TYPE, Body}, [], []) end, + H = fun ddb_xml:parse/1, + ddb_aws:retry(F, ?IAM_MAX_RETRIES, H). + diff --git a/src/ddb_sup.erl b/src/ddb_sup.erl new file mode 100644 index 0000000..971e7c2 --- /dev/null +++ b/src/ddb_sup.erl @@ -0,0 +1,43 @@ +%%% Copyright (C) 2012 Issuu ApS. All rights reserved. 
+%%% +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions +%%% are met: +%%% 1. Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. +%%% 2. Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. +%%% +%%% THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND +%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE +%%% FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +%%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +%%% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +%%% HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +%%% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +%%% SUCH DAMAGE. + +-module(ddb_sup). + +-behaviour(supervisor). + +-export([start_link/1]). + +-export([init/1]). + +-spec start_link(proplists:proplist()) -> {'ok', pid()} | 'ignore' | {'error', _}. + +start_link(Opts) -> + supervisor:start_link({'local', ?MODULE}, ?MODULE, Opts). + +-spec init(proplists:proplist()) -> {'ok', tuple()}. + +init(_) -> + {'ok', {{'one_for_one', 5, 10}, []}}. + + + diff --git a/src/ddb_util.erl b/src/ddb_util.erl new file mode 100644 index 0000000..2aff8ce --- /dev/null +++ b/src/ddb_util.erl @@ -0,0 +1,121 @@ +%%% Copyright (C) 2012 Issuu ApS. All rights reserved. 
+%%% +%%% Redistribution and use in source and binary forms, with or without +%%% modification, are permitted provided that the following conditions +%%% are met: +%%% 1. Redistributions of source code must retain the above copyright +%%% notice, this list of conditions and the following disclaimer. +%%% 2. Redistributions in binary form must reproduce the above copyright +%%% notice, this list of conditions and the following disclaimer in the +%%% documentation and/or other materials provided with the distribution. +%%% +%%% THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND +%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +%%% ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE +%%% FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +%%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +%%% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +%%% HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +%%% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +%%% SUCH DAMAGE. + +-module(ddb_util). + +-export([parameter_value/2, + first/1, + epoch/0, epoch/1, + to_lower/1, to_upper/1, + to_integer/1, + separate/2, + bin2hexstr/1, + rfc1123_date/0, rfc1123_date/1]). + +-define(GREGORIAN_EPOCH_DIFF, 62167219200). + +%% @doc Lookup Key in Parameter-list +%% Will search for Key, but skip certain dummy-values and assume it +%% is equivalent to a "not-found" +%% @end +-spec parameter_value(A, [{A, B}]) -> not_found | {value, B}. +parameter_value(Key, Parameters) -> + case lists:keysearch(Key, 1, Parameters) of + {value, {_, undefined}} -> not_found; + {value, {_, [16#7F]}} -> not_found; + {value, {_, Value}} -> {value, Value}; + false -> not_found + end. 
%% @doc Lowercase the ASCII letters A-Z in String; every other character
%% (including non-ASCII code points) passes through unchanged.
%% @end
-spec to_lower(string()) -> string().
to_lower(String) ->
    lists:map(fun(C) when C >= $A, C =< $Z -> C + 16#20;
                 (C) -> C
              end, String).

%% @doc Uppercase the ASCII letters a-z in String; every other character
%% passes through unchanged. (The original trailing comment here said
%% "Only lowercase ASCII" — a copy/paste slip from to_lower/1.)
%% @end
-spec to_upper(string()) -> string().
to_upper(String) ->
    lists:map(fun(C) when C >= $a, C =< $z -> C - 16#20;
                 (C) -> C
              end, String).

%% @doc Coerce an integer-or-string to an integer.
%% @end
-spec to_integer(integer() | list()) -> integer().
to_integer(Integer) when is_integer(Integer) -> Integer;
to_integer(List) -> list_to_integer(List).

%% @doc Given a list of lookup results, pick the first valid one.
%% Returns 'not_found' when the list is empty or all-not_found.
%% @end
-spec first([not_found | {value, A}]) -> not_found | {value, A}.
first([]) -> not_found;
first([not_found | Next]) -> first(Next);
first([{value, _} = R | _]) -> R.

%% @doc Number of seconds since the Unix epoch for the current time.
%% @end
-spec epoch() -> non_neg_integer().
epoch() ->
    %% os:timestamp/0 is a drop-in replacement for the deprecated now/0:
    %% same {MegaSecs, Secs, MicroSecs} shape, without now/0's uniqueness
    %% guarantee (not needed here).
    {MS, S, _US} = os:timestamp(),
    MS * 1000000 + S.

%% @doc Number of seconds since the Unix epoch at the UTC `DateTime'.
%% Datetimes before 1970-01-01 are clamped to 0.
%% @end
-spec epoch(calendar:datetime()) -> non_neg_integer().
epoch(DateTime) ->
    max(0, calendar:datetime_to_gregorian_seconds(DateTime) - epoch_offset()).

%% @doc Generate a valid RFC1123 date for the current time.
%% @end
-spec rfc1123_date() -> string().
rfc1123_date() -> httpd_util:rfc1123_date().

-type date() :: {1900..3000, 1..12, 1..31}.
-type time() :: {0..23, 0..59, 0..59}.
-type date_time() :: {date(), time()}.

%% @doc RFC1123 date for either a Unix-epoch second count or a UTC datetime.
%% @end
-spec rfc1123_date(integer() | date_time()) -> string().
rfc1123_date(Epoch) when is_integer(Epoch) ->
    rfc1123_date(calendar:gregorian_seconds_to_datetime(
                   Epoch + epoch_offset()));
rfc1123_date(DateTime) when is_tuple(DateTime) ->
    %% httpd_util:rfc1123_date/1 takes local time, so convert from UTC first.
    httpd_util:rfc1123_date(calendar:universal_time_to_local_time(DateTime)).

%% @doc Intersperse Separator between the elements of List,
%% e.g. separate([a, b, c], x) -> [a, x, b, x, c].
%% Rewritten from a length/1-plus-foldl construction (which walked the
%% list twice and mixed accumulator shapes) into direct recursion with
%% identical output.
%% @end
-spec separate([E], E) -> [E].
separate([], _Separator) -> [];
separate([Last], _Separator) -> [Last];
separate([H | T], Separator) -> [H, Separator | separate(T, Separator)].

%% Seconds from year 0 of the Gregorian calendar to 1970-01-01 00:00:00 UTC
%% (62167219200). Computed from calendar so the constant documents itself.
epoch_offset() ->
    calendar:datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}}).
%% @doc Render a binary (or list of bytes) as a lowercase hexadecimal
%% string, two digits per byte.
%% @end
-spec bin2hexstr(binary() | string()) -> string().
bin2hexstr(Binary) when is_binary(Binary) ->
    bin2hexstr(binary_to_list(Binary));
bin2hexstr(Bytes) when is_list(Bytes) ->
    lists:flatten([io_lib:format("~2.16.0b", [Byte]) || Byte <- Bytes]).

%%% Copyright (C) 2012 Issuu ApS. All rights reserved.
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions
%%% are met:
%%% 1. Redistributions of source code must retain the above copyright
%%% notice, this list of conditions and the following disclaimer.
%%% 2. Redistributions in binary form must reproduce the above copyright
%%% notice, this list of conditions and the following disclaimer in the
%%% documentation and/or other materials provided with the distribution.
%%%
%%% THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
%%% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
%%% ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
%%% FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
%%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
%%% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
%%% HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
%%% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
%%% SUCH DAMAGE.

-module(ddb_xml).

%%
%% Include files
%%
-include_lib("xmerl/include/xmerl.hrl").

%%
%% Exported Functions
%%
-export([format/1,
         parse/1,
         format_attribute/2,
         format_attribute_full/2,
         format_text_element_full/2,
         parse_attribute/2,
         get_child/2,
         get_child_text/2,
         get_children/1, get_children/2,
         get_children_ns/2,
         get_text/1,
         get_node_name/1]).
-export([parse_json_att/2,
         format_json_att/2]).

%%
%% API Functions
%%

%% @doc Render an xmerl XML term as a flat string, using the
%% utf8_export callback module as xmerl's export backend.
format(Xml) ->
    lists:flatten(xmerl:export_simple([Xml], utf8_export)).

%% @doc Parse an XML document (binary or string) into an xmerl element tree.
%% NOTE(review): the scanner is told the encoding is "iso-10646-utf-1",
%% an unusual charset name — confirm this is intentional (vs. "utf-8").
parse(Bin) when is_binary(Bin) ->
    parse(binary_to_list(Bin));
parse(Text) ->
    {Root, _Rest} = xmerl_scan:string(Text, [{encoding, "iso-10646-utf-1"}]),
    Root.

%% @doc Build a {Name, Value} attribute pair; 'undefined' or empty
%% values collapse to [] so they vanish from the output.
format_attribute(_Name, undefined) -> [];
format_attribute(_Name, []) -> [];
format_attribute(Name, Value) -> {Name, Value}.

%% @doc Like format_attribute/2 but produces a full #xmlAttribute{} record.
format_attribute_full(_Name, undefined) -> [];
format_attribute_full(_Name, []) -> [];
format_attribute_full(Name, Value) -> #xmlAttribute{name = Name, value = Value}.

%% @doc Build an #xmlElement{} wrapping a single text node; 'undefined'
%% or empty values collapse to [].
format_text_element_full(_Name, undefined) -> [];
format_text_element_full(_Name, []) -> [];
format_text_element_full(Name, Value) -> #xmlElement{name = Name, content = [#xmlText{value = util:from_utf8(Value)}]}.

%% @doc Value of the attribute named Attribute on Root, flattened to a
%% plain string; 'undefined' if absent. Matches on tuple position 2,
%% which is the name field of #xmlAttribute{}.
parse_attribute(Root, Attribute) ->
    case lists:keyfind(Attribute, 2, Root#xmlElement.attributes) of
        false -> undefined;
        Attr -> lists:flatten(Attr#xmlAttribute.value)
    end.

%% @doc First child node of Root whose name is ChildName, or 'undefined'.
%% Matches on tuple position 2, the name field of #xmlElement{}.
get_child(Root, ChildName) ->
    case lists:keyfind(ChildName, 2, Root#xmlElement.content) of
        false -> undefined;
        Node -> Node
    end.

%% @doc Concatenated character data of the child named ChildName,
%% or 'undefined' when no such child exists.
get_child_text(Root, ChildName) ->
    case lists:keyfind(ChildName, 2, Root#xmlElement.content) of
        false ->
            undefined;
        Node ->
            lists:flatmap(fun(TextNode) ->
                              TextNode#xmlText.value
                          end, Node#xmlElement.content)
    end.

%% @doc All #xmlElement{} children of Root; get_children/2 restricts
%% the result to children with a given name ('undefined' means any name).
get_children(Root) -> get_children(Root, undefined).
get_children(Root, Name) ->
    Wanted = fun(Child) when is_record(Child, xmlElement) ->
                     (Name == undefined) or (Child#xmlElement.name == Name);
                (_Other) ->
                     false
             end,
    lists:filter(Wanted, Root#xmlElement.content).

%% @doc Children of Root whose namespace prefix (the first element of
%% the nsinfo field) equals NS.
get_children_ns(Root, NS) ->
    InNamespace = fun(Child) ->
                      case Child#xmlElement.nsinfo of
                          {Prefix, _Local} -> Prefix == NS;
                          _ -> false
                      end
                  end,
    lists:filter(InNamespace, Root#xmlElement.content).

%% @doc Concatenated character data of Root's text-node children.
get_text(Root) ->
    lists:flatmap(fun(TextNode) ->
                      TextNode#xmlText.value
                  end, Root#xmlElement.content).

%% @doc Tag name of an #xmlElement{}.
get_node_name(Elem) -> Elem#xmlElement.name.

%% JSON structs

%% @doc Value of Key in a mochijson-style {struct, Props} term.
%% JSON null is normalized to 'undefined' (which is also what a
%% missing key yields).
parse_json_att(Key, {struct, Props}) when is_list(Props) ->
    case proplists:get_value(Key, Props) of
        null -> undefined;
        Value -> Value
    end.

%% @doc Build a {Name, Value} pair for a JSON struct; 'undefined' or
%% empty values collapse to [] so they vanish from the output.
format_json_att(_Name, undefined) -> [];
format_json_att(_Name, []) -> [];
format_json_att(Name, Value) -> {Name, Value}.
%%
%% Local Functions
%%